]> git.ipfire.org Git - thirdparty/binutils-gdb.git/blob - gdb/arm-tdep.c
Refactor arm_return_in_memory
[thirdparty/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arch/arm.h"
49 #include "arm-tdep.h"
50 #include "gdb/sim-arm.h"
51
52 #include "elf-bfd.h"
53 #include "coff/internal.h"
54 #include "elf/arm.h"
55
56 #include "vec.h"
57
58 #include "record.h"
59 #include "record-full.h"
60
61 #include "features/arm-with-m.c"
62 #include "features/arm-with-m-fpa-layout.c"
63 #include "features/arm-with-m-vfp-d16.c"
64 #include "features/arm-with-iwmmxt.c"
65 #include "features/arm-with-vfpv2.c"
66 #include "features/arm-with-vfpv3.c"
67 #include "features/arm-with-neon.c"
68
/* Non-zero enables ARM-specific debug output — presumably wired to a
   "set debug arm" command elsewhere in this file; confirm at the
   _initialize function.  */
static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One mapping symbol, stored as its section-relative offset plus its
   type character (arm_find_mapping_symbol compares against 't').  */
struct arm_mapping_symbol
{
  bfd_vma value;	/* Offset from the start of the section.  */
  char type;		/* Mapping symbol class character.  */
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile storage: one vector of mapping symbols per BFD section,
   indexed by the section's index (see arm_find_mapping_symbol).  */
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};
99
/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

/* User-selectable modes: the fallback is consulted only when nothing
   else decides ARM vs. Thumb, while the force mode overrides symbol
   information (see arm_pc_is_thumb for the exact ordering).  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;

/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

/* Default GDB register names, indexed by regnum; the trailing comments
   give the register numbers.  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;

/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

/* Conversions between the FPA extended format and host doubles;
   defined later in this file.  */
static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

/* Pseudo-register read/write helpers for NEON quad registers; defined
   later in this file.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);
238
/* Per-frame cache filled in by prologue analysis (see
   arm_analyze_prologue below, which takes a pointer to one).  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};
258
/* Forward declaration; the definition appears later in this file.  */
static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This effects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION	5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
272
273 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
274
275 int
276 arm_psr_thumb_bit (struct gdbarch *gdbarch)
277 {
278 if (gdbarch_tdep (gdbarch)->is_m)
279 return XPSR_T;
280 else
281 return CPSR_T;
282 }
283
284 /* Determine if FRAME is executing in Thumb mode. */
285
286 int
287 arm_frame_is_thumb (struct frame_info *frame)
288 {
289 CORE_ADDR cpsr;
290 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
291
292 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
293 directly (from a signal frame or dummy frame) or by interpreting
294 the saved LR (from a prologue or DWARF frame). So consult it and
295 trust the unwinders. */
296 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
297
298 return (cpsr & t_bit) != 0;
299 }
300
301 /* Callback for VEC_lower_bound. */
302
303 static inline int
304 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
305 const struct arm_mapping_symbol *rhs)
306 {
307 return lhs->value < rhs->value;
308 }
309
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbol values are section-relative, so rebase MEMADDR
	 before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  /* One sorted vector per BFD section (see arm_per_objfile).  */
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      /* Fall back to the symbol preceding the insertion point,
		 if any; an address before the first mapping symbol has
		 no covering symbol at all.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No section, no per-objfile data, or no covering symbol.  */
  return 0;
}
369
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   The checks below are strictly ordered: overrides first, then symbol
   information, then fallbacks.  Do not reorder them.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
439
440 /* Remove useless bits from addresses in a running program. */
441 static CORE_ADDR
442 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
443 {
444 /* On M-profile devices, do not strip the low bit from EXC_RETURN
445 (the magic exception return address). */
446 if (gdbarch_tdep (gdbarch)->is_m
447 && (val & 0xfffffff0) == 0xfffffff0)
448 return val;
449
450 if (arm_apcs_32)
451 return UNMAKE_THUMB_ADDR (val);
452 else
453 return (val & 0x03fffffc);
454 }
455
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Strip the stub's own "__" prefix so the
	 checks below see the target function's name.  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  return 0;
}
509
/* Support routines for instruction parsing.  */

/* submask(x): mask covering bits 0..X inclusive.
   NOTE(review): for X >= 31 the shift can overflow a 32-bit long,
   which is undefined behavior — confirm callers keep X small enough
   on ILP32 hosts.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* bit(obj,st): extract bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* bits(obj,st,fn): extract the inclusive bitfield ST..FN of OBJ.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* sbits(obj,st,fn): like bits, but sign-extend using bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* BranchDest(addr,instr): branch target of an ARM branch at ADDR:
   ADDR + 8 plus the sign-extended 24-bit offset scaled by 4.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)		\
   | (bits ((insn1), 10, 10) << 11)	\
   | (bits ((insn2), 12, 14) << 8)	\
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
533
/* Decode immediate value; implements ThumbExpandImmediate pseudo-op.
   IMM is the 12-bit i:imm3:imm8 field; the result is the expanded
   32-bit constant.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rotation = imm >> 7;
  unsigned int byte = imm & 0xff;

  if (rotation >= 8)
    /* Rotated form: an 8-bit value 1bcdefgh rotated right by ROTATION,
       i.e. shifted left by 32 - ROTATION here.  */
    return (0x80 | (imm & 0x7f)) << (32 - rotation);

  /* Replicated-byte forms, selected by the top two bits of the
     five-bit ROTATION field.  */
  switch (rotation / 2)
    {
    case 0:	/* 00000000 00000000 00000000 abcdefgh  */
      return byte;
    case 1:	/* 00000000 abcdefgh 00000000 abcdefgh  */
      return byte | (byte << 16);
    case 2:	/* abcdefgh 00000000 abcdefgh 00000000  */
      return (byte << 8) | (byte << 24);
    default:	/* abcdefgh abcdefgh abcdefgh abcdefgh  */
      return byte | (byte << 8) | (byte << 16) | (byte << 24);
    }
}
557
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  /* Mask/value pairs for every 16-bit encoding that can write the PC.  */
  static const struct
  {
    unsigned short mask;
    unsigned short value;
  } pc_writers[] =
    {
      { 0xff00, 0xbd00 },	/* pop {rlist, pc}  */
      { 0xf000, 0xd000 },	/* conditional branch  */
      { 0xf800, 0xe000 },	/* unconditional branch  */
      { 0xff00, 0x4700 },	/* bx REG, blx REG  */
      { 0xff87, 0x4687 },	/* mov pc, REG  */
      { 0xf500, 0xb100 },	/* CBNZ or CBZ.  */
    };
  unsigned int i;

  for (i = 0; i < sizeof (pc_writers) / sizeof (pc_writers[0]); i++)
    if ((inst & pc_writers[i].mask) == pc_writers[i].value)
      return 1;

  return 0;
}
584
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  INST1 is the first
   halfword, INST2 the second.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      /* Other miscellaneous control (hints, CPS, etc.) fall through.  */
      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  Bits 7 and 8 of INST1 distinguish the
	 increment/decrement variants; a load multiple only changes the
	 PC if the register list (INST2) includes PC (bit 15).  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC: only certain addressing forms actually load the PC;
	 the remaining encodings (e.g. some immediate forms) do not.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  return 0;
}
679
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
690
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
696
697 static CORE_ADDR
698 thumb_analyze_prologue (struct gdbarch *gdbarch,
699 CORE_ADDR start, CORE_ADDR limit,
700 struct arm_prologue_cache *cache)
701 {
702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
704 int i;
705 pv_t regs[16];
706 struct pv_area *stack;
707 struct cleanup *back_to;
708 CORE_ADDR offset;
709 CORE_ADDR unrecognized_pc = 0;
710
711 for (i = 0; i < 16; i++)
712 regs[i] = pv_register (i, 0);
713 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
714 back_to = make_cleanup_free_pv_area (stack);
715
716 while (start < limit)
717 {
718 unsigned short insn;
719
720 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
721
722 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
723 {
724 int regno;
725 int mask;
726
727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
728 break;
729
730 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
731 whether to save LR (R14). */
732 mask = (insn & 0xff) | ((insn & 0x100) << 6);
733
734 /* Calculate offsets of saved R0-R7 and LR. */
735 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
736 if (mask & (1 << regno))
737 {
738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
739 -4);
740 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
741 }
742 }
743 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
744 {
745 offset = (insn & 0x7f) << 2; /* get scaled offset */
746 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
747 -offset);
748 }
749 else if (thumb_instruction_restores_sp (insn))
750 {
751 /* Don't scan past the epilogue. */
752 break;
753 }
754 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
755 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
756 (insn & 0xff) << 2);
757 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
758 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
759 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
760 bits (insn, 6, 8));
761 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
762 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
763 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
764 bits (insn, 0, 7));
765 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
766 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
767 && pv_is_constant (regs[bits (insn, 3, 5)]))
768 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
769 regs[bits (insn, 6, 8)]);
770 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
771 && pv_is_constant (regs[bits (insn, 3, 6)]))
772 {
773 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
774 int rm = bits (insn, 3, 6);
775 regs[rd] = pv_add (regs[rd], regs[rm]);
776 }
777 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
778 {
779 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
780 int src_reg = (insn & 0x78) >> 3;
781 regs[dst_reg] = regs[src_reg];
782 }
783 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
784 {
785 /* Handle stores to the stack. Normally pushes are used,
786 but with GCC -mtpcs-frame, there may be other stores
787 in the prologue to create the frame. */
788 int regno = (insn >> 8) & 0x7;
789 pv_t addr;
790
791 offset = (insn & 0xff) << 2;
792 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
793
794 if (pv_area_store_would_trash (stack, addr))
795 break;
796
797 pv_area_store (stack, addr, 4, regs[regno]);
798 }
799 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
800 {
801 int rd = bits (insn, 0, 2);
802 int rn = bits (insn, 3, 5);
803 pv_t addr;
804
805 offset = bits (insn, 6, 10) << 2;
806 addr = pv_add_constant (regs[rn], offset);
807
808 if (pv_area_store_would_trash (stack, addr))
809 break;
810
811 pv_area_store (stack, addr, 4, regs[rd]);
812 }
813 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
814 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
815 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
816 /* Ignore stores of argument registers to the stack. */
817 ;
818 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
819 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
820 /* Ignore block loads from the stack, potentially copying
821 parameters from memory. */
822 ;
823 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
824 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
825 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
826 /* Similarly ignore single loads from the stack. */
827 ;
828 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
829 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
830 /* Skip register copies, i.e. saves to another register
831 instead of the stack. */
832 ;
833 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
834 /* Recognize constant loads; even with small stacks these are necessary
835 on Thumb. */
836 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
837 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
838 {
839 /* Constant pool loads, for the same reason. */
840 unsigned int constant;
841 CORE_ADDR loc;
842
843 loc = start + 4 + bits (insn, 0, 7) * 4;
844 constant = read_memory_unsigned_integer (loc, 4, byte_order);
845 regs[bits (insn, 8, 10)] = pv_constant (constant);
846 }
847 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
848 {
849 unsigned short inst2;
850
851 inst2 = read_memory_unsigned_integer (start + 2, 2,
852 byte_order_for_code);
853
854 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
855 {
856 /* BL, BLX. Allow some special function calls when
857 skipping the prologue; GCC generates these before
858 storing arguments to the stack. */
859 CORE_ADDR nextpc;
860 int j1, j2, imm1, imm2;
861
862 imm1 = sbits (insn, 0, 10);
863 imm2 = bits (inst2, 0, 10);
864 j1 = bit (inst2, 13);
865 j2 = bit (inst2, 11);
866
867 offset = ((imm1 << 12) + (imm2 << 1));
868 offset ^= ((!j2) << 22) | ((!j1) << 23);
869
870 nextpc = start + 4 + offset;
871 /* For BLX make sure to clear the low bits. */
872 if (bit (inst2, 12) == 0)
873 nextpc = nextpc & 0xfffffffc;
874
875 if (!skip_prologue_function (gdbarch, nextpc,
876 bit (inst2, 12) != 0))
877 break;
878 }
879
880 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
881 { registers } */
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 {
884 pv_t addr = regs[bits (insn, 0, 3)];
885 int regno;
886
887 if (pv_area_store_would_trash (stack, addr))
888 break;
889
890 /* Calculate offsets of saved registers. */
891 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
892 if (inst2 & (1 << regno))
893 {
894 addr = pv_add_constant (addr, -4);
895 pv_area_store (stack, addr, 4, regs[regno]);
896 }
897
898 if (insn & 0x0020)
899 regs[bits (insn, 0, 3)] = addr;
900 }
901
902 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
903 [Rn, #+/-imm]{!} */
904 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
905 {
906 int regno1 = bits (inst2, 12, 15);
907 int regno2 = bits (inst2, 8, 11);
908 pv_t addr = regs[bits (insn, 0, 3)];
909
910 offset = inst2 & 0xff;
911 if (insn & 0x0080)
912 addr = pv_add_constant (addr, offset);
913 else
914 addr = pv_add_constant (addr, -offset);
915
916 if (pv_area_store_would_trash (stack, addr))
917 break;
918
919 pv_area_store (stack, addr, 4, regs[regno1]);
920 pv_area_store (stack, pv_add_constant (addr, 4),
921 4, regs[regno2]);
922
923 if (insn & 0x0020)
924 regs[bits (insn, 0, 3)] = addr;
925 }
926
927 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
928 && (inst2 & 0x0c00) == 0x0c00
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 {
931 int regno = bits (inst2, 12, 15);
932 pv_t addr = regs[bits (insn, 0, 3)];
933
934 offset = inst2 & 0xff;
935 if (inst2 & 0x0200)
936 addr = pv_add_constant (addr, offset);
937 else
938 addr = pv_add_constant (addr, -offset);
939
940 if (pv_area_store_would_trash (stack, addr))
941 break;
942
943 pv_area_store (stack, addr, 4, regs[regno]);
944
945 if (inst2 & 0x0100)
946 regs[bits (insn, 0, 3)] = addr;
947 }
948
949 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
951 {
952 int regno = bits (inst2, 12, 15);
953 pv_t addr;
954
955 offset = inst2 & 0xfff;
956 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
957
958 if (pv_area_store_would_trash (stack, addr))
959 break;
960
961 pv_area_store (stack, addr, 4, regs[regno]);
962 }
963
964 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
965 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
966 /* Ignore stores of argument registers to the stack. */
967 ;
968
969 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
970 && (inst2 & 0x0d00) == 0x0c00
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
973 ;
974
975 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
976 { registers } */
977 && (inst2 & 0x8000) == 0x0000
978 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
979 /* Ignore block loads from the stack, potentially copying
980 parameters from memory. */
981 ;
982
983 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
984 [Rn, #+/-imm] */
985 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
986 /* Similarly ignore dual loads from the stack. */
987 ;
988
989 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
990 && (inst2 & 0x0d00) == 0x0c00
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore single loads from the stack. */
993 ;
994
995 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
996 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
997 /* Similarly ignore single loads from the stack. */
998 ;
999
1000 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1002 {
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1006
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)],
1009 thumb_expand_immediate (imm));
1010 }
1011
1012 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1013 && (inst2 & 0x8000) == 0x0000)
1014 {
1015 unsigned int imm = ((bits (insn, 10, 10) << 11)
1016 | (bits (inst2, 12, 14) << 8)
1017 | bits (inst2, 0, 7));
1018
1019 regs[bits (inst2, 8, 11)]
1020 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1021 }
1022
1023 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1024 && (inst2 & 0x8000) == 0x0000)
1025 {
1026 unsigned int imm = ((bits (insn, 10, 10) << 11)
1027 | (bits (inst2, 12, 14) << 8)
1028 | bits (inst2, 0, 7));
1029
1030 regs[bits (inst2, 8, 11)]
1031 = pv_add_constant (regs[bits (insn, 0, 3)],
1032 - (CORE_ADDR) thumb_expand_immediate (imm));
1033 }
1034
1035 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1036 && (inst2 & 0x8000) == 0x0000)
1037 {
1038 unsigned int imm = ((bits (insn, 10, 10) << 11)
1039 | (bits (inst2, 12, 14) << 8)
1040 | bits (inst2, 0, 7));
1041
1042 regs[bits (inst2, 8, 11)]
1043 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1044 }
1045
1046 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1047 {
1048 unsigned int imm = ((bits (insn, 10, 10) << 11)
1049 | (bits (inst2, 12, 14) << 8)
1050 | bits (inst2, 0, 7));
1051
1052 regs[bits (inst2, 8, 11)]
1053 = pv_constant (thumb_expand_immediate (imm));
1054 }
1055
1056 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1057 {
1058 unsigned int imm
1059 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1060
1061 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1062 }
1063
1064 else if (insn == 0xea5f /* mov.w Rd,Rm */
1065 && (inst2 & 0xf0f0) == 0)
1066 {
1067 int dst_reg = (inst2 & 0x0f00) >> 8;
1068 int src_reg = inst2 & 0xf;
1069 regs[dst_reg] = regs[src_reg];
1070 }
1071
1072 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1073 {
1074 /* Constant pool loads. */
1075 unsigned int constant;
1076 CORE_ADDR loc;
1077
1078 offset = bits (inst2, 0, 11);
1079 if (insn & 0x0080)
1080 loc = start + 4 + offset;
1081 else
1082 loc = start + 4 - offset;
1083
1084 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1085 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1086 }
1087
1088 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1089 {
1090 /* Constant pool loads. */
1091 unsigned int constant;
1092 CORE_ADDR loc;
1093
1094 offset = bits (inst2, 0, 7) << 2;
1095 if (insn & 0x0080)
1096 loc = start + 4 + offset;
1097 else
1098 loc = start + 4 - offset;
1099
1100 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1101 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1102
1103 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1104 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1105 }
1106
1107 else if (thumb2_instruction_changes_pc (insn, inst2))
1108 {
1109 /* Don't scan past anything that might change control flow. */
1110 break;
1111 }
1112 else
1113 {
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1117 }
1118
1119 start += 2;
1120 }
1121 else if (thumb_instruction_changes_pc (insn))
1122 {
1123 /* Don't scan past anything that might change control flow. */
1124 break;
1125 }
1126 else
1127 {
1128 /* The optimizer might shove anything into the prologue,
1129 so we just skip what we don't recognize. */
1130 unrecognized_pc = start;
1131 }
1132
1133 start += 2;
1134 }
1135
1136 if (arm_debug)
1137 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1138 paddress (gdbarch, start));
1139
1140 if (unrecognized_pc == 0)
1141 unrecognized_pc = start;
1142
1143 if (cache == NULL)
1144 {
1145 do_cleanups (back_to);
1146 return unrecognized_pc;
1147 }
1148
1149 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1150 {
1151 /* Frame pointer is fp. Frame size is constant. */
1152 cache->framereg = ARM_FP_REGNUM;
1153 cache->framesize = -regs[ARM_FP_REGNUM].k;
1154 }
1155 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1156 {
1157 /* Frame pointer is r7. Frame size is constant. */
1158 cache->framereg = THUMB_FP_REGNUM;
1159 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1160 }
1161 else
1162 {
1163 /* Try the stack pointer... this is a bit desperate. */
1164 cache->framereg = ARM_SP_REGNUM;
1165 cache->framesize = -regs[ARM_SP_REGNUM].k;
1166 }
1167
1168 for (i = 0; i < 16; i++)
1169 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1170 cache->saved_regs[i].addr = offset;
1171
1172 do_cleanups (back_to);
1173 return unrecognized_pc;
1174 }
1175
1176
/* Try to analyze the instructions starting from PC, which load symbol
   __stack_chk_guard.  Return the address of instruction after loading this
   symbol, set the dest register number to *DESTREG, and set the size of
   instructions for loading symbol in *OFFSET.  Return 0 if instructions are
   not recognized.  */

static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays 0 -- the "not recognized" result -- unless one of the
     known load sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* 16-bit PC-relative literal load: the pool entry lives at
	     Align(PC, 4) + 4 + imm8 * 4; read the guard's address from
	     there.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  /* Read the next 32-bit Thumb-2 instruction; it must be the
	     matching movt that supplies the upper half-word.  */
	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* ARM PC-relative literal load: pool entry at PC + 8 + imm12.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  /* The following instruction must be the movt with the
	     upper half-word.  */
	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000) /* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1260
/* Try to skip a sequence of instructions used for stack protector.  If PC
   points to the first instruction of this sequence, return the address of
   first instruction after this sequence, otherwise, return original PC.

   On arm, this sequence of instructions is composed of mainly three steps,
     Step 1: load symbol __stack_chk_guard,
     Step 2: load from address of __stack_chk_guard,
     Step 3: store it to somewhere else.

   Usually, instructions on step 2 and step 3 are the same on various ARM
   architectures.  On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
   on step 3, it is also one instruction 'str Rx, [r7, #immd]'.  However,
   instructions in step 1 vary from different ARM architectures.  On ARMv7,
   they are,

	movw	Rn, #:lower16:__stack_chk_guard
	movt	Rn, #:upper16:__stack_chk_guard

   On ARMv5t, it is,

	ldr	Rn, .Label
	....
	.Label:
	.word	__stack_chk_guard

   Since ldr/str is a very popular instruction, we can't use them as
   'fingerprint' or 'signature' of stack protector sequence.  Here we choose
   sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */

static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must read through the register that received the
	 guard's address in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The stored register must be the one loaded in Step 2.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* Base register must hold the guard's address from Step 1.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      /* The stored register must be the one loaded in Step 2.  */
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1362
1363 /* Advance the PC across any function entry prologue instructions to
1364 reach some "real" code.
1365
1366 The APCS (ARM Procedure Call Standard) defines the following
1367 prologue:
1368
1369 mov ip, sp
1370 [stmfd sp!, {a1,a2,a3,a4}]
1371 stmfd sp!, {...,fp,ip,lr,pc}
1372 [stfe f7, [sp, #-12]!]
1373 [stfe f6, [sp, #-12]!]
1374 [stfe f5, [sp, #-12]!]
1375 [stfe f4, [sp, #-12]!]
1376 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1377
1378 static CORE_ADDR
1379 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1380 {
1381 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1382 unsigned long inst;
1383 CORE_ADDR func_addr, limit_pc;
1384
1385 /* See if we can determine the end of the prologue via the symbol table.
1386 If so, then return either PC, or the PC after the prologue, whichever
1387 is greater. */
1388 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1389 {
1390 CORE_ADDR post_prologue_pc
1391 = skip_prologue_using_sal (gdbarch, func_addr);
1392 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1393
1394 if (post_prologue_pc)
1395 post_prologue_pc
1396 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1397
1398
1399 /* GCC always emits a line note before the prologue and another
1400 one after, even if the two are at the same address or on the
1401 same line. Take advantage of this so that we do not need to
1402 know every instruction that might appear in the prologue. We
1403 will have producer information for most binaries; if it is
1404 missing (e.g. for -gstabs), assuming the GNU tools. */
1405 if (post_prologue_pc
1406 && (cust == NULL
1407 || COMPUNIT_PRODUCER (cust) == NULL
1408 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1409 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1410 return post_prologue_pc;
1411
1412 if (post_prologue_pc != 0)
1413 {
1414 CORE_ADDR analyzed_limit;
1415
1416 /* For non-GCC compilers, make sure the entire line is an
1417 acceptable prologue; GDB will round this function's
1418 return value up to the end of the following line so we
1419 can not skip just part of a line (and we do not want to).
1420
1421 RealView does not treat the prologue specially, but does
1422 associate prologue code with the opening brace; so this
1423 lets us skip the first line if we think it is the opening
1424 brace. */
1425 if (arm_pc_is_thumb (gdbarch, func_addr))
1426 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1427 post_prologue_pc, NULL);
1428 else
1429 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1430 post_prologue_pc, NULL);
1431
1432 if (analyzed_limit != post_prologue_pc)
1433 return func_addr;
1434
1435 return post_prologue_pc;
1436 }
1437 }
1438
1439 /* Can't determine prologue from the symbol table, need to examine
1440 instructions. */
1441
1442 /* Find an upper limit on the function prologue using the debug
1443 information. If the debug information could not be used to provide
1444 that bound, then use an arbitrary large number as the upper bound. */
1445 /* Like arm_scan_prologue, stop no later than pc + 64. */
1446 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1447 if (limit_pc == 0)
1448 limit_pc = pc + 64; /* Magic. */
1449
1450
1451 /* Check if this is Thumb code. */
1452 if (arm_pc_is_thumb (gdbarch, pc))
1453 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1454 else
1455 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1456 }
1457
1458 /* *INDENT-OFF* */
1459 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1460 This function decodes a Thumb function prologue to determine:
1461 1) the size of the stack frame
1462 2) which registers are saved on it
1463 3) the offsets of saved regs
1464 4) the offset from the stack pointer to the frame pointer
1465
1466 A typical Thumb function prologue would create this stack frame
1467 (offsets relative to FP)
1468 old SP -> 24 stack parameters
1469 20 LR
1470 16 R7
1471 R7 -> 0 local variables (16 bytes)
1472 SP -> -12 additional stack space (12 bytes)
1473 The frame size would thus be 36 bytes, and the frame offset would be
1474 12 bytes. The frame register is R7.
1475
1476 The comments for thumb_skip_prolog() describe the algorithm we use
1477 to detect the end of the prolog. */
1478 /* *INDENT-ON* */
1479
1480 static void
1481 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1482 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1483 {
1484 CORE_ADDR prologue_start;
1485 CORE_ADDR prologue_end;
1486
1487 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1488 &prologue_end))
1489 {
1490 /* See comment in arm_scan_prologue for an explanation of
1491 this heuristics. */
1492 if (prologue_end > prologue_start + 64)
1493 {
1494 prologue_end = prologue_start + 64;
1495 }
1496 }
1497 else
1498 /* We're in the boondocks: we have no idea where the start of the
1499 function is. */
1500 return;
1501
1502 prologue_end = min (prologue_end, prev_pc);
1503
1504 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1505 }
1506
/* Return 1 if THIS_INSTR might change control flow, 0 otherwise.
   THIS_INSTR is a 32-bit ARM-mode instruction; the classification
   follows the top-level opcode fields (cond in bits 28-31, op in
   bits 25-27).  */

static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	/* Data processing: writing the result to PC (Rd == 15) is a
	   branch.  */
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  Only a load (bit 20) whose register
	   list includes PC (bit 15) can branch.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1598
1599 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1600 otherwise. */
1601
1602 static int
1603 arm_instruction_restores_sp (unsigned int insn)
1604 {
1605 if (bits (insn, 28, 31) != INST_NV)
1606 {
1607 if ((insn & 0x0df0f000) == 0x0080d000
1608 /* ADD SP (register or immediate). */
1609 || (insn & 0x0df0f000) == 0x0040d000
1610 /* SUB SP (register or immediate). */
1611 || (insn & 0x0ffffff0) == 0x01a0d000
1612 /* MOV SP. */
1613 || (insn & 0x0fff0000) == 0x08bd0000
1614 /* POP (LDMIA). */
1615 || (insn & 0x0fff0000) == 0x049d0000)
1616 /* POP of a single register. */
1617 return 1;
1618 }
1619
1620 return 0;
1621 }
1622
1623 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1624 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1625 fill it in. Return the first address not recognized as a prologue
1626 instruction.
1627
1628 We recognize all the instructions typically found in ARM prologues,
1629 plus harmless instructions which can be skipped (either for analysis
1630 purposes, or a more restrictive set that can be skipped when finding
1631 the end of the prologue). */
1632
1633 static CORE_ADDR
1634 arm_analyze_prologue (struct gdbarch *gdbarch,
1635 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1636 struct arm_prologue_cache *cache)
1637 {
1638 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1639 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1640 int regno;
1641 CORE_ADDR offset, current_pc;
1642 pv_t regs[ARM_FPS_REGNUM];
1643 struct pv_area *stack;
1644 struct cleanup *back_to;
1645 CORE_ADDR unrecognized_pc = 0;
1646
1647 /* Search the prologue looking for instructions that set up the
1648 frame pointer, adjust the stack pointer, and save registers.
1649
1650 Be careful, however, and if it doesn't look like a prologue,
1651 don't try to scan it. If, for instance, a frameless function
1652 begins with stmfd sp!, then we will tell ourselves there is
1653 a frame, which will confuse stack traceback, as well as "finish"
1654 and other operations that rely on a knowledge of the stack
1655 traceback. */
1656
1657 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1658 regs[regno] = pv_register (regno, 0);
1659 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1660 back_to = make_cleanup_free_pv_area (stack);
1661
1662 for (current_pc = prologue_start;
1663 current_pc < prologue_end;
1664 current_pc += 4)
1665 {
1666 unsigned int insn
1667 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1668
1669 if (insn == 0xe1a0c00d) /* mov ip, sp */
1670 {
1671 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1672 continue;
1673 }
1674 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 {
1677 unsigned imm = insn & 0xff; /* immediate value */
1678 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1679 int rd = bits (insn, 12, 15);
1680 imm = (imm >> rot) | (imm << (32 - rot));
1681 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1682 continue;
1683 }
1684 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 {
1687 unsigned imm = insn & 0xff; /* immediate value */
1688 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1689 int rd = bits (insn, 12, 15);
1690 imm = (imm >> rot) | (imm << (32 - rot));
1691 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1692 continue;
1693 }
1694 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1695 [sp, #-4]! */
1696 {
1697 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1698 break;
1699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1700 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1701 regs[bits (insn, 12, 15)]);
1702 continue;
1703 }
1704 else if ((insn & 0xffff0000) == 0xe92d0000)
1705 /* stmfd sp!, {..., fp, ip, lr, pc}
1706 or
1707 stmfd sp!, {a1, a2, a3, a4} */
1708 {
1709 int mask = insn & 0xffff;
1710
1711 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1712 break;
1713
1714 /* Calculate offsets of saved registers. */
1715 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1716 if (mask & (1 << regno))
1717 {
1718 regs[ARM_SP_REGNUM]
1719 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1720 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1721 }
1722 }
1723 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1724 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1725 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1726 {
1727 /* No need to add this to saved_regs -- it's just an arg reg. */
1728 continue;
1729 }
1730 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1731 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1732 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1733 {
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1735 continue;
1736 }
1737 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1738 { registers } */
1739 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1740 {
1741 /* No need to add this to saved_regs -- it's just arg regs. */
1742 continue;
1743 }
1744 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1745 {
1746 unsigned imm = insn & 0xff; /* immediate value */
1747 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1748 imm = (imm >> rot) | (imm << (32 - rot));
1749 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1750 }
1751 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1752 {
1753 unsigned imm = insn & 0xff; /* immediate value */
1754 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1755 imm = (imm >> rot) | (imm << (32 - rot));
1756 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1757 }
1758 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1759 [sp, -#c]! */
1760 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1761 {
1762 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1763 break;
1764
1765 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1766 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1767 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1768 }
1769 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1770 [sp!] */
1771 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1772 {
1773 int n_saved_fp_regs;
1774 unsigned int fp_start_reg, fp_bound_reg;
1775
1776 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1777 break;
1778
1779 if ((insn & 0x800) == 0x800) /* N0 is set */
1780 {
1781 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1782 n_saved_fp_regs = 3;
1783 else
1784 n_saved_fp_regs = 1;
1785 }
1786 else
1787 {
1788 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs = 2;
1790 else
1791 n_saved_fp_regs = 4;
1792 }
1793
1794 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1795 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1796 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1797 {
1798 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1799 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1800 regs[fp_start_reg++]);
1801 }
1802 }
1803 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1804 {
1805 /* Allow some special function calls when skipping the
1806 prologue; GCC generates these before storing arguments to
1807 the stack. */
1808 CORE_ADDR dest = BranchDest (current_pc, insn);
1809
1810 if (skip_prologue_function (gdbarch, dest, 0))
1811 continue;
1812 else
1813 break;
1814 }
1815 else if ((insn & 0xf0000000) != 0xe0000000)
1816 break; /* Condition not true, exit early. */
1817 else if (arm_instruction_changes_pc (insn))
1818 /* Don't scan past anything that might change control flow. */
1819 break;
1820 else if (arm_instruction_restores_sp (insn))
1821 {
1822 /* Don't scan past the epilogue. */
1823 break;
1824 }
1825 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1826 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1827 /* Ignore block loads from the stack, potentially copying
1828 parameters from memory. */
1829 continue;
1830 else if ((insn & 0xfc500000) == 0xe4100000
1831 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1832 /* Similarly ignore single loads from the stack. */
1833 continue;
1834 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1835 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1836 register instead of the stack. */
1837 continue;
1838 else
1839 {
1840 /* The optimizer might shove anything into the prologue, if
1841 we build up cache (cache != NULL) from scanning prologue,
1842 we just skip what we don't recognize and scan further to
1843 make cache as complete as possible. However, if we skip
1844 prologue, we'll stop immediately on unrecognized
1845 instruction. */
1846 unrecognized_pc = current_pc;
1847 if (cache != NULL)
1848 continue;
1849 else
1850 break;
1851 }
1852 }
1853
1854 if (unrecognized_pc == 0)
1855 unrecognized_pc = current_pc;
1856
1857 if (cache)
1858 {
1859 int framereg, framesize;
1860
1861 /* The frame size is just the distance from the frame register
1862 to the original stack pointer. */
1863 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1864 {
1865 /* Frame pointer is fp. */
1866 framereg = ARM_FP_REGNUM;
1867 framesize = -regs[ARM_FP_REGNUM].k;
1868 }
1869 else
1870 {
1871 /* Try the stack pointer... this is a bit desperate. */
1872 framereg = ARM_SP_REGNUM;
1873 framesize = -regs[ARM_SP_REGNUM].k;
1874 }
1875
1876 cache->framereg = framereg;
1877 cache->framesize = framesize;
1878
1879 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1880 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1881 cache->saved_regs[regno].addr = offset;
1882 }
1883
1884 if (arm_debug)
1885 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1886 paddress (gdbarch, unrecognized_pc));
1887
1888 do_cleanups (back_to);
1889 return unrecognized_pc;
1890 }
1891
1892 static void
1893 arm_scan_prologue (struct frame_info *this_frame,
1894 struct arm_prologue_cache *cache)
1895 {
1896 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1897 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1898 int regno;
1899 CORE_ADDR prologue_start, prologue_end, current_pc;
1900 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1901 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1902 pv_t regs[ARM_FPS_REGNUM];
1903 struct pv_area *stack;
1904 struct cleanup *back_to;
1905 CORE_ADDR offset;
1906
1907 /* Assume there is no frame until proven otherwise. */
1908 cache->framereg = ARM_SP_REGNUM;
1909 cache->framesize = 0;
1910
1911 /* Check for Thumb prologue. */
1912 if (arm_frame_is_thumb (this_frame))
1913 {
1914 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1915 return;
1916 }
1917
1918 /* Find the function prologue. If we can't find the function in
1919 the symbol table, peek in the stack frame to find the PC. */
1920 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1921 &prologue_end))
1922 {
1923 /* One way to find the end of the prologue (which works well
1924 for unoptimized code) is to do the following:
1925
1926 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1927
1928 if (sal.line == 0)
1929 prologue_end = prev_pc;
1930 else if (sal.end < prologue_end)
1931 prologue_end = sal.end;
1932
1933 This mechanism is very accurate so long as the optimizer
1934 doesn't move any instructions from the function body into the
1935 prologue. If this happens, sal.end will be the last
1936 instruction in the first hunk of prologue code just before
1937 the first instruction that the scheduler has moved from
1938 the body to the prologue.
1939
1940 In order to make sure that we scan all of the prologue
1941 instructions, we use a slightly less accurate mechanism which
1942 may scan more than necessary. To help compensate for this
1943 lack of accuracy, the prologue scanning loop below contains
1944 several clauses which'll cause the loop to terminate early if
1945 an implausible prologue instruction is encountered.
1946
1947 The expression
1948
1949 prologue_start + 64
1950
1951 is a suitable endpoint since it accounts for the largest
1952 possible prologue plus up to five instructions inserted by
1953 the scheduler. */
1954
1955 if (prologue_end > prologue_start + 64)
1956 {
1957 prologue_end = prologue_start + 64; /* See above. */
1958 }
1959 }
1960 else
1961 {
1962 /* We have no symbol information. Our only option is to assume this
1963 function has a standard stack frame and the normal frame register.
1964 Then, we can find the value of our frame pointer on entrance to
1965 the callee (or at the present moment if this is the innermost frame).
1966 The value stored there should be the address of the stmfd + 8. */
1967 CORE_ADDR frame_loc;
1968 LONGEST return_value;
1969
1970 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1971 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1972 return;
1973 else
1974 {
1975 prologue_start = gdbarch_addr_bits_remove
1976 (gdbarch, return_value) - 8;
1977 prologue_end = prologue_start + 64; /* See above. */
1978 }
1979 }
1980
1981 if (prev_pc < prologue_end)
1982 prologue_end = prev_pc;
1983
1984 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1985 }
1986
1987 static struct arm_prologue_cache *
1988 arm_make_prologue_cache (struct frame_info *this_frame)
1989 {
1990 int reg;
1991 struct arm_prologue_cache *cache;
1992 CORE_ADDR unwound_fp;
1993
1994 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1995 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1996
1997 arm_scan_prologue (this_frame, cache);
1998
1999 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2000 if (unwound_fp == 0)
2001 return cache;
2002
2003 cache->prev_sp = unwound_fp + cache->framesize;
2004
2005 /* Calculate actual addresses of saved registers using offsets
2006 determined by arm_scan_prologue. */
2007 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2008 if (trad_frame_addr_p (cache->saved_regs, reg))
2009 cache->saved_regs[reg].addr += cache->prev_sp;
2010
2011 return cache;
2012 }
2013
2014 /* Implementation of the stop_reason hook for arm_prologue frames. */
2015
2016 static enum unwind_stop_reason
2017 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2018 void **this_cache)
2019 {
2020 struct arm_prologue_cache *cache;
2021 CORE_ADDR pc;
2022
2023 if (*this_cache == NULL)
2024 *this_cache = arm_make_prologue_cache (this_frame);
2025 cache = (struct arm_prologue_cache *) *this_cache;
2026
2027 /* This is meant to halt the backtrace at "_start". */
2028 pc = get_frame_pc (this_frame);
2029 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2030 return UNWIND_OUTERMOST;
2031
2032 /* If we've hit a wall, stop. */
2033 if (cache->prev_sp == 0)
2034 return UNWIND_OUTERMOST;
2035
2036 return UNWIND_NO_REASON;
2037 }
2038
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2041
2042 static void
2043 arm_prologue_this_id (struct frame_info *this_frame,
2044 void **this_cache,
2045 struct frame_id *this_id)
2046 {
2047 struct arm_prologue_cache *cache;
2048 struct frame_id id;
2049 CORE_ADDR pc, func;
2050
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = (struct arm_prologue_cache *) *this_cache;
2054
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 pc = get_frame_pc (this_frame);
2059 func = get_frame_func (this_frame);
2060 if (!func)
2061 func = pc;
2062
2063 id = frame_id_build (cache->prev_sp, func);
2064 *this_id = id;
2065 }
2066
2067 static struct value *
2068 arm_prologue_prev_register (struct frame_info *this_frame,
2069 void **this_cache,
2070 int prev_regnum)
2071 {
2072 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2073 struct arm_prologue_cache *cache;
2074
2075 if (*this_cache == NULL)
2076 *this_cache = arm_make_prologue_cache (this_frame);
2077 cache = (struct arm_prologue_cache *) *this_cache;
2078
2079 /* If we are asked to unwind the PC, then we need to return the LR
2080 instead. The prologue may save PC, but it will point into this
2081 frame's prologue, not the next frame's resume location. Also
2082 strip the saved T bit. A valid LR may have the low bit set, but
2083 a valid PC never does. */
2084 if (prev_regnum == ARM_PC_REGNUM)
2085 {
2086 CORE_ADDR lr;
2087
2088 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2089 return frame_unwind_got_constant (this_frame, prev_regnum,
2090 arm_addr_bits_remove (gdbarch, lr));
2091 }
2092
2093 /* SP is generally not saved to the stack, but this frame is
2094 identified by the next frame's stack pointer at the time of the call.
2095 The value was already reconstructed into PREV_SP. */
2096 if (prev_regnum == ARM_SP_REGNUM)
2097 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2098
2099 /* The CPSR may have been changed by the call instruction and by the
2100 called function. The only bit we can reconstruct is the T bit,
2101 by checking the low bit of LR as of the call. This is a reliable
2102 indicator of Thumb-ness except for some ARM v4T pre-interworking
2103 Thumb code, which could get away with a clear low bit as long as
2104 the called function did not use bx. Guess that all other
2105 bits are unchanged; the condition flags are presumably lost,
2106 but the processor status is likely valid. */
2107 if (prev_regnum == ARM_PS_REGNUM)
2108 {
2109 CORE_ADDR lr, cpsr;
2110 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2111
2112 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2114 if (IS_THUMB_ADDR (lr))
2115 cpsr |= t_bit;
2116 else
2117 cpsr &= ~t_bit;
2118 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2119 }
2120
2121 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2122 prev_regnum);
2123 }
2124
/* Unwinder based on prologue analysis; used when no DWARF CFI or
   exception table information applies to a frame.  */

struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
2133
/* Maintain a list of ARM exception table entries per objfile, similar to the
   list of mapping symbols.  We only cache entries for standard ARM-defined
   personality routines; the cache will contain only the frame unwinding
   instructions associated with the entry (not the descriptors).  */

/* Per-objfile key under which the arm_exidx_data cache is stored.  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry.  */
struct arm_exidx_entry
{
  /* Section-relative start address of the region this entry covers
     (the objfile parser subtracts the section VMA before storing).  */
  bfd_vma addr;
  /* Normalized unwind instructions, or NULL if there are none
     (e.g. EXIDX_CANTUNWIND).  */
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile exception table cache: one vector of entries per BFD
   section, indexed by the section's index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2153
2154 static void
2155 arm_exidx_data_free (struct objfile *objfile, void *arg)
2156 {
2157 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2158 unsigned int i;
2159
2160 for (i = 0; i < objfile->obfd->section_count; i++)
2161 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2162 }
2163
2164 static inline int
2165 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2166 const struct arm_exidx_entry *rhs)
2167 {
2168 return lhs->addr < rhs->addr;
2169 }
2170
2171 static struct obj_section *
2172 arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
2173 {
2174 struct obj_section *osect;
2175
2176 ALL_OBJFILE_OSECTIONS (objfile, osect)
2177 if (bfd_get_section_flags (objfile->obfd,
2178 osect->the_bfd_section) & SEC_ALLOC)
2179 {
2180 bfd_vma start, size;
2181 start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
2182 size = bfd_get_section_size (osect->the_bfd_section);
2183
2184 if (start <= vma && vma < start + size)
2185 return osect;
2186 }
2187
2188 return NULL;
2189 }
2190
/* Parse contents of exception table and exception index sections
   of OBJFILE, and fill in the exception table entry cache.

   For each entry that refers to a standard ARM-defined personality
   routine, extract the frame unwinding instructions (from either
   the index or the table section).  The unwinding instructions
   are normalized by:
   - extracting them from the rest of the table data
   - converting to host endianness
   - appending the implicit 0xb0 ("Finish") code

   The extracted and normalized instructions are stored for later
   retrieval by the arm_find_exidx_entry routine.  */

static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a place-relative offset to the function start, and either
     an inline unwind description or an offset into .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The field is a prel31
	 offset: sign-extend from bit 30, then add the field's own
	 position in the index section.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  The offset is again
	     prel31, relative to the second word of the index entry.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2414
/* Search for the exception table entry covering MEMADDR.  If one is found,
   return a pointer to its data.  Otherwise, return 0.  If START is non-NULL,
   set *START to the start of the region covered by this entry.  */

static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Cached entry addresses are section-relative, so convert
	 MEMADDR to a section offset for the search key.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2472
/* Given the current frame THIS_FRAME, and its associated frame unwinding
   instruction list from the ARM exception table entry ENTRY, allocate and
   return a prologue cache structure describing how to unwind this frame.

   ENTRY points at the normalized instruction bytes produced by
   arm_exidx_new_objfile (always terminated by a 0xb0 "Finish" code).
   VSP tracks the virtual stack pointer as the instructions are
   interpreted, exactly as described by the EHABI.

   Return NULL if the unwinding instruction list contains a "spare",
   "reserved" or "refuse to unwind" instruction as defined in section
   "9.3 Frame unwinding instructions" of the "Exception Handling ABI
   for the ARM Architecture" document.  */

static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* "Finish".  We could only have updated PC by popping into
	     it; if so, it will show up as address.  Otherwise, copy
	     LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* vsp = vsp + 0x204 + (uleb128 << 2).  The operand is a
	     ULEB128-encoded offset in the following bytes.  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  (The 0xc6 and
	     0xc7 codes were matched above, so only 0xc0..0xc5 reach
	     this arm.)  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* START is an offset from D16, so only D16..D31 are
	     addressable here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2771
/* Unwinding via ARM exception table entries.  Note that the sniffer
   already computes a filled-in prologue cache, which is then used
   with the same arm_prologue_this_id and arm_prologue_prev_register
   routines also used for prologue-parsing based unwinding.  */

static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.

	 The check reads the instruction just before PC and looks for
	 the mode-appropriate svc encoding.  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2866
/* Unwinder driven by ARM exception table (.ARM.exidx) entries; shares
   the this_id and prev_register hooks with the prologue unwinder.  */

struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2875
2876 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2877 trampoline, return the target PC. Otherwise return 0.
2878
2879 void call0a (char c, short s, int i, long l) {}
2880
2881 int main (void)
2882 {
2883 (*pointer_to_call0a) (c, s, i, l);
2884 }
2885
2886 Instead of calling a stub library function _call_via_xx (xx is
2887 the register name), GCC may inline the trampoline in the object
2888 file as below (register r2 has the address of call0a).
2889
2890 .global main
2891 .type main, %function
2892 ...
2893 bl .L1
2894 ...
2895 .size main, .-main
2896
2897 .L1:
2898 bx r2
2899
2900 The trampoline 'bx r2' doesn't belong to main. */
2901
2902 static CORE_ADDR
2903 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2904 {
2905 /* The heuristics of recognizing such trampoline is that FRAME is
2906 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2907 if (arm_frame_is_thumb (frame))
2908 {
2909 gdb_byte buf[2];
2910
2911 if (target_read_memory (pc, buf, 2) == 0)
2912 {
2913 struct gdbarch *gdbarch = get_frame_arch (frame);
2914 enum bfd_endian byte_order_for_code
2915 = gdbarch_byte_order_for_code (gdbarch);
2916 uint16_t insn
2917 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2918
2919 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2920 {
2921 CORE_ADDR dest
2922 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2923
2924 /* Clear the LSB so that gdb core sets step-resume
2925 breakpoint at the right address. */
2926 return UNMAKE_THUMB_ADDR (dest);
2927 }
2928 }
2929 }
2930
2931 return 0;
2932 }
2933
2934 static struct arm_prologue_cache *
2935 arm_make_stub_cache (struct frame_info *this_frame)
2936 {
2937 struct arm_prologue_cache *cache;
2938
2939 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2940 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2941
2942 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2943
2944 return cache;
2945 }
2946
2947 /* Our frame ID for a stub frame is the current SP and LR. */
2948
2949 static void
2950 arm_stub_this_id (struct frame_info *this_frame,
2951 void **this_cache,
2952 struct frame_id *this_id)
2953 {
2954 struct arm_prologue_cache *cache;
2955
2956 if (*this_cache == NULL)
2957 *this_cache = arm_make_stub_cache (this_frame);
2958 cache = (struct arm_prologue_cache *) *this_cache;
2959
2960 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2961 }
2962
2963 static int
2964 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2965 struct frame_info *this_frame,
2966 void **this_prologue_cache)
2967 {
2968 CORE_ADDR addr_in_block;
2969 gdb_byte dummy[4];
2970 CORE_ADDR pc, start_addr;
2971 const char *name;
2972
2973 addr_in_block = get_frame_address_in_block (this_frame);
2974 pc = get_frame_pc (this_frame);
2975 if (in_plt_section (addr_in_block)
2976 /* We also use the stub winder if the target memory is unreadable
2977 to avoid having the prologue unwinder trying to read it. */
2978 || target_read_memory (pc, dummy, 4) != 0)
2979 return 1;
2980
2981 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2982 && arm_skip_bx_reg (this_frame, pc) != 0)
2983 return 1;
2984
2985 return 0;
2986 }
2987
/* Unwinder for stub frames (PLT entries and unreadable code), sniffed
   by arm_stub_unwind_sniffer.  */

struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2996
2997 /* Put here the code to store, into CACHE->saved_regs, the addresses
2998 of the saved registers of frame described by THIS_FRAME. CACHE is
2999 returned. */
3000
3001 static struct arm_prologue_cache *
3002 arm_m_exception_cache (struct frame_info *this_frame)
3003 {
3004 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3005 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3006 struct arm_prologue_cache *cache;
3007 CORE_ADDR unwound_sp;
3008 LONGEST xpsr;
3009
3010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3012
3013 unwound_sp = get_frame_register_unsigned (this_frame,
3014 ARM_SP_REGNUM);
3015
3016 /* The hardware saves eight 32-bit words, comprising xPSR,
3017 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3018 "B1.5.6 Exception entry behavior" in
3019 "ARMv7-M Architecture Reference Manual". */
3020 cache->saved_regs[0].addr = unwound_sp;
3021 cache->saved_regs[1].addr = unwound_sp + 4;
3022 cache->saved_regs[2].addr = unwound_sp + 8;
3023 cache->saved_regs[3].addr = unwound_sp + 12;
3024 cache->saved_regs[12].addr = unwound_sp + 16;
3025 cache->saved_regs[14].addr = unwound_sp + 20;
3026 cache->saved_regs[15].addr = unwound_sp + 24;
3027 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3028
3029 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3030 aligner between the top of the 32-byte stack frame and the
3031 previous context's stack pointer. */
3032 cache->prev_sp = unwound_sp + 32;
3033 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3034 && (xpsr & (1 << 9)) != 0)
3035 cache->prev_sp += 4;
3036
3037 return cache;
3038 }
3039
3040 /* Implementation of function hook 'this_id' in
3041 'struct frame_uwnind'. */
3042
3043 static void
3044 arm_m_exception_this_id (struct frame_info *this_frame,
3045 void **this_cache,
3046 struct frame_id *this_id)
3047 {
3048 struct arm_prologue_cache *cache;
3049
3050 if (*this_cache == NULL)
3051 *this_cache = arm_m_exception_cache (this_frame);
3052 cache = (struct arm_prologue_cache *) *this_cache;
3053
3054 /* Our frame ID for a stub frame is the current SP and LR. */
3055 *this_id = frame_id_build (cache->prev_sp,
3056 get_frame_pc (this_frame));
3057 }
3058
3059 /* Implementation of function hook 'prev_register' in
3060 'struct frame_uwnind'. */
3061
3062 static struct value *
3063 arm_m_exception_prev_register (struct frame_info *this_frame,
3064 void **this_cache,
3065 int prev_regnum)
3066 {
3067 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3068 struct arm_prologue_cache *cache;
3069
3070 if (*this_cache == NULL)
3071 *this_cache = arm_m_exception_cache (this_frame);
3072 cache = (struct arm_prologue_cache *) *this_cache;
3073
3074 /* The value was already reconstructed into PREV_SP. */
3075 if (prev_regnum == ARM_SP_REGNUM)
3076 return frame_unwind_got_constant (this_frame, prev_regnum,
3077 cache->prev_sp);
3078
3079 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3080 prev_regnum);
3081 }
3082
3083 /* Implementation of function hook 'sniffer' in
3084 'struct frame_uwnind'. */
3085
3086 static int
3087 arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
3088 struct frame_info *this_frame,
3089 void **this_prologue_cache)
3090 {
3091 CORE_ADDR this_pc = get_frame_pc (this_frame);
3092
3093 /* No need to check is_m; this sniffer is only registered for
3094 M-profile architectures. */
3095
3096 /* Exception frames return to one of these magic PCs. Other values
3097 are not defined as of v7-M. See details in "B1.5.8 Exception
3098 return behavior" in "ARMv7-M Architecture Reference Manual". */
3099 if (this_pc == 0xfffffff1 || this_pc == 0xfffffff9
3100 || this_pc == 0xfffffffd)
3101 return 1;
3102
3103 return 0;
3104 }
3105
/* Frame unwinder for M-profile exceptions.  Selected by the sniffer
   when the frame's PC is one of the magic EXC_RETURN values.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,			/* type */
  default_frame_unwind_stop_reason,	/* stop_reason */
  arm_m_exception_this_id,		/* this_id */
  arm_m_exception_prev_register,	/* prev_register */
  NULL,					/* unwind_data */
  arm_m_exception_unwind_sniffer	/* sniffer */
};
3117
3118 static CORE_ADDR
3119 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3120 {
3121 struct arm_prologue_cache *cache;
3122
3123 if (*this_cache == NULL)
3124 *this_cache = arm_make_prologue_cache (this_frame);
3125 cache = (struct arm_prologue_cache *) *this_cache;
3126
3127 return cache->prev_sp - cache->framesize;
3128 }
3129
/* Frame base handler for normal ARM frames; paired with
   arm_prologue_unwind.  The same address serves as frame base,
   locals base and arguments base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,		/* unwind */
  arm_normal_frame_base,	/* this_base */
  arm_normal_frame_base,	/* this_locals */
  arm_normal_frame_base		/* this_args */
};
3136
3137 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3138 dummy frame. The frame ID's base needs to match the TOS value
3139 saved by save_dummy_frame_tos() and returned from
3140 arm_push_dummy_call, and the PC needs to match the dummy frame's
3141 breakpoint. */
3142
3143 static struct frame_id
3144 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3145 {
3146 return frame_id_build (get_frame_register_unsigned (this_frame,
3147 ARM_SP_REGNUM),
3148 get_frame_pc (this_frame));
3149 }
3150
3151 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3152 be used to construct the previous frame's ID, after looking up the
3153 containing function). */
3154
3155 static CORE_ADDR
3156 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3157 {
3158 CORE_ADDR pc;
3159 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3160 return arm_addr_bits_remove (gdbarch, pc);
3161 }
3162
3163 static CORE_ADDR
3164 arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
3165 {
3166 return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
3167 }
3168
/* DWARF2 'prev_register' callback for the registers that need
   post-processing after the generic DWARF2 unwind: PC and CPSR (it is
   installed only for those two by arm_dwarf2_frame_init_reg).  */

static struct value *
arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
			  int regnum)
{
  struct gdbarch * gdbarch = get_frame_arch (this_frame);
  CORE_ADDR lr, cpsr;
  ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);

  switch (regnum)
    {
    case ARM_PC_REGNUM:
      /* The PC is normally copied from the return column, which
	 describes saves of LR.  However, that version may have an
	 extra bit set to indicate Thumb state.  The bit is not
	 part of the PC.  */
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      return frame_unwind_got_constant (this_frame, regnum,
					arm_addr_bits_remove (gdbarch, lr));

    case ARM_PS_REGNUM:
      /* Reconstruct the T bit; see arm_prologue_prev_register for details.  */
      /* Note this reads THIS frame's CPSR and only fixes up the T bit
	 from the unwound LR's Thumb bit.  */
      cpsr = get_frame_register_unsigned (this_frame, regnum);
      lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
      if (IS_THUMB_ADDR (lr))
	cpsr |= t_bit;
      else
	cpsr &= ~t_bit;
      return frame_unwind_got_constant (this_frame, regnum, cpsr);

    default:
      internal_error (__FILE__, __LINE__,
		      _("Unexpected register %d"), regnum);
    }
}
3203
3204 static void
3205 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3206 struct dwarf2_frame_state_reg *reg,
3207 struct frame_info *this_frame)
3208 {
3209 switch (regnum)
3210 {
3211 case ARM_PC_REGNUM:
3212 case ARM_PS_REGNUM:
3213 reg->how = DWARF2_FRAME_REG_FN;
3214 reg->loc.fn = arm_dwarf2_prev_register;
3215 break;
3216 case ARM_SP_REGNUM:
3217 reg->how = DWARF2_FRAME_REG_CFA;
3218 break;
3219 }
3220 }
3221
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to be inside a function
   epilogue, i.e. the stack frame has already been (partially) torn
   down.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; assume not destroyed.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  /* SP-restoring instructions are allowed mid-epilogue; only a
	     pop that includes PC terminates the scan.  */
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;  /* VFP register restore: valid epilogue, keep scanning.  */
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN2 alone is the candidate 16-bit instruction; INSN:INSN2
     together form the candidate 32-bit instruction.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3331
/* Implement the stack_frame_destroyed_p gdbarch method.  Dispatches
   to the Thumb variant when PC is in Thumb code; otherwise scans ARM
   instructions around PC for an epilogue pattern.  */

static int
arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_stack_frame_destroyed_p (gdbarch, pc);

  /* Without function bounds we cannot scan; assume not destroyed.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Skip unconditional (NV-condition) encodings.  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3385
3386
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Item below this one, or NULL.  */
  gdb_byte *data;		/* xmalloc'd copy of the pushed bytes.  */
};
3396
3397 static struct stack_item *
3398 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3399 {
3400 struct stack_item *si;
3401 si = XNEW (struct stack_item);
3402 si->data = (gdb_byte *) xmalloc (len);
3403 si->len = len;
3404 si->prev = prev;
3405 memcpy (si->data, contents, len);
3406 return si;
3407 }
3408
3409 static struct stack_item *
3410 pop_stack_item (struct stack_item *si)
3411 {
3412 struct stack_item *dead = si;
3413 si = si->prev;
3414 xfree (dead->data);
3415 xfree (dead);
3416 return si;
3417 }
3418
3419
3420 /* Return the alignment (in bytes) of the given type. */
3421
3422 static int
3423 arm_type_align (struct type *t)
3424 {
3425 int n;
3426 int align;
3427 int falign;
3428
3429 t = check_typedef (t);
3430 switch (TYPE_CODE (t))
3431 {
3432 default:
3433 /* Should never happen. */
3434 internal_error (__FILE__, __LINE__, _("unknown type alignment"));
3435 return 4;
3436
3437 case TYPE_CODE_PTR:
3438 case TYPE_CODE_ENUM:
3439 case TYPE_CODE_INT:
3440 case TYPE_CODE_FLT:
3441 case TYPE_CODE_SET:
3442 case TYPE_CODE_RANGE:
3443 case TYPE_CODE_REF:
3444 case TYPE_CODE_CHAR:
3445 case TYPE_CODE_BOOL:
3446 return TYPE_LENGTH (t);
3447
3448 case TYPE_CODE_ARRAY:
3449 case TYPE_CODE_COMPLEX:
3450 /* TODO: What about vector types? */
3451 return arm_type_align (TYPE_TARGET_TYPE (t));
3452
3453 case TYPE_CODE_STRUCT:
3454 case TYPE_CODE_UNION:
3455 align = 1;
3456 for (n = 0; n < TYPE_NFIELDS (t); n++)
3457 {
3458 falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
3459 if (falign > align)
3460 align = falign;
3461 }
3462 return align;
3463 }
3464 }
3465
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* Classification not determined yet.  */
  VFP_CPRC_SINGLE,	/* 4-byte single precision; passed in 's' regs.  */
  VFP_CPRC_DOUBLE,	/* 8-byte double precision; passed in 'd' regs.  */
  VFP_CPRC_VEC64,	/* 8-byte vector; passed in 'd' regs.  */
  VFP_CPRC_VEC128	/* 16-byte vector; passed in 'q' regs.  */
};
3477
3478 /* The length of one element of base type B. */
3479
3480 static unsigned
3481 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3482 {
3483 switch (b)
3484 {
3485 case VFP_CPRC_SINGLE:
3486 return 4;
3487 case VFP_CPRC_DOUBLE:
3488 return 8;
3489 case VFP_CPRC_VEC64:
3490 return 8;
3491 case VFP_CPRC_VEC128:
3492 return 16;
3493 default:
3494 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3495 (int) b);
3496 }
3497 }
3498
3499 /* The character ('s', 'd' or 'q') for the type of VFP register used
3500 for passing base type B. */
3501
3502 static int
3503 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3504 {
3505 switch (b)
3506 {
3507 case VFP_CPRC_SINGLE:
3508 return 's';
3509 case VFP_CPRC_DOUBLE:
3510 return 'd';
3511 case VFP_CPRC_VEC64:
3512 return 'd';
3513 case VFP_CPRC_VEC128:
3514 return 'q';
3515 default:
3516 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3517 (int) b);
3518 }
3519 }
3520
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A lone float/double is one element of the matching class.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	/* An array contributes its element count times the element's
	   contribution; the element classifies BASE_TYPE.  */
	int count;
	unsigned unitlen;
	count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t), base_type);
	if (count == -1)
	  return -1;
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	return TYPE_LENGTH (t) / unitlen;
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its fields; its
	   total size must be exactly the sum of the elements (no
	   padding), otherwise it is not a candidate.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3673
3674 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3675 if passed to or returned from a non-variadic function with the VFP
3676 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3677 *BASE_TYPE to the base type for T and *COUNT to the number of
3678 elements of that base type before returning. */
3679
3680 static int
3681 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3682 int *count)
3683 {
3684 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3685 int c = arm_vfp_cprc_sub_candidate (t, &b);
3686 if (c <= 0 || c > 4)
3687 return 0;
3688 *base_type = b;
3689 *count = c;
3690 return 1;
3691 }
3692
3693 /* Return 1 if the VFP ABI should be used for passing arguments to and
3694 returning values from a function of type FUNC_TYPE, 0
3695 otherwise. */
3696
3697 static int
3698 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3699 {
3700 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3701 /* Variadic functions always use the base ABI. Assume that functions
3702 without debug info are not variadic. */
3703 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3704 return 0;
3705 /* The VFP ABI is only supported as a variant of AAPCS. */
3706 if (tdep->arm_abi != ARM_ABI_AAPCS)
3707 return 0;
3708 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3709 }
3710
3711 /* We currently only support passing parameters in integer registers, which
3712 conforms with GCC's default model, and VFP argument passing following
3713 the VFP variant of AAPCS. Several other variants exist and
3714 we should probably support some of them based on the selected ABI. */
3715
3716 static CORE_ADDR
3717 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3718 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3719 struct value **args, CORE_ADDR sp, int struct_return,
3720 CORE_ADDR struct_addr)
3721 {
3722 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3723 int argnum;
3724 int argreg;
3725 int nstack;
3726 struct stack_item *si = NULL;
3727 int use_vfp_abi;
3728 struct type *ftype;
3729 unsigned vfp_regs_free = (1 << 16) - 1;
3730
3731 /* Determine the type of this function and whether the VFP ABI
3732 applies. */
3733 ftype = check_typedef (value_type (function));
3734 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3735 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3736 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3737
3738 /* Set the return address. For the ARM, the return breakpoint is
3739 always at BP_ADDR. */
3740 if (arm_pc_is_thumb (gdbarch, bp_addr))
3741 bp_addr |= 1;
3742 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3743
3744 /* Walk through the list of args and determine how large a temporary
3745 stack is required. Need to take care here as structs may be
3746 passed on the stack, and we have to push them. */
3747 nstack = 0;
3748
3749 argreg = ARM_A1_REGNUM;
3750 nstack = 0;
3751
3752 /* The struct_return pointer occupies the first parameter
3753 passing register. */
3754 if (struct_return)
3755 {
3756 if (arm_debug)
3757 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3758 gdbarch_register_name (gdbarch, argreg),
3759 paddress (gdbarch, struct_addr));
3760 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3761 argreg++;
3762 }
3763
3764 for (argnum = 0; argnum < nargs; argnum++)
3765 {
3766 int len;
3767 struct type *arg_type;
3768 struct type *target_type;
3769 enum type_code typecode;
3770 const bfd_byte *val;
3771 int align;
3772 enum arm_vfp_cprc_base_type vfp_base_type;
3773 int vfp_base_count;
3774 int may_use_core_reg = 1;
3775
3776 arg_type = check_typedef (value_type (args[argnum]));
3777 len = TYPE_LENGTH (arg_type);
3778 target_type = TYPE_TARGET_TYPE (arg_type);
3779 typecode = TYPE_CODE (arg_type);
3780 val = value_contents (args[argnum]);
3781
3782 align = arm_type_align (arg_type);
3783 /* Round alignment up to a whole number of words. */
3784 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3785 /* Different ABIs have different maximum alignments. */
3786 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3787 {
3788 /* The APCS ABI only requires word alignment. */
3789 align = INT_REGISTER_SIZE;
3790 }
3791 else
3792 {
3793 /* The AAPCS requires at most doubleword alignment. */
3794 if (align > INT_REGISTER_SIZE * 2)
3795 align = INT_REGISTER_SIZE * 2;
3796 }
3797
3798 if (use_vfp_abi
3799 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3800 &vfp_base_count))
3801 {
3802 int regno;
3803 int unit_length;
3804 int shift;
3805 unsigned mask;
3806
3807 /* Because this is a CPRC it cannot go in a core register or
3808 cause a core register to be skipped for alignment.
3809 Either it goes in VFP registers and the rest of this loop
3810 iteration is skipped for this argument, or it goes on the
3811 stack (and the stack alignment code is correct for this
3812 case). */
3813 may_use_core_reg = 0;
3814
3815 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3816 shift = unit_length / 4;
3817 mask = (1 << (shift * vfp_base_count)) - 1;
3818 for (regno = 0; regno < 16; regno += shift)
3819 if (((vfp_regs_free >> regno) & mask) == mask)
3820 break;
3821
3822 if (regno < 16)
3823 {
3824 int reg_char;
3825 int reg_scaled;
3826 int i;
3827
3828 vfp_regs_free &= ~(mask << regno);
3829 reg_scaled = regno / shift;
3830 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3831 for (i = 0; i < vfp_base_count; i++)
3832 {
3833 char name_buf[4];
3834 int regnum;
3835 if (reg_char == 'q')
3836 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3837 val + i * unit_length);
3838 else
3839 {
3840 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3841 reg_char, reg_scaled + i);
3842 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3843 strlen (name_buf));
3844 regcache_cooked_write (regcache, regnum,
3845 val + i * unit_length);
3846 }
3847 }
3848 continue;
3849 }
3850 else
3851 {
3852 /* This CPRC could not go in VFP registers, so all VFP
3853 registers are now marked as used. */
3854 vfp_regs_free = 0;
3855 }
3856 }
3857
3858 /* Push stack padding for dowubleword alignment. */
3859 if (nstack & (align - 1))
3860 {
3861 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3862 nstack += INT_REGISTER_SIZE;
3863 }
3864
3865 /* Doubleword aligned quantities must go in even register pairs. */
3866 if (may_use_core_reg
3867 && argreg <= ARM_LAST_ARG_REGNUM
3868 && align > INT_REGISTER_SIZE
3869 && argreg & 1)
3870 argreg++;
3871
3872 /* If the argument is a pointer to a function, and it is a
3873 Thumb function, create a LOCAL copy of the value and set
3874 the THUMB bit in it. */
3875 if (TYPE_CODE_PTR == typecode
3876 && target_type != NULL
3877 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3878 {
3879 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3880 if (arm_pc_is_thumb (gdbarch, regval))
3881 {
3882 bfd_byte *copy = (bfd_byte *) alloca (len);
3883 store_unsigned_integer (copy, len, byte_order,
3884 MAKE_THUMB_ADDR (regval));
3885 val = copy;
3886 }
3887 }
3888
3889 /* Copy the argument to general registers or the stack in
3890 register-sized pieces. Large arguments are split between
3891 registers and stack. */
3892 while (len > 0)
3893 {
3894 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3895
3896 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3897 {
3898 /* The argument is being passed in a general purpose
3899 register. */
3900 CORE_ADDR regval
3901 = extract_unsigned_integer (val, partial_len, byte_order);
3902 if (byte_order == BFD_ENDIAN_BIG)
3903 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3904 if (arm_debug)
3905 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3906 argnum,
3907 gdbarch_register_name
3908 (gdbarch, argreg),
3909 phex (regval, INT_REGISTER_SIZE));
3910 regcache_cooked_write_unsigned (regcache, argreg, regval);
3911 argreg++;
3912 }
3913 else
3914 {
3915 /* Push the arguments onto the stack. */
3916 if (arm_debug)
3917 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3918 argnum, nstack);
3919 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3920 nstack += INT_REGISTER_SIZE;
3921 }
3922
3923 len -= partial_len;
3924 val += partial_len;
3925 }
3926 }
3927 /* If we have an odd number of words to push, then decrement the stack
3928 by one word now, so first stack argument will be dword aligned. */
3929 if (nstack & 4)
3930 sp -= 4;
3931
3932 while (si)
3933 {
3934 sp -= si->len;
3935 write_memory (sp, si->data, si->len);
3936 si = pop_stack_item (si);
3937 }
3938
3939 /* Finally, update teh SP register. */
3940 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3941
3942 return sp;
3943 }
3944
3945
3946 /* Always align the frame to an 8-byte boundary. This is required on
3947 some platforms and harmless on the rest. */
3948
3949 static CORE_ADDR
3950 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3951 {
3952 /* Align the stack to eight bytes. */
3953 return sp & ~ (CORE_ADDR) 7;
3954 }
3955
/* Print the FPA status-flag names corresponding to the low five bits
   of FLAGS to FILE, followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
3971
3972 /* Print interesting information about the floating point processor
3973 (if present) or emulator. */
3974 static void
3975 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
3976 struct frame_info *frame, const char *args)
3977 {
3978 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
3979 int type;
3980
3981 type = (status >> 24) & 127;
3982 if (status & (1 << 31))
3983 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
3984 else
3985 fprintf_filtered (file, _("Software FPU type %d\n"), type);
3986 /* i18n: [floating point unit] mask */
3987 fputs_filtered (_("mask: "), file);
3988 print_fpu_flags (file, status >> 16);
3989 /* i18n: [floating point unit] flags */
3990 fputs_filtered (_("flags: "), file);
3991 print_fpu_flags (file, status);
3992 }
3993
3994 /* Construct the ARM extended floating point type. */
3995 static struct type *
3996 arm_ext_type (struct gdbarch *gdbarch)
3997 {
3998 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3999
4000 if (!tdep->arm_ext_type)
4001 tdep->arm_ext_type
4002 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4003 floatformats_arm_ext);
4004
4005 return tdep->arm_ext_type;
4006 }
4007
/* Return (building and caching it on first use) a union type offering
   the different views of a 64-bit NEON D register: byte/halfword/word/
   doubleword integer vectors and single/double float views.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Field order and names mirror the lane views of a D register;
	 keep them in sync with arm_neon_quad_type below.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
4039
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Return (building and caching it on first use) a union type offering
   the different views of a 128-bit NEON Q register; the quad-register
   analogue of arm_neon_double_type above.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
4078
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* VFP single-precision pseudo registers (s0..s31) follow the raw
     registers.  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* NEON quad pseudo registers (q0..q15) follow the VFP pseudos.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* FPA registers only have a meaningful type when FPA is
	 present.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4128
4129 /* Map a DWARF register REGNUM onto the appropriate GDB register
4130 number. */
4131
4132 static int
4133 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4134 {
4135 /* Core integer regs. */
4136 if (reg >= 0 && reg <= 15)
4137 return reg;
4138
4139 /* Legacy FPA encoding. These were once used in a way which
4140 overlapped with VFP register numbering, so their use is
4141 discouraged, but GDB doesn't support the ARM toolchain
4142 which used them for VFP. */
4143 if (reg >= 16 && reg <= 23)
4144 return ARM_F0_REGNUM + reg - 16;
4145
4146 /* New assignments for the FPA registers. */
4147 if (reg >= 96 && reg <= 103)
4148 return ARM_F0_REGNUM + reg - 96;
4149
4150 /* WMMX register assignments. */
4151 if (reg >= 104 && reg <= 111)
4152 return ARM_WCGR0_REGNUM + reg - 104;
4153
4154 if (reg >= 112 && reg <= 127)
4155 return ARM_WR0_REGNUM + reg - 112;
4156
4157 if (reg >= 192 && reg <= 199)
4158 return ARM_WC0_REGNUM + reg - 192;
4159
4160 /* VFP v2 registers. A double precision value is actually
4161 in d1 rather than s2, but the ABI only defines numbering
4162 for the single precision registers. This will "just work"
4163 in GDB for little endian targets (we'll read eight bytes,
4164 starting in s0 and then progressing to s1), but will be
4165 reversed on big endian targets with VFP. This won't
4166 be a problem for the new Neon quad registers; you're supposed
4167 to use DW_OP_piece for those. */
4168 if (reg >= 64 && reg <= 95)
4169 {
4170 char name_buf[4];
4171
4172 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4173 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4174 strlen (name_buf));
4175 }
4176
4177 /* VFP v3 / Neon registers. This range is also used for VFP v2
4178 registers, except that it now describes d0 instead of s0. */
4179 if (reg >= 256 && reg <= 287)
4180 {
4181 char name_buf[4];
4182
4183 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4184 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4185 strlen (name_buf));
4186 }
4187
4188 return -1;
4189 }
4190
4191 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4192 static int
4193 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4194 {
4195 int reg = regnum;
4196 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4197
4198 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4199 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4200
4201 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4202 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4203
4204 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4205 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4206
4207 if (reg < NUM_GREGS)
4208 return SIM_ARM_R0_REGNUM + reg;
4209 reg -= NUM_GREGS;
4210
4211 if (reg < NUM_FREGS)
4212 return SIM_ARM_FP0_REGNUM + reg;
4213 reg -= NUM_FREGS;
4214
4215 if (reg < NUM_SREGS)
4216 return SIM_ARM_FPS_REGNUM + reg;
4217 reg -= NUM_SREGS;
4218
4219 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4220 }
4221
4222 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4223 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4224 It is thought that this is is the floating-point register format on
4225 little-endian systems. */
4226
4227 static void
4228 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4229 void *dbl, int endianess)
4230 {
4231 DOUBLEST d;
4232
4233 if (endianess == BFD_ENDIAN_BIG)
4234 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4235 else
4236 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4237 ptr, &d);
4238 floatformat_from_doublest (fmt, &d, dbl);
4239 }
4240
4241 static void
4242 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4243 int endianess)
4244 {
4245 DOUBLEST d;
4246
4247 floatformat_to_doublest (fmt, ptr, &d);
4248 if (endianess == BFD_ENDIAN_BIG)
4249 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4250 else
4251 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4252 &d, dbl);
4253 }
4254
4255 static int
4256 condition_true (unsigned long cond, unsigned long status_reg)
4257 {
4258 if (cond == INST_AL || cond == INST_NV)
4259 return 1;
4260
4261 switch (cond)
4262 {
4263 case INST_EQ:
4264 return ((status_reg & FLAG_Z) != 0);
4265 case INST_NE:
4266 return ((status_reg & FLAG_Z) == 0);
4267 case INST_CS:
4268 return ((status_reg & FLAG_C) != 0);
4269 case INST_CC:
4270 return ((status_reg & FLAG_C) == 0);
4271 case INST_MI:
4272 return ((status_reg & FLAG_N) != 0);
4273 case INST_PL:
4274 return ((status_reg & FLAG_N) == 0);
4275 case INST_VS:
4276 return ((status_reg & FLAG_V) != 0);
4277 case INST_VC:
4278 return ((status_reg & FLAG_V) == 0);
4279 case INST_HI:
4280 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4281 case INST_LS:
4282 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4283 case INST_GE:
4284 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4285 case INST_LT:
4286 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4287 case INST_GT:
4288 return (((status_reg & FLAG_Z) == 0)
4289 && (((status_reg & FLAG_N) == 0)
4290 == ((status_reg & FLAG_V) == 0)));
4291 case INST_LE:
4292 return (((status_reg & FLAG_Z) != 0)
4293 || (((status_reg & FLAG_N) == 0)
4294 != ((status_reg & FLAG_V) == 0)));
4295 }
4296 return 1;
4297 }
4298
/* Evaluate the register-based "shifter operand" of ARM instruction
   INST (operand 2 of a data-processing instruction), reading source
   registers from FRAME.  CARRY is the current C flag (only used for
   RRX); PC_VAL is the address of the instruction, used when a source
   register is the PC.  Returns the 32-bit operand value.
   NOTE(review): STATUS_REG is accepted but not used by this
   function.  */

static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
{
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  if (bit (inst, 4))
    {
      /* Register-specified shift: amount is the low byte of Rs, or
	 PC + 8 when Rs is the PC.  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
    }
  else
    /* Immediate shift amount encoded in bits [11:7].  */
    shift = bits (inst, 7, 11);

  /* Reading the PC yields the instruction address plus 12 for
     register-specified shifts, plus 8 otherwise.  */
  res = (rm == ARM_PC_REGNUM
	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
	 : get_frame_register_unsigned (frame, rm));

  switch (shifttype)
    {
    case 0:			/* LSL */
      res = shift >= 32 ? 0 : res << shift;
      break;

    case 1:			/* LSR */
      res = shift >= 32 ? 0 : res >> shift;
      break;

    case 2:			/* ASR */
      /* Shifts of 32 or more replicate the sign bit; clamp to 31 so
	 the C shift below stays well-defined.  */
      if (shift >= 32)
	shift = 31;
      /* Emulate an arithmetic shift without relying on the host's
	 signed right-shift behavior.  */
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);
      break;

    case 3:			/* ROR/RRX */
      shift &= 31;
      if (shift == 0)
	/* Shift amount 0 encodes RRX: rotate right one bit through
	   the carry flag.  */
	res = (res >> 1) | (carry ? 0x80000000L : 0);
      else
	res = (res >> shift) | (res << (32 - shift));
      break;
    }

  /* Mask to 32 bits in case the host's unsigned long is wider.  */
  return res & 0xffffffff;
}
4348
/* Return number of 1-bits in VAL.  */

static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: clearing the lowest set bit each pass means
     the loop runs exactly once per 1-bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
4359
/* Advance the Thumb-2 ITSTATE value past one instruction, returning
   the new state (or zero once the IT block is exhausted).  */

static int
thumb_advance_itstate (unsigned int itstate)
{
  /* Keep the base condition in bits [7:5]; shift the per-instruction
     condition/counter bits left by one.  */
  unsigned int next = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* A zero low nibble means no instructions remain in the block.  */
  return (next & 0x0f) == 0 ? 0 : next;
}
4373
/* Find the next PC after the current instruction executes.  In some
   cases we can not statically determine the answer (see the IT state
   handling in this function); in that case, a breakpoint may be
   inserted in addition to the returned PC, which will be used to set
   another breakpoint by our caller.

   PC is assumed to address a Thumb instruction in FRAME.  The
   returned address carries the Thumb bit (see MAKE_THUMB_ADDR)
   except where the instruction explicitly switches to ARM state.  */

static CORE_ADDR
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions whose condition fails until we reach
	     one that will execute (or run off the end of the IT
	     block).  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  return MAKE_THUMB_ADDR (pc);
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      return MAKE_THUMB_ADDR (pc);
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS-specific code predict the PC after the
	     system call, if it knows how.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);

	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4)  /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* The J1/J2 bits are XORed with the sign to produce
		 bits 22 and 23 of the offset (I1/I2 in the ARM
		 ARM's encoding description).  */
	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  /* NB: this OFFSET intentionally shadows the function-scope
	     "offset" variable.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: align the base down to a word
		 boundary.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset with optional shift.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc = get_frame_memory_unsigned (frame, base, 4);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      /* Branch taken if: CBNZ and Rn != 0, or CBZ and Rn == 0.  */
      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }
  return nextpc;
}
4745
/* Get the raw next address.  PC is the current program counter, in
   FRAME, which is assumed to be executing in ARM mode.

   The value returned has the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address.  */

static CORE_ADDR
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;
  CORE_ADDR nextpc;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* The 0xf condition field is the unconditional instruction space
     (e.g. BLX immediate), not a conditional encoding.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* The H bit (bit 24) supplies bit 1 of the target.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions writing to the PC can change
	       control flow.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		/* The target register may carry the Thumb bit; it is
		   returned as-is.  */
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		return nextpc;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* Reading the PC as operand 1 yields the instruction
	       address plus 8.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,
					  pc_val, status);

	    /* Emulate the data-processing opcode to compute the
	       value written to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* These do not write the PC; keep the default next
		   address.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    if (!arm_apcs_32)
	      nextpc = arm_addr_bits_remove (gdbarch, result);
	    else
	      nextpc = result;

	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
							      4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* The PC is loaded last, above the other
			 registers in the list.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
							      (rn_val + offset),
							      4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* Let the OS-specific code predict the PC after the
	       system call, if it knows how.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  }
	  break;

	default:
	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
	  return (pc);
	}
    }

  return nextpc;
}
5022
5023 /* Determine next PC after current instruction executes. Will call either
5024 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5025 loop is detected. */
5026
5027 CORE_ADDR
5028 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5029 {
5030 CORE_ADDR nextpc;
5031
5032 if (arm_frame_is_thumb (frame))
5033 nextpc = thumb_get_next_pc_raw (frame, pc);
5034 else
5035 nextpc = arm_get_next_pc_raw (frame, pc);
5036
5037 return nextpc;
5038 }
5039
5040 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5041 of the appropriate mode (as encoded in the PC value), even if this
5042 differs from what would be expected according to the symbol tables. */
5043
5044 void
5045 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5046 struct address_space *aspace,
5047 CORE_ADDR pc)
5048 {
5049 struct cleanup *old_chain
5050 = make_cleanup_restore_integer (&arm_override_mode);
5051
5052 arm_override_mode = IS_THUMB_ADDR (pc);
5053 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5054
5055 insert_single_step_breakpoint (gdbarch, aspace, pc);
5056
5057 do_cleanups (old_chain);
5058 }
5059
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  A breakpoint is placed at the end of
   the sequence.

   Returns 1 if breakpoints were placed (the sequence is handled), 0 to
   fall back to the normal single-step code.  Thumb variant.  */

static int
thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return 0;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  /* LDREX family instructions are all 32-bit encodings.  */
  if (thumb_insn_size (insn1) != 4)
    return 0;

  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
	|| ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* 16-bit instruction.  */

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return 0;
	}
      else
	{
	  /* 32-bit instruction; fetch the second halfword.  */
	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Decode the Thumb-2 conditional branch offset.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return 0;

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace,
				       MAKE_THUMB_ADDR (breaks[index]));

  return 1;
}
5195
/* Recognize an ARM-state exclusive load/store (ldrex../strex..) atomic
   sequence beginning at the PC of FRAME and, if one is found, plant
   single-step breakpoints so execution never stops inside it: one just
   past the sequence, plus at most one at the destination of a
   conditional branch within it.  Returns 1 on success, or 0 to fall
   back to the normal single-step code.  */

static int
arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  The mask leaves bits 21-22 (the size selector) free
     and requires the AL condition in bits 28-31.  */
  insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
         sequence.  If a conditional branch is found, put a breakpoint in
         its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
          if (last_breakpoint > 0)
            return 0; /* More than one conditional branch found, fallback
                         to the standard single-step code.  */

	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return 0;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
        break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);

  return 1;
}
5270
/* Dispatch atomic-sequence handling to the Thumb or ARM variant
   according to the execution state of FRAME.  Returns 1 if breakpoints
   were placed over an atomic sequence, 0 otherwise.  */

int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  return (arm_frame_is_thumb (frame)
	  ? thumb_deal_with_atomic_sequence_raw (frame)
	  : arm_deal_with_atomic_sequence_raw (frame));
}
5279
5280 /* single_step() is called just before we want to resume the inferior,
5281 if we want to single-step it but there is no hardware or kernel
5282 single-step support. We find the target of the coming instruction
5283 and breakpoint it. */
5284
5285 int
5286 arm_software_single_step (struct frame_info *frame)
5287 {
5288 struct gdbarch *gdbarch = get_frame_arch (frame);
5289 struct address_space *aspace = get_frame_address_space (frame);
5290 CORE_ADDR next_pc;
5291
5292 if (arm_deal_with_atomic_sequence (frame))
5293 return 1;
5294
5295 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5296 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5297
5298 return 1;
5299 }
5300
5301 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5302 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5303 NULL if an error occurs. BUF is freed. */
5304
5305 static gdb_byte *
5306 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5307 int old_len, int new_len)
5308 {
5309 gdb_byte *new_buf;
5310 int bytes_to_read = new_len - old_len;
5311
5312 new_buf = (gdb_byte *) xmalloc (new_len);
5313 memcpy (new_buf + bytes_to_read, buf, old_len);
5314 xfree (buf);
5315 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5316 {
5317 xfree (new_buf);
5318 return NULL;
5319 }
5320 return new_buf;
5321 }
5322
5323 /* An IT block is at most the 2-byte IT instruction followed by
5324 four 4-byte instructions. The furthest back we must search to
5325 find an IT block that affects the current instruction is thus
5326 2 + 3 * 4 == 14 bytes. */
5327 #define MAX_IT_BLOCK_PREFIX 14
5328
5329 /* Use a quick scan if there are more than this many bytes of
5330 code. */
5331 #define IT_SCAN_THRESHOLD 32
5332
5333 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5334 A breakpoint in an IT block may not be hit, depending on the
5335 condition flags. */
5336 static CORE_ADDR
5337 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5338 {
5339 gdb_byte *buf;
5340 char map_type;
5341 CORE_ADDR boundary, func_start;
5342 int buf_len;
5343 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5344 int i, any, last_it, last_it_count;
5345
5346 /* If we are using BKPT breakpoints, none of this is necessary. */
5347 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5348 return bpaddr;
5349
5350 /* ARM mode does not have this problem. */
5351 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5352 return bpaddr;
5353
5354 /* We are setting a breakpoint in Thumb code that could potentially
5355 contain an IT block. The first step is to find how much Thumb
5356 code there is; we do not need to read outside of known Thumb
5357 sequences. */
5358 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5359 if (map_type == 0)
5360 /* Thumb-2 code must have mapping symbols to have a chance. */
5361 return bpaddr;
5362
5363 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5364
5365 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5366 && func_start > boundary)
5367 boundary = func_start;
5368
5369 /* Search for a candidate IT instruction. We have to do some fancy
5370 footwork to distinguish a real IT instruction from the second
5371 half of a 32-bit instruction, but there is no need for that if
5372 there's no candidate. */
5373 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5374 if (buf_len == 0)
5375 /* No room for an IT instruction. */
5376 return bpaddr;
5377
5378 buf = (gdb_byte *) xmalloc (buf_len);
5379 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5380 return bpaddr;
5381 any = 0;
5382 for (i = 0; i < buf_len; i += 2)
5383 {
5384 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5385 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5386 {
5387 any = 1;
5388 break;
5389 }
5390 }
5391 if (any == 0)
5392 {
5393 xfree (buf);
5394 return bpaddr;
5395 }
5396
5397 /* OK, the code bytes before this instruction contain at least one
5398 halfword which resembles an IT instruction. We know that it's
5399 Thumb code, but there are still two possibilities. Either the
5400 halfword really is an IT instruction, or it is the second half of
5401 a 32-bit Thumb instruction. The only way we can tell is to
5402 scan forwards from a known instruction boundary. */
5403 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5404 {
5405 int definite;
5406
5407 /* There's a lot of code before this instruction. Start with an
5408 optimistic search; it's easy to recognize halfwords that can
5409 not be the start of a 32-bit instruction, and use that to
5410 lock on to the instruction boundaries. */
5411 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5412 if (buf == NULL)
5413 return bpaddr;
5414 buf_len = IT_SCAN_THRESHOLD;
5415
5416 definite = 0;
5417 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5418 {
5419 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5420 if (thumb_insn_size (inst1) == 2)
5421 {
5422 definite = 1;
5423 break;
5424 }
5425 }
5426
5427 /* At this point, if DEFINITE, BUF[I] is the first place we
5428 are sure that we know the instruction boundaries, and it is far
5429 enough from BPADDR that we could not miss an IT instruction
5430 affecting BPADDR. If ! DEFINITE, give up - start from a
5431 known boundary. */
5432 if (! definite)
5433 {
5434 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5435 bpaddr - boundary);
5436 if (buf == NULL)
5437 return bpaddr;
5438 buf_len = bpaddr - boundary;
5439 i = 0;
5440 }
5441 }
5442 else
5443 {
5444 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5445 if (buf == NULL)
5446 return bpaddr;
5447 buf_len = bpaddr - boundary;
5448 i = 0;
5449 }
5450
5451 /* Scan forwards. Find the last IT instruction before BPADDR. */
5452 last_it = -1;
5453 last_it_count = 0;
5454 while (i < buf_len)
5455 {
5456 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5457 last_it_count--;
5458 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5459 {
5460 last_it = i;
5461 if (inst1 & 0x0001)
5462 last_it_count = 4;
5463 else if (inst1 & 0x0002)
5464 last_it_count = 3;
5465 else if (inst1 & 0x0004)
5466 last_it_count = 2;
5467 else
5468 last_it_count = 1;
5469 }
5470 i += thumb_insn_size (inst1);
5471 }
5472
5473 xfree (buf);
5474
5475 if (last_it == -1)
5476 /* There wasn't really an IT instruction after all. */
5477 return bpaddr;
5478
5479 if (last_it_count < 1)
5480 /* It was too far away. */
5481 return bpaddr;
5482
5483 /* This really is a trouble spot. Move the breakpoint to the IT
5484 instruction. */
5485 return bpaddr - buf_len + last_it;
5486 }
5487
5488 /* ARM displaced stepping support.
5489
5490 Generally ARM displaced stepping works as follows:
5491
5492 1. When an instruction is to be single-stepped, it is first decoded by
5493 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5494 Depending on the type of instruction, it is then copied to a scratch
5495 location, possibly in a modified form. The copy_* set of functions
5496 performs such modification, as necessary. A breakpoint is placed after
5497 the modified instruction in the scratch space to return control to GDB.
5498 Note in particular that instructions which modify the PC will no longer
5499 do so after modification.
5500
5501 2. The instruction is single-stepped, by setting the PC to the scratch
5502 location address, and resuming. Control returns to GDB when the
5503 breakpoint is hit.
5504
5505 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5506 function used for the current instruction. This function's job is to
5507 put the CPU/memory state back to what it would have been if the
5508 instruction had been executed unmodified in its original location. */
5509
5510 /* NOP instruction (mov r0, r0). */
5511 #define ARM_NOP 0xe1a00000
5512 #define THUMB_NOP 0x4600
5513
5514 /* Helper for register reads for displaced stepping. In particular, this
5515 returns the PC as it would be seen by the instruction at its original
5516 location. */
5517
5518 ULONGEST
5519 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5520 int regno)
5521 {
5522 ULONGEST ret;
5523 CORE_ADDR from = dsc->insn_addr;
5524
5525 if (regno == ARM_PC_REGNUM)
5526 {
5527 /* Compute pipeline offset:
5528 - When executing an ARM instruction, PC reads as the address of the
5529 current instruction plus 8.
5530 - When executing a Thumb instruction, PC reads as the address of the
5531 current instruction plus 4. */
5532
5533 if (!dsc->is_thumb)
5534 from += 8;
5535 else
5536 from += 4;
5537
5538 if (debug_displaced)
5539 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5540 (unsigned long) from);
5541 return (ULONGEST) from;
5542 }
5543 else
5544 {
5545 regcache_cooked_read_unsigned (regs, regno, &ret);
5546 if (debug_displaced)
5547 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5548 regno, (unsigned long) ret);
5549 return ret;
5550 }
5551 }
5552
5553 static int
5554 displaced_in_arm_mode (struct regcache *regs)
5555 {
5556 ULONGEST ps;
5557 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5558
5559 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5560
5561 return (ps & t_bit) == 0;
5562 }
5563
5564 /* Write to the PC as from a branch instruction. */
5565
5566 static void
5567 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5568 ULONGEST val)
5569 {
5570 if (!dsc->is_thumb)
5571 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5572 architecture versions < 6. */
5573 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5574 val & ~(ULONGEST) 0x3);
5575 else
5576 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5577 val & ~(ULONGEST) 0x1);
5578 }
5579
5580 /* Write to the PC as from a branch-exchange instruction. */
5581
5582 static void
5583 bx_write_pc (struct regcache *regs, ULONGEST val)
5584 {
5585 ULONGEST ps;
5586 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5587
5588 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5589
5590 if ((val & 1) == 1)
5591 {
5592 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
5593 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
5594 }
5595 else if ((val & 2) == 0)
5596 {
5597 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5598 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
5599 }
5600 else
5601 {
5602 /* Unpredictable behaviour. Try to do something sensible (switch to ARM
5603 mode, align dest to 4 bytes). */
5604 warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
5605 regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
5606 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
5607 }
5608 }
5609
5610 /* Write to the PC as if from a load instruction. */
5611
5612 static void
5613 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5614 ULONGEST val)
5615 {
5616 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5617 bx_write_pc (regs, val);
5618 else
5619 branch_write_pc (regs, dsc, val);
5620 }
5621
5622 /* Write to the PC as if from an ALU instruction. */
5623
5624 static void
5625 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5626 ULONGEST val)
5627 {
5628 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5629 bx_write_pc (regs, val);
5630 else
5631 branch_write_pc (regs, dsc, val);
5632 }
5633
5634 /* Helper for writing to registers for displaced stepping. Writing to the PC
5635 has a varying effects depending on the instruction which does the write:
5636 this is controlled by the WRITE_PC argument. */
5637
5638 void
5639 displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5640 int regno, ULONGEST val, enum pc_write_style write_pc)
5641 {
5642 if (regno == ARM_PC_REGNUM)
5643 {
5644 if (debug_displaced)
5645 fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
5646 (unsigned long) val);
5647 switch (write_pc)
5648 {
5649 case BRANCH_WRITE_PC:
5650 branch_write_pc (regs, dsc, val);
5651 break;
5652
5653 case BX_WRITE_PC:
5654 bx_write_pc (regs, val);
5655 break;
5656
5657 case LOAD_WRITE_PC:
5658 load_write_pc (regs, dsc, val);
5659 break;
5660
5661 case ALU_WRITE_PC:
5662 alu_write_pc (regs, dsc, val);
5663 break;
5664
5665 case CANNOT_WRITE_PC:
5666 warning (_("Instruction wrote to PC in an unexpected way when "
5667 "single-stepping"));
5668 break;
5669
5670 default:
5671 internal_error (__FILE__, __LINE__,
5672 _("Invalid argument to displaced_write_reg"));
5673 }
5674
5675 dsc->wrote_to_pc = 1;
5676 }
5677 else
5678 {
5679 if (debug_displaced)
5680 fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
5681 regno, (unsigned long) val);
5682 regcache_cooked_write_unsigned (regs, regno, val);
5683 }
5684 }
5685
/* Determine concisely whether an instruction INSN references the PC.
   Register fields of interest in INSN are marked by all-ones nibbles
   in BITMASK.  Returns 1 if any such field in INSN holds 0b1111
   (r15, the PC), otherwise 0.  */

static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  uint32_t probe = 1;

  while (bitmask != 0)
    {
      uint32_t field;

      /* Advance PROBE to the lowest remaining set bit of BITMASK.  */
      while (probe != 0 && (bitmask & probe) == 0)
	probe <<= 1;

      if (probe == 0)
	break;

      /* Expand the single bit into a four-bit register field.  */
      field = probe * 0xf;

      if ((insn & field) == field)
	return 1;

      bitmask &= ~field;
    }

  return 0;
}
5717
5718 /* The simplest copy function. Many instructions have the same effect no
5719 matter what address they are executed at: in those cases, use this. */
5720
5721 static int
5722 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5723 const char *iname, struct displaced_step_closure *dsc)
5724 {
5725 if (debug_displaced)
5726 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5727 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5728 iname);
5729
5730 dsc->modinsn[0] = insn;
5731
5732 return 0;
5733 }
5734
5735 static int
5736 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5737 uint16_t insn2, const char *iname,
5738 struct displaced_step_closure *dsc)
5739 {
5740 if (debug_displaced)
5741 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5742 "opcode/class '%s' unmodified\n", insn1, insn2,
5743 iname);
5744
5745 dsc->modinsn[0] = insn1;
5746 dsc->modinsn[1] = insn2;
5747 dsc->numinsns = 2;
5748
5749 return 0;
5750 }
5751
5752 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5753 modification. */
5754 static int
5755 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5756 const char *iname,
5757 struct displaced_step_closure *dsc)
5758 {
5759 if (debug_displaced)
5760 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5761 "opcode/class '%s' unmodified\n", insn,
5762 iname);
5763
5764 dsc->modinsn[0] = insn;
5765
5766 return 0;
5767 }
5768
5769 /* Preload instructions with immediate offset. */
5770
5771 static void
5772 cleanup_preload (struct gdbarch *gdbarch,
5773 struct regcache *regs, struct displaced_step_closure *dsc)
5774 {
5775 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5776 if (!dsc->u.preload.immed)
5777 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5778 }
5779
5780 static void
5781 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5782 struct displaced_step_closure *dsc, unsigned int rn)
5783 {
5784 ULONGEST rn_val;
5785 /* Preload instructions:
5786
5787 {pli/pld} [rn, #+/-imm]
5788 ->
5789 {pli/pld} [r0, #+/-imm]. */
5790
5791 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5792 rn_val = displaced_read_reg (regs, dsc, rn);
5793 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5794 dsc->u.preload.immed = 1;
5795
5796 dsc->cleanup = &cleanup_preload;
5797 }
5798
5799 static int
5800 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5801 struct displaced_step_closure *dsc)
5802 {
5803 unsigned int rn = bits (insn, 16, 19);
5804
5805 if (!insn_references_pc (insn, 0x000f0000ul))
5806 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5807
5808 if (debug_displaced)
5809 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5810 (unsigned long) insn);
5811
5812 dsc->modinsn[0] = insn & 0xfff0ffff;
5813
5814 install_preload (gdbarch, regs, dsc, rn);
5815
5816 return 0;
5817 }
5818
/* Copy a 32-bit Thumb preload (PLD/PLI).  When the base register is
   the PC, rewrite the literal form as a register-offset form using r0
   (the PC value) and r1 (the signed immediate offset).  */

static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
		     struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);  /* Add (1) or subtract (0) imm12.  */
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Non-PC bases are position-independent; copy unchanged.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
			imm12);

  /* Apply the sign implied by the U bit.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  /* Register-offset form, so cleanup_preload restores r1 as well.  */
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
5866
5867 /* Preload instructions with register offset. */
5868
5869 static void
5870 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5871 struct displaced_step_closure *dsc, unsigned int rn,
5872 unsigned int rm)
5873 {
5874 ULONGEST rn_val, rm_val;
5875
5876 /* Preload register-offset instructions:
5877
5878 {pli/pld} [rn, rm {, shift}]
5879 ->
5880 {pli/pld} [r0, r1 {, shift}]. */
5881
5882 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5883 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5884 rn_val = displaced_read_reg (regs, dsc, rn);
5885 rm_val = displaced_read_reg (regs, dsc, rm);
5886 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5887 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5888 dsc->u.preload.immed = 0;
5889
5890 dsc->cleanup = &cleanup_preload;
5891 }
5892
5893 static int
5894 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5895 struct regcache *regs,
5896 struct displaced_step_closure *dsc)
5897 {
5898 unsigned int rn = bits (insn, 16, 19);
5899 unsigned int rm = bits (insn, 0, 3);
5900
5901
5902 if (!insn_references_pc (insn, 0x000f000ful))
5903 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5904
5905 if (debug_displaced)
5906 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5907 (unsigned long) insn);
5908
5909 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5910
5911 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5912 return 0;
5913 }
5914
5915 /* Copy/cleanup coprocessor load and store instructions. */
5916
5917 static void
5918 cleanup_copro_load_store (struct gdbarch *gdbarch,
5919 struct regcache *regs,
5920 struct displaced_step_closure *dsc)
5921 {
5922 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5923
5924 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5925
5926 if (dsc->u.ldst.writeback)
5927 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5928 }
5929
5930 static void
5931 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5932 struct displaced_step_closure *dsc,
5933 int writeback, unsigned int rn)
5934 {
5935 ULONGEST rn_val;
5936
5937 /* Coprocessor load/store instructions:
5938
5939 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5940 ->
5941 {stc/stc2} [r0, #+/-imm].
5942
5943 ldc/ldc2 are handled identically. */
5944
5945 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5946 rn_val = displaced_read_reg (regs, dsc, rn);
5947 /* PC should be 4-byte aligned. */
5948 rn_val = rn_val & 0xfffffffc;
5949 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5950
5951 dsc->u.ldst.writeback = writeback;
5952 dsc->u.ldst.rn = rn;
5953
5954 dsc->cleanup = &cleanup_copro_load_store;
5955 }
5956
5957 static int
5958 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5959 struct regcache *regs,
5960 struct displaced_step_closure *dsc)
5961 {
5962 unsigned int rn = bits (insn, 16, 19);
5963
5964 if (!insn_references_pc (insn, 0x000f0000ul))
5965 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
5966
5967 if (debug_displaced)
5968 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5969 "load/store insn %.8lx\n", (unsigned long) insn);
5970
5971 dsc->modinsn[0] = insn & 0xfff0ffff;
5972
5973 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
5974
5975 return 0;
5976 }
5977
5978 static int
5979 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
5980 uint16_t insn2, struct regcache *regs,
5981 struct displaced_step_closure *dsc)
5982 {
5983 unsigned int rn = bits (insn1, 0, 3);
5984
5985 if (rn != ARM_PC_REGNUM)
5986 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
5987 "copro load/store", dsc);
5988
5989 if (debug_displaced)
5990 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
5991 "load/store insn %.4x%.4x\n", insn1, insn2);
5992
5993 dsc->modinsn[0] = insn1 & 0xfff0;
5994 dsc->modinsn[1] = insn2;
5995 dsc->numinsns = 2;
5996
5997 /* This function is called for copying instruction LDC/LDC2/VLDR, which
5998 doesn't support writeback, so pass 0. */
5999 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6000
6001 return 0;
6002 }
6003
/* Clean up branch instructions: actually perform the branch by writing
   the recorded destination to the PC, honouring the instruction's
   condition, link flag and branch-vs-branch-exchange semantics.  */

static void
cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
		struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int branch_taken = condition_true (dsc->u.branch.cond, status);
  /* Branch-exchange destinations may switch ARM/Thumb state.  */
  enum pc_write_style write_pc = dsc->u.branch.exchange
				 ? BX_WRITE_PC : BRANCH_WRITE_PC;

  /* Condition failed: the original instruction was a no-op.  */
  if (!branch_taken)
    return;

  if (dsc->u.branch.link)
    {
      /* The value of LR should be the next insn of current one.  In order
	 not to confuse logic handling later insn `bx lr', if current insn
	 mode is Thumb, the bit 0 of LR value should be set to 1.  */
      ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;

      if (dsc->is_thumb)
	next_insn_addr |= 0x1;

      displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
			   CANNOT_WRITE_PC);
    }

  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
}
6035
6036 /* Copy B/BL/BLX instructions with immediate destinations. */
6037
6038 static void
6039 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6040 struct displaced_step_closure *dsc,
6041 unsigned int cond, int exchange, int link, long offset)
6042 {
6043 /* Implement "BL<cond> <label>" as:
6044
6045 Preparation: cond <- instruction condition
6046 Insn: mov r0, r0 (nop)
6047 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6048
6049 B<cond> similar, but don't set r14 in cleanup. */
6050
6051 dsc->u.branch.cond = cond;
6052 dsc->u.branch.link = link;
6053 dsc->u.branch.exchange = exchange;
6054
6055 dsc->u.branch.dest = dsc->insn_addr;
6056 if (link && exchange)
6057 /* For BLX, offset is computed from the Align (PC, 4). */
6058 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6059
6060 if (dsc->is_thumb)
6061 dsc->u.branch.dest += 4 + offset;
6062 else
6063 dsc->u.branch.dest += 8 + offset;
6064
6065 dsc->cleanup = &cleanup_branch;
6066 }
6067 static int
6068 arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
6069 struct regcache *regs, struct displaced_step_closure *dsc)
6070 {
6071 unsigned int cond = bits (insn, 28, 31);
6072 int exchange = (cond == 0xf);
6073 int link = exchange || bit (insn, 24);
6074 long offset;
6075
6076 if (debug_displaced)
6077 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
6078 "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
6079 (unsigned long) insn);
6080 if (exchange)
6081 /* For BLX, set bit 0 of the destination. The cleanup_branch function will
6082 then arrange the switch into Thumb mode. */
6083 offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
6084 else
6085 offset = bits (insn, 0, 23) << 2;
6086
6087 if (bit (offset, 25))
6088 offset = offset | ~0x3ffffff;
6089
6090 dsc->modinsn[0] = ARM_NOP;
6091
6092 install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
6093 return 0;
6094 }
6095
/* Copy a 32-bit Thumb B/BL/BLX with an immediate destination; the insn
   itself becomes a NOP and cleanup_branch performs the branch.  */

static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
		      uint16_t insn2, struct regcache *regs,
		      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);
  /* I1/I2 are derived from J1/J2 xor'd with the sign bit S, as in the
     Thumb-2 branch encodings.  */
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
	{
	  offset |= (bits (insn1, 0, 9) << 12)
	    | (i2 << 22)
	    | (i1 << 23)
	    | (s << 24);
	  cond = INST_AL;
	}
      else /* Encoding T3 */
	{
	  offset |= (bits (insn1, 0, 5) << 12)
	    | (j1 << 18)
	    | (j2 << 19)
	    | (s << 20);
	  /* T3 carries an explicit condition in bits 6-9.  */
	  cond = bits (insn1, 6, 9);
	}
    }
  else
    {
      /* BL or BLX.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      /* BLX targets are word-aligned ARM code, so the low offset bits
	 are assembled differently from BL.  */
      offset |= exchange ?
	(bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
			"%.4x %.4x with offset %.8lx\n",
			link ? (exchange) ? "blx" : "bl" : "b",
			insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6150
/* Copy a 16-bit Thumb B instruction (conditional encoding T1 or
   unconditional encoding T2); the insn itself becomes a NOP and
   cleanup_branch performs the branch.  */
static int
thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
	      struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd) /* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* offset = SignExtend (imm11:0, 32) */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying b immediate insn %.4x "
			"with offset %d\n", insn, offset);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* The destination is the Thumb-state PC (insn address + 4) plus the
     decoded offset.  */
  dsc->u.branch.dest = from + 4 + offset;

  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
6189
6190 /* Copy BX/BLX with register-specified destinations. */
6191
6192 static void
6193 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6194 struct displaced_step_closure *dsc, int link,
6195 unsigned int cond, unsigned int rm)
6196 {
6197 /* Implement {BX,BLX}<cond> <reg>" as:
6198
6199 Preparation: cond <- instruction condition
6200 Insn: mov r0, r0 (nop)
6201 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6202
6203 Don't set r14 in cleanup for BX. */
6204
6205 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6206
6207 dsc->u.branch.cond = cond;
6208 dsc->u.branch.link = link;
6209
6210 dsc->u.branch.exchange = 1;
6211
6212 dsc->cleanup = &cleanup_branch;
6213 }
6214
6215 static int
6216 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6217 struct regcache *regs, struct displaced_step_closure *dsc)
6218 {
6219 unsigned int cond = bits (insn, 28, 31);
6220 /* BX: x12xxx1x
6221 BLX: x12xxx3x. */
6222 int link = bit (insn, 5);
6223 unsigned int rm = bits (insn, 0, 3);
6224
6225 if (debug_displaced)
6226 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6227 (unsigned long) insn);
6228
6229 dsc->modinsn[0] = ARM_NOP;
6230
6231 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6232 return 0;
6233 }
6234
6235 static int
6236 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6237 struct regcache *regs,
6238 struct displaced_step_closure *dsc)
6239 {
6240 int link = bit (insn, 7);
6241 unsigned int rm = bits (insn, 3, 6);
6242
6243 if (debug_displaced)
6244 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6245 (unsigned short) insn);
6246
6247 dsc->modinsn[0] = THUMB_NOP;
6248
6249 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6250
6251 return 0;
6252 }
6253
6254
6255 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6256
6257 static void
6258 cleanup_alu_imm (struct gdbarch *gdbarch,
6259 struct regcache *regs, struct displaced_step_closure *dsc)
6260 {
6261 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6262 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6263 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6264 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6265 }
6266
/* Copy an ARM data-processing instruction with an immediate operand,
   rewriting any PC reference so it can execute out of line.  Returns 0
   on success.  */

static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
                  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* If neither Rd nor Rn is the PC (mask covers bits 12-19), the insn
     is position-independent and may run out of line unmodified.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
                        "%.8lx\n", is_mov ? "move" : "ALU",
                        (unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
                  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear Rd and Rn fields; MOV has no Rn operand, so only the non-move
     forms get Rn = r1 (the 0x10000 bit).  Rd becomes r0 in both.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6314
/* Copy a Thumb-2 32-bit ALU-immediate instruction.  Only called for
   MOV (see the assert below); rewrites it to target scratch registers
   when Rd or Rm is the PC.  Returns 0 on success.  */

static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
                     uint16_t insn2, struct regcache *regs,
                     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  /* No PC involved: the insn can execute out of line as-is.  */
  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
                        "ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
                  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  /* Note rn == 0xf here, so rn_val is the adjusted PC value.  */
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Redirect Rd to scratch register r1 in the second halfword.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6366
6367 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6368
6369 static void
6370 cleanup_alu_reg (struct gdbarch *gdbarch,
6371 struct regcache *regs, struct displaced_step_closure *dsc)
6372 {
6373 ULONGEST rd_val;
6374 int i;
6375
6376 rd_val = displaced_read_reg (regs, dsc, 0);
6377
6378 for (i = 0; i < 3; i++)
6379 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6380
6381 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6382 }
6383
6384 static void
6385 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6386 struct displaced_step_closure *dsc,
6387 unsigned int rd, unsigned int rn, unsigned int rm)
6388 {
6389 ULONGEST rd_val, rn_val, rm_val;
6390
6391 /* Instruction is of form:
6392
6393 <op><cond> rd, [rn,] rm [, <shift>]
6394
6395 Rewrite as:
6396
6397 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6398 r0, r1, r2 <- rd, rn, rm
6399 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6400 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6401 */
6402
6403 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6404 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6405 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6406 rd_val = displaced_read_reg (regs, dsc, rd);
6407 rn_val = displaced_read_reg (regs, dsc, rn);
6408 rm_val = displaced_read_reg (regs, dsc, rm);
6409 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6410 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6411 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6412 dsc->rd = rd;
6413
6414 dsc->cleanup = &cleanup_alu_reg;
6415 }
6416
/* Copy an ARM data-processing instruction with a register operand,
   rewriting PC references via install_alu_reg.  Returns 0 on
   success.  */

static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
                  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);

  /* Mask covers Rn (16-19), Rd (12-15) and Rm (0-3); if none of them is
     PC the insn may run out of line unmodified.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
                        is_mov ? "move" : "ALU", (unsigned long) insn);

  /* Redirect operands onto r0/r2 (and r1 for three-operand forms; MOV
     has no Rn field, hence no 0x10000 bit).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
                   bits (insn, 0, 3));
  return 0;
}
6440
/* Copy a 16-bit Thumb high-register ALU instruction (ADD/CMP/MOV with
   Rd or Rm possibly the PC).  Returns 0 on success.  */

static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
                    struct regcache *regs,
                    struct displaced_step_closure *dsc)
{
  unsigned rm, rd;

  rm = bits (insn, 3, 6);
  /* Rd is split: bit 7 is its high bit, bits 0-2 the low bits.  */
  rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);

  if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
                        (unsigned short) insn);

  /* Replace the operand fields: low byte becomes 0x10, i.e. Rd = r0 and
     Rm = r2, matching the scratch assignment made by install_alu_reg.  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x10);

  /* Rd doubles as the "Rn" operand for this two-operand encoding.  */
  install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);

  return 0;
}
6464
6465 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6466
6467 static void
6468 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6469 struct regcache *regs,
6470 struct displaced_step_closure *dsc)
6471 {
6472 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6473 int i;
6474
6475 for (i = 0; i < 4; i++)
6476 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6477
6478 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6479 }
6480
6481 static void
6482 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6483 struct displaced_step_closure *dsc,
6484 unsigned int rd, unsigned int rn, unsigned int rm,
6485 unsigned rs)
6486 {
6487 int i;
6488 ULONGEST rd_val, rn_val, rm_val, rs_val;
6489
6490 /* Instruction is of form:
6491
6492 <op><cond> rd, [rn,] rm, <shift> rs
6493
6494 Rewrite as:
6495
6496 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6497 r0, r1, r2, r3 <- rd, rn, rm, rs
6498 Insn: <op><cond> r0, r1, r2, <shift> r3
6499 Cleanup: tmp5 <- r0
6500 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6501 rd <- tmp5
6502 */
6503
6504 for (i = 0; i < 4; i++)
6505 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6506
6507 rd_val = displaced_read_reg (regs, dsc, rd);
6508 rn_val = displaced_read_reg (regs, dsc, rn);
6509 rm_val = displaced_read_reg (regs, dsc, rm);
6510 rs_val = displaced_read_reg (regs, dsc, rs);
6511 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6512 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6513 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6514 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6515 dsc->rd = rd;
6516 dsc->cleanup = &cleanup_alu_shifted_reg;
6517 }
6518
/* Copy an ARM data-processing instruction whose second operand is a
   register shifted by a register, rewriting PC references.  Returns 0
   on success.  */

static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
                          struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  int is_mov = (op == 0xd);
  unsigned int rd, rn, rm, rs;

  /* Mask covers Rn (16-19), Rd (12-15), Rs (8-11) and Rm (0-3).  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
                        "%.8lx\n", is_mov ? "move" : "ALU",
                        (unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Substitute scratch registers: Rd = r0, Rs = r3, Rm = r2 (and Rn = r1
     via the 0x10000 bit for the forms that have an Rn operand).  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
6550
/* Clean up load instructions.  Retrieves the loaded value(s) from the
   scratch registers, restores the scratch registers, performs base
   writeback if requested, and finally writes the result(s) into the
   real destination register(s).  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
              struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* Pick up results before the scratch registers are restored: r0 holds
     the loaded word, r1 the second word of a doubleword transfer, and
     r2 the (possibly auto-modified) base address.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  /* r3 was only used (and saved) for the register-offset forms.  */
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
6579
6580 /* Clean up store instructions. */
6581
6582 static void
6583 cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
6584 struct displaced_step_closure *dsc)
6585 {
6586 ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);
6587
6588 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6589 if (dsc->u.ldst.xfersize > 4)
6590 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6591 displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
6592 if (!dsc->u.ldst.immed)
6593 displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
6594 if (!dsc->u.ldst.restore_r4)
6595 displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);
6596
6597 /* Writeback. */
6598 if (dsc->u.ldst.writeback)
6599 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
6600 }
6601
/* Copy "extra" load/store instructions.  These are halfword/doubleword
   transfers, which have a different encoding to byte/word transfers.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
                      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Per-opcode attribute tables, indexed by OPCODE as computed below:
     whether the transfer is a load, and how many bytes it moves.  */
  char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* Mask covers Rn (16-19), Rt (12-15) and Rm (0-3).  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
                        "insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
                        (unsigned long) insn);

  /* Fold op2 and two bits of op1 into a table index; values below 4
     would be plain (non-"extra") transfers, hence the check.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
                    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers the rewritten insn will use: r0/r1 for
     the data, r2 for the base, r3 for the index (register forms).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  rt_val = displaced_read_reg (regs, dsc, rt);
  /* Doubleword transfers use the register pair Rt, Rt+1.  */
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  /* Writeback happens for post-indexed (P == 0) or W == 1 forms.  */
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
6676
/* Copy byte/half word/word loads and stores.  Common worker: saves the
   scratch registers, moves the operand values into them, records the
   transfer parameters in DSC, and installs the matching cleanup.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
                    struct displaced_step_closure *dsc, int load,
                    int immed, int writeback, int size, int usermode,
                    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save scratch registers: r0 (data), r2 (base), r3 (index, register
     forms only) and r4 (clobbered by the PC-store sequence the caller
     may emit -- stores only).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Move the operand values into the scratch registers.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc} Write address of STR instruction + offset on stack
     Insn2: pop  {r4} Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc   r4 = addr(Insn1) + offset - pc
                             = addr(Insn1) + offset - addr(Insn3) - 8
                             = offset - 16
     Insn4: add r4, r4, #8   r4 = offset - 8
     Insn5: add r0, r0, r4   r0 = from + 8 + offset - 8
                             = from + offset
     Insn6: str r0, [r2, #imm] (or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the
     offset is architecture-dependent (sometimes PC+8, sometimes PC+12).
     More details of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
6732
6733
/* Copy a Thumb-2 PC-relative (literal) load of SIZE bytes, rewriting it
   as a register-offset load from the aligned PC held in a scratch
   register.  Returns 0 on success.  */

static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
                        (unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
                        imm12);

  /* U bit clear means the offset is subtracted from the base.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[2] <- r2, tmp[3] <- r3, r2 <- pc,
              r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[2], r3 <- tmp[3].
     (cleanup_load restores r2 from tmp[2] and r3 from tmp[3], which is
     why the saved values go into those slots.)  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal loads use Align (PC, 4) as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
6789
/* Copy a Thumb-2 load with register or immediate offset, rewriting any
   PC reference onto scratch registers via install_load_store.  Returns
   0 on success.  */

static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          struct displaced_step_closure *dsc,
                          int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  /* No PC involved: execute the insn out of line unmodified.  */
  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
                                        dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
                        rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
                      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
6837
6838
/* Copy an ARM byte/word load or store, rewriting PC references.  When a
   store writes the PC itself, emit a six-instruction sequence that
   reconstructs the architecturally-correct stored PC value (see the
   comment in install_load_store).  Returns 0 on success.  */

static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
                            struct regcache *regs,
                            struct displaced_step_closure *dsc,
                            int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Writeback for post-indexed (P == 0) or W == 1 addressing.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* Mask covers Rn (16-19), Rt (12-15) and Rm (0-3).  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying %s%s r%d [r%d] insn %.8lx\n",
                        load ? (size == 1 ? "ldrb" : "ldr")
                             : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
                        rt, rn,
                        (unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
                      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
        /* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
           ->
           {ldr,str}[b]<cond> r0, [r2, #imm].  */
        dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
        /* {ldr,str}[b]<cond> rt, [rn, rm], etc.
           ->
           {ldr,str}[b]<cond> r0, [r2, r3].  */
        dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
        dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
        dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
6903
/* Cleanup LDM instructions with fully-populated register list.  This is an
   unfortunate corner case: it's impossible to implement correctly by modifying
   the instruction.  The issue is as follows: we have an instruction,

   ldm rN, {r0-r15}

   which we must rewrite to avoid loading PC.  A possible solution would be to
   do the load in two halves, something like (with suitable cleanup
   afterwards):

   mov r8, rN
   ldm[id][ab] r8!, {r0-r7}
   str r7, <temp>
   ldm[id][ab] r8, {r7-r14}
   <bkpt>

   but at present there's no suitable place for <temp>, since the scratch space
   is overwritten before the cleanup routine is called.  For now, we simply
   emulate the instruction.  */

static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
                        struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Pre-increment/decrement adjusts the address before each transfer;
     post forms adjust it afterwards.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Walk the register list from r0 upwards (increment) or r15
     downwards (decrement), matching the hardware transfer order.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  /* "ldm rN, {...pc}^" is an exception return; we cannot emulate it.  */
  int exception_return = dsc->u.block.load && dsc->u.block.user
                         && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* Condition false: the original insn would have been a no-op.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
                        "%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
                        dsc->u.block.increment ? "inc" : "dec",
                        dsc->u.block.before ? "before" : "after");

  /* Emulate the transfer one register at a time.  */
  while (regmask)
    {
      uint32_t memword;

      /* Advance to the next register present in the list.  */
      if (inc)
        while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
          regno++;
      else
        while (regno >= 0 && (regmask & (1 << regno)) == 0)
          regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback with the final transfer address.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
                         CANNOT_WRITE_PC);
}
6982
/* Clean up an STM which included the PC in the register list.  The STM
   was run as-is out of line, so the stored "PC" is relative to the
   scratch area; compute the architecture's store offset from that and
   patch the stored word to the value the original insn would have
   written.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
                        struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* PC is the highest-numbered register, so it occupies the last slot
     of the transferred block; locate that slot for each addressing
     mode.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
        pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
        pc_stored_at -= 4;
    }

  /* The stored value is scratch-area address + offset; recover the
     architecture-dependent offset (8 or 12).  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
                        "STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
                                 dsc->insn_addr + offset);
}
7029
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
                       struct regcache *regs,
                       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  /* Condition failed: the out-of-line LDM was a no-op too.  */
  if (!load_executed)
    return;

  /* The modified insn loaded into r0...r(N-1); all those registers were
     clobbered and may need restoring afterwards.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk the original register list from r15 downwards, moving each
     loaded value from its temporary home r(num_to_shuffle-1), ...  down
     to r0, into its intended destination.  Going high-to-low means a
     destination register is never overwritten before its own temporary
     value has been moved out.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
        {
          unsigned int read_reg = num_to_shuffle - 1;

          if (read_reg != write_reg)
            {
              ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
              displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
              if (debug_displaced)
                fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
                                    "loaded register r%d to r%d\n"), read_reg,
                                    write_reg);
            }
          else if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
                                "r%d already in the right place\n"),
                                write_reg);

          /* This destination now holds its proper value, so it no longer
             counts as clobbered.  */
          clobbered &= ~(1 << write_reg);

          num_to_shuffle--;
        }

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
        {
          displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
                               CANNOT_WRITE_PC);
          if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
                                "clobbered register r%d\n"), write_reg);
          clobbered &= ~(1 << write_reg);
        }
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
        new_rn_val += regs_loaded * 4;
      else
        new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
                           CANNOT_WRITE_PC);
    }
}
7111
/* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
   in user-level code (in particular exception return, ldm rn, {...pc}^).  */

static int
arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
                     struct regcache *regs,
                     struct displaced_step_closure *dsc)
{
  int load = bit (insn, 20);
  int user = bit (insn, 22);
  int increment = bit (insn, 23);
  int before = bit (insn, 24);
  int writeback = bit (insn, 21);
  int rn = bits (insn, 16, 19);

  /* Block transfers which don't mention PC can be run directly
     out-of-line.  */
  if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
    return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);

  /* PC as the base register is architecturally unpredictable; warn and
     run the insn as-is.  */
  if (rn == ARM_PC_REGNUM)
    {
      warning (_("displaced: Unpredictable LDM or STM with "
                 "base register r15"));
      return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
                        "%.8lx\n", (unsigned long) insn);

  /* Record the transfer parameters for the cleanup routines.  */
  dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
  dsc->u.block.rn = rn;

  dsc->u.block.load = load;
  dsc->u.block.user = user;
  dsc->u.block.increment = increment;
  dsc->u.block.before = before;
  dsc->u.block.writeback = writeback;
  dsc->u.block.cond = bits (insn, 28, 31);

  dsc->u.block.regmask = insn & 0xffff;

  if (load)
    {
      if ((insn & 0xffff) == 0xffff)
        {
          /* LDM with a fully-populated register list.  This case is
             particularly tricky.  Implement for now by fully emulating the
             instruction (which might not behave perfectly in all cases, but
             these instructions should be rare enough for that not to matter
             too much).  */
          dsc->modinsn[0] = ARM_NOP;

          dsc->cleanup = &cleanup_block_load_all;
        }
      else
        {
          /* LDM of a list of registers which includes PC.  Implement by
             rewriting the list of registers to be transferred into a
             contiguous chunk r0...rX before doing the transfer, then shuffling
             registers into the correct places in the cleanup routine.  */
          unsigned int regmask = insn & 0xffff;
          unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
          unsigned int to = 0, from = 0, i, new_rn;

          /* Save r0...r(N-1), which the modified insn will clobber.  */
          for (i = 0; i < num_in_list; i++)
            dsc->tmp[i] = displaced_read_reg (regs, dsc, i);

          /* Writeback makes things complicated.  We need to avoid clobbering
             the base register with one of the registers in our modified
             register list, but just using a different register can't work in
             all cases, e.g.:

             ldm r14!, {r0-r13,pc}

             which would need to be rewritten as:

             ldm rN!, {r0-r14}

             but that can't work, because there's no free register for N.

             Solve this by turning off the writeback bit, and emulating
             writeback manually in the cleanup routine.  */

          if (writeback)
            insn &= ~(1 << 21);

          new_regmask = (1 << num_in_list) - 1;

          if (debug_displaced)
            fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
                                "{..., pc}: original reg list %.4x, modified "
                                "list %.4x\n"), rn, writeback ? "!" : "",
                                (int) insn & 0xffff, new_regmask);

          dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);

          dsc->cleanup = &cleanup_block_load_pc;
        }
    }
  else
    {
      /* STM of a list of registers which includes PC.  Run the instruction
         as-is, but out of line: this will store the wrong value for the PC,
         so we must manually fix up the memory in the cleanup routine.
         Doing things this way has the advantage that we can auto-detect
         the offset of the PC write (which is architecture-dependent) in
         the cleanup routine.  */
      dsc->modinsn[0] = insn;

      dsc->cleanup = &cleanup_block_store_pc;
    }

  return 0;
}
7228
7229 static int
7230 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7231 struct regcache *regs,
7232 struct displaced_step_closure *dsc)
7233 {
7234 int rn = bits (insn1, 0, 3);
7235 int load = bit (insn1, 4);
7236 int writeback = bit (insn1, 5);
7237
7238 /* Block transfers which don't mention PC can be run directly
7239 out-of-line. */
7240 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7241 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7242
7243 if (rn == ARM_PC_REGNUM)
7244 {
7245 warning (_("displaced: Unpredictable LDM or STM with "
7246 "base register r15"));
7247 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7248 "unpredictable ldm/stm", dsc);
7249 }
7250
7251 if (debug_displaced)
7252 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7253 "%.4x%.4x\n", insn1, insn2);
7254
7255 /* Clear bit 13, since it should be always zero. */
7256 dsc->u.block.regmask = (insn2 & 0xdfff);
7257 dsc->u.block.rn = rn;
7258
7259 dsc->u.block.load = load;
7260 dsc->u.block.user = 0;
7261 dsc->u.block.increment = bit (insn1, 7);
7262 dsc->u.block.before = bit (insn1, 8);
7263 dsc->u.block.writeback = writeback;
7264 dsc->u.block.cond = INST_AL;
7265 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7266
7267 if (load)
7268 {
7269 if (dsc->u.block.regmask == 0xffff)
7270 {
7271 /* This branch is impossible to happen. */
7272 gdb_assert (0);
7273 }
7274 else
7275 {
7276 unsigned int regmask = dsc->u.block.regmask;
7277 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7278 unsigned int to = 0, from = 0, i, new_rn;
7279
7280 for (i = 0; i < num_in_list; i++)
7281 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7282
7283 if (writeback)
7284 insn1 &= ~(1 << 5);
7285
7286 new_regmask = (1 << num_in_list) - 1;
7287
7288 if (debug_displaced)
7289 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7290 "{..., pc}: original reg list %.4x, modified "
7291 "list %.4x\n"), rn, writeback ? "!" : "",
7292 (int) dsc->u.block.regmask, new_regmask);
7293
7294 dsc->modinsn[0] = insn1;
7295 dsc->modinsn[1] = (new_regmask & 0xffff);
7296 dsc->numinsns = 2;
7297
7298 dsc->cleanup = &cleanup_block_load_pc;
7299 }
7300 }
7301 else
7302 {
7303 dsc->modinsn[0] = insn1;
7304 dsc->modinsn[1] = insn2;
7305 dsc->numinsns = 2;
7306 dsc->cleanup = &cleanup_block_store_pc;
7307 }
7308 return 0;
7309 }
7310
7311 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7312 for Linux, where some SVC instructions must be treated specially. */
7313
7314 static void
7315 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7316 struct displaced_step_closure *dsc)
7317 {
7318 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7319
7320 if (debug_displaced)
7321 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7322 "%.8lx\n", (unsigned long) resume_addr);
7323
7324 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7325 }
7326
7327
/* Common copy routine for svc instruction.  */
7329
7330 static int
7331 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7332 struct displaced_step_closure *dsc)
7333 {
7334 /* Preparation: none.
7335 Insn: unmodified svc.
7336 Cleanup: pc <- insn_addr + insn_size. */
7337
7338 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7339 instruction. */
7340 dsc->wrote_to_pc = 1;
7341
7342 /* Allow OS-specific code to override SVC handling. */
7343 if (dsc->u.svc.copy_svc_os)
7344 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7345 else
7346 {
7347 dsc->cleanup = &cleanup_svc;
7348 return 0;
7349 }
7350 }
7351
7352 static int
7353 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7354 struct regcache *regs, struct displaced_step_closure *dsc)
7355 {
7356
7357 if (debug_displaced)
7358 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7359 (unsigned long) insn);
7360
7361 dsc->modinsn[0] = insn;
7362
7363 return install_svc (gdbarch, regs, dsc);
7364 }
7365
7366 static int
7367 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7368 struct regcache *regs, struct displaced_step_closure *dsc)
7369 {
7370
7371 if (debug_displaced)
7372 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7373 insn);
7374
7375 dsc->modinsn[0] = insn;
7376
7377 return install_svc (gdbarch, regs, dsc);
7378 }
7379
7380 /* Copy undefined instructions. */
7381
7382 static int
7383 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7384 struct displaced_step_closure *dsc)
7385 {
7386 if (debug_displaced)
7387 fprintf_unfiltered (gdb_stdlog,
7388 "displaced: copying undefined insn %.8lx\n",
7389 (unsigned long) insn);
7390
7391 dsc->modinsn[0] = insn;
7392
7393 return 0;
7394 }
7395
7396 static int
7397 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7398 struct displaced_step_closure *dsc)
7399 {
7400
7401 if (debug_displaced)
7402 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7403 "%.4x %.4x\n", (unsigned short) insn1,
7404 (unsigned short) insn2);
7405
7406 dsc->modinsn[0] = insn1;
7407 dsc->modinsn[1] = insn2;
7408 dsc->numinsns = 2;
7409
7410 return 0;
7411 }
7412
7413 /* Copy unpredictable instructions. */
7414
7415 static int
7416 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7417 struct displaced_step_closure *dsc)
7418 {
7419 if (debug_displaced)
7420 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7421 "%.8lx\n", (unsigned long) insn);
7422
7423 dsc->modinsn[0] = insn;
7424
7425 return 0;
7426 }
7427
7428 /* The decode_* functions are instruction decoding helpers. They mostly follow
7429 the presentation in the ARM ARM. */
7430
/* Decode the ARM "miscellaneous, memory hints, and Advanced SIMD"
   instruction space and dispatch to the matching copy routine for
   displaced stepping.  OP1 is insn bits 26:20, OP2 is bits 7:4, RN is
   bits 19:16; the case selection follows the decode tables in the
   ARM ARM.  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* PLD/PLDW with rn == pc is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Barrier instructions, distinguished by OP2.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Bit 26 of OP1 (top bit) is ignored for this group.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
	/* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
7490
/* Decode unconditional ARM instructions (condition field 0b1111) and
   dispatch to the matching copy routine for displaced stepping.  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Coprocessor stores and 64-bit transfers, selected by bits 23:21.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
	/* Coprocessor loads; legality depends on whether rn is the PC.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7573
7574 /* Decode miscellaneous instructions in dp/misc encoding space. */
7575
static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  /* OP2 is insn bits 6:4, OP is bits 22:21, OP1 is bits 19:16 (OP1 is
     currently unused by the dispatch below).  */
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				    regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
	/* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through: other OP values in this group are treated as
	 undefined, like the default case below.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7626
/* Decode the ARM data-processing / miscellaneous instruction space and
   dispatch to the matching copy routine for displaced stepping.  Bit 25
   selects the immediate forms; otherwise OP1 (bits 24:20) and OP2
   (bits 7:4) select among register ALU, miscellaneous, multiply,
   synchronization and extra load/store encodings.  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7672
/* Decode ARM load/store word and unsigned byte instructions and copy
   them for displaced stepping.  A is insn bit 25 (register offset form),
   B is bit 4; OP1 is bits 24:20.  The trailing three arguments to
   arm_copy_ldr_str_ldrb_strb are: load (vs. store), access size in
   bytes, and whether this is the unprivileged (T-suffixed) form.  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);

  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
7710
/* Decode ARM media instructions (parallel add/sub, pack/saturate/
   reverse, usad8/usada8, bit-field insert/extract) and dispatch to the
   matching copy routine for displaced stepping.  Defined encodings are
   copied unmodified; reserved ones go through arm_copy_undef.  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
				  "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	{
	  /* Rd == 0xf distinguishes USAD8 from USADA8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
	  /* Rn == 0xf distinguishes BFC from BFI.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7766
/* Dispatch between branch (B/BL/BLX) and block transfer (LDM/STM)
   copies; bit 25 of INSN distinguishes the two encodings.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  return (bit (insn, 25)
	  ? arm_copy_b_bl_blx (gdbarch, insn, regs, dsc)
	  : arm_copy_block_xfer (gdbarch, insn, regs, dsc));
}
7777
/* Decode ARM VFP/Neon extension-register load/store instructions
   (mcrr/mrrc, vstm/vpush, vldm/vpop, vstr/vldr) and dispatch to the
   matching copy routine for displaced stepping.  OPCODE is insn
   bits 24:20.  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int opcode = bits (insn, 20, 24);

  switch (opcode)
    {
    case 0x04: case 0x05:  /* VFP/Neon mrrc/mcrr.  */
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7808
7809 /* Decode shifted register instructions. */
7810
static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* Within the dp (shift reg) space, only MOV can reference the PC;
     everything else is copied unmodified.  */
  unsigned int opcode = bits (insn1, 5, 8);
  unsigned int reg_n = bits (insn1, 0, 3);

  if (opcode != 0x2 || reg_n != 0xf)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);

  /* MOV -- may involve the PC; treat like the ALU-immediate case.  */
  return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
}
7827
7828
7829 /* Decode extension register load/store. Exactly the same as
7830 arm_decode_ext_reg_ld_st. */
7831
static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  /* OPCODE is insn1 bits 8:4; only vldr (a possibly PC-relative load)
     needs rewriting -- everything else is copied unmodified.  */
  unsigned int opcode = bits (insn1, 4, 8);

  switch (opcode)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    case 0x08: case 0x0c: /* 01x00 */
    case 0x0a: case 0x0e: /* 01x10 */
    case 0x12: case 0x16: /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    case 0x09: case 0x0d: /* 01x01 */
    case 0x0b: case 0x0f: /* 01x11 */
    case 0x13: case 0x17: /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7867
7868 static int
7869 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7870 struct regcache *regs, struct displaced_step_closure *dsc)
7871 {
7872 unsigned int op1 = bits (insn, 20, 25);
7873 int op = bit (insn, 4);
7874 unsigned int coproc = bits (insn, 8, 11);
7875 unsigned int rn = bits (insn, 16, 19);
7876
7877 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7878 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7879 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7880 && (coproc & 0xe) != 0xa)
7881 /* stc/stc2. */
7882 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7883 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7884 && (coproc & 0xe) != 0xa)
7885 /* ldc/ldc2 imm/lit. */
7886 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7887 else if ((op1 & 0x3e) == 0x00)
7888 return arm_copy_undef (gdbarch, insn, dsc);
7889 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7890 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7891 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7892 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7893 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7894 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7895 else if ((op1 & 0x30) == 0x20 && !op)
7896 {
7897 if ((coproc & 0xe) == 0xa)
7898 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7899 else
7900 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7901 }
7902 else if ((op1 & 0x30) == 0x20 && op)
7903 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7904 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7905 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7906 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7907 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7908 else if ((op1 & 0x30) == 0x30)
7909 return arm_copy_svc (gdbarch, insn, regs, dsc);
7910 else
7911 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7912 }
7913
/* Decode the Thumb-2 coprocessor / Advanced SIMD instruction space and
   dispatch to the matching copy routine for displaced stepping.  Only
   LDC/LDC2 (possibly PC-relative) and the SIMD/VFP extension-register
   loads need rewriting; everything else runs unmodified.  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	  /* Coproc is 101x: SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0)  /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else  /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
7956
7957 static void
7958 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7959 struct displaced_step_closure *dsc, int rd)
7960 {
7961 /* ADR Rd, #imm
7962
7963 Rewrite as:
7964
7965 Preparation: Rd <- PC
7966 Insn: ADD Rd, #imm
7967 Cleanup: Null.
7968 */
7969
7970 /* Rd <- PC */
7971 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
7972 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
7973 }
7974
7975 static int
7976 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
7977 struct displaced_step_closure *dsc,
7978 int rd, unsigned int imm)
7979 {
7980
7981 /* Encoding T2: ADDS Rd, #imm */
7982 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
7983
7984 install_pc_relative (gdbarch, regs, dsc, rd);
7985
7986 return 0;
7987 }
7988
7989 static int
7990 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
7991 struct regcache *regs,
7992 struct displaced_step_closure *dsc)
7993 {
7994 unsigned int rd = bits (insn, 8, 10);
7995 unsigned int imm8 = bits (insn, 0, 7);
7996
7997 if (debug_displaced)
7998 fprintf_unfiltered (gdb_stdlog,
7999 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8000 rd, imm8, insn);
8001
8002 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8003 }
8004
/* Copy a 32-bit Thumb ADR.W instruction for displaced stepping.  The
   PC-relative computation is rewritten as a plain ADD/SUB on Rd, with
   Rd pre-seeded with the PC value by install_pc_relative.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400;  /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  if (bit (insn1, 7))  /* Encoding T2: the subtracting form of ADR.  */
    {
      /* Rewrite as SUB Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else  /* Encoding T3: the adding form of ADR.  */
    {
      /* Rewrite as ADD Rd, Rd, #imm.  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8041
8042 static int
8043 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8044 struct regcache *regs,
8045 struct displaced_step_closure *dsc)
8046 {
8047 unsigned int rt = bits (insn1, 8, 10);
8048 unsigned int pc;
8049 int imm8 = (bits (insn1, 0, 7) << 2);
8050 CORE_ADDR from = dsc->insn_addr;
8051
8052 /* LDR Rd, #imm8
8053
8054 Rwrite as:
8055
8056 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8057
8058 Insn: LDR R0, [R2, R3];
8059 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8060
8061 if (debug_displaced)
8062 fprintf_unfiltered (gdb_stdlog,
8063 "displaced: copying thumb ldr r%d [pc #%d]\n"
8064 , rt, imm8);
8065
8066 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8067 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8068 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8069 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8070 /* The assembler calculates the required value of the offset from the
8071 Align(PC,4) value of this instruction to the label. */
8072 pc = pc & 0xfffffffc;
8073
8074 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8075 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8076
8077 dsc->rd = rt;
8078 dsc->u.ldst.xfersize = 4;
8079 dsc->u.ldst.rn = 0;
8080 dsc->u.ldst.immed = 0;
8081 dsc->u.ldst.writeback = 0;
8082 dsc->u.ldst.restore_r4 = 0;
8083
8084 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8085
8086 dsc->cleanup = &cleanup_load;
8087
8088 return 0;
8089 }
8090
/* Copy Thumb cbnz/cbz instruction.  */
8092
static int
thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
		     struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  int non_zero = bit (insn1, 11);  /* 1 for CBNZ, 0 for CBZ.  */
  /* Branch offset: i:imm5:'0' reassembled from bits 9 and 7:3.  */
  unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
  CORE_ADDR from = dsc->insn_addr;
  int rn = bits (insn1, 0, 2);
  int rn_val = displaced_read_reg (regs, dsc, rn);

  /* The comparison register's value is known now, so the branch
     condition can be resolved up front.  */
  dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
  /* CBNZ and CBZ do not affect the condition flags.  If condition is true,
     set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
     condition is false, let it be, cleanup_branch will do nothing.  */
  if (dsc->u.branch.cond)
    {
      dsc->u.branch.cond = INST_AL;
      dsc->u.branch.dest = from + 4 + imm5;
    }
  else
      dsc->u.branch.dest = from + 2;

  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
			" insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
			rn, rn_val, insn1, dsc->u.branch.dest);

  /* Execute a NOP out of line; cleanup_branch performs the branch.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;
  return 0;
}
8129
8130 /* Copy Table Branch Byte/Halfword */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);  /* TBH (halfword table) vs. TBB (byte).  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* Rn is the table base, Rm the zero-based index into it.  */
  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  /* Read the branch-offset table entry from the inferior.
     NOTE(review): the return value of target_read_memory is not
     checked; on a failed read BUF is used uninitialized -- confirm
     whether an error path is needed here.  */
  if (is_tbh)
    {
      gdb_byte buf[2];

      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
			" offset 0x%x\n", is_tbh ? "tbh" : "tbb",
			(unsigned int) rn_val, (unsigned int) rm_val,
			(unsigned int) halfwords);

  /* The table entry holds a halfword count relative to the instruction
     after the 4-byte TBB/TBH; let cleanup_branch take the jump.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
8174
8175 static void
8176 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8177 struct displaced_step_closure *dsc)
8178 {
8179 /* PC <- r7 */
8180 int val = displaced_read_reg (regs, dsc, 7);
8181 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8182
8183 /* r7 <- r8 */
8184 val = displaced_read_reg (regs, dsc, 8);
8185 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8186
8187 /* r8 <- tmp[0] */
8188 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8189
8190 }
8191
8192 static int
8193 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8194 struct regcache *regs,
8195 struct displaced_step_closure *dsc)
8196 {
8197 dsc->u.block.regmask = insn1 & 0x00ff;
8198
8199 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8200 to :
8201
8202 (1) register list is full, that is, r0-r7 are used.
8203 Prepare: tmp[0] <- r8
8204
8205 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8206 MOV r8, r7; Move value of r7 to r8;
8207 POP {r7}; Store PC value into r7.
8208
8209 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8210
8211 (2) register list is not full, supposing there are N registers in
8212 register list (except PC, 0 <= N <= 7).
8213 Prepare: for each i, 0 - N, tmp[i] <- ri.
8214
8215 POP {r0, r1, ...., rN};
8216
8217 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8218 from tmp[] properly.
8219 */
8220 if (debug_displaced)
8221 fprintf_unfiltered (gdb_stdlog,
8222 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8223 dsc->u.block.regmask, insn1);
8224
8225 if (dsc->u.block.regmask == 0xff)
8226 {
8227 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8228
8229 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8230 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8231 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8232
8233 dsc->numinsns = 3;
8234 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8235 }
8236 else
8237 {
8238 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8239 unsigned int new_regmask, bit = 1;
8240 unsigned int to = 0, from = 0, i, new_rn;
8241
8242 for (i = 0; i < num_in_list + 1; i++)
8243 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8244
8245 new_regmask = (1 << (num_in_list + 1)) - 1;
8246
8247 if (debug_displaced)
8248 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8249 "{..., pc}: original reg list %.4x,"
8250 " modified list %.4x\n"),
8251 (int) dsc->u.block.regmask, new_regmask);
8252
8253 dsc->u.block.regmask |= 0x8000;
8254 dsc->u.block.writeback = 0;
8255 dsc->u.block.cond = INST_AL;
8256
8257 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8258
8259 dsc->cleanup = &cleanup_block_load_pc;
8260 }
8261
8262 return 0;
8263 }
8264
/* Decode a 16-bit Thumb instruction INSN1 for displaced stepping and
   dispatch to the matching copy routine.  Most encodings are not
   PC-sensitive and are copied unmodified; PC-relative loads/address
   generation, BX/BLX, high-register ALU ops, CBZ/CBNZ, POP {...pc},
   branches and SVC get dedicated handlers.  Any decode failure is a
   fatal internal error.  */

static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  /* Major opcode fields used to index the dispatch below.  */
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					 "shift/add/sub/mov/cmp",
					 dsc);
      break;
    case 4:
      switch (op_bit_10_11)
	{
	case 0: /* Data-processing */
	  err = thumb_copy_unmodified_16bit (gdbarch, insn1,
					     "data-processing",
					     dsc);
	  break;
	case 1: /* Special data instructions and branch and exchange.  */
	  {
	    unsigned short op = bits (insn1, 7, 9);
	    if (op == 6 || op == 7) /* BX or BLX */
	      err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
	    else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
	      err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
						 dsc);
	  }
	  break;
	default: /* LDR (literal) */
	  err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
	}
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address */
	err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
	switch (bits (insn1, 8, 11))
	  {
	  case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
	    err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
	    break;
	  case 12: case 13: /* POP */
	    if (bit (insn1, 8)) /* PC is in register list.  */
	      err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
	    break;
	  case 15: /* If-Then, and hints */
	    if (bits (insn1, 0, 3))
	      /* If-Then makes up to four following instructions conditional.
		 IT instruction itself is not conditional, so handle it as a
		 common unmodified instruction.  */
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
						 dsc);
	    else
	      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
	    break;
	  default:
	    err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
	  }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
	err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
	err = thumb_copy_b (gdbarch, insn1, dsc);
      else
	err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
8367
/* Decode a 32-bit Thumb-2 load/memory-hint instruction (LDRB/LDRSB,
   LDRH/LDRSH, LDR, PLD/PLI) and dispatch to the matching displaced-step
   copy routine.  INSN1/INSN2 are the two instruction halfwords.
   Returns the chosen copy routine's result, or an undefined-instruction
   copy for unallocated encodings.

   Fixed: removed the unused local `err' (every path returns directly).  */

static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
				 uint16_t insn1, uint16_t insn2,
				 struct regcache *regs,
				 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);	/* Target register; 0xf => hint.  */
  int rn = bits (insn1, 0, 3);		/* Base register; 0xf => literal.  */
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
	{
	  if (rn == 0xf)
	    /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
	    return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"pli/pld", dsc);
	}
      else
	{
	  if (rn == 0xf) /* LDRB/LDRSB (literal) */
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     1);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrb{reg, immediate}/ldrbt",
						dsc);
	}

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "pld/unalloc memhint", dsc);
      else
	{
	  if (rn == 0xf)
	    return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
					     2);
	  else
	    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						"ldrh/ldrht", dsc);
	}
      break;
    case 2: /* Load word */
      {
	int insn2_bit_8_11 = bits (insn2, 8, 11);

	if (rn == 0xf)
	  return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
	else if (op1 == 0x1) /* Encoding T3 */
	  return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
					   0, 1);
	else /* op1 == 0x0 */
	  {
	    if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
	      /* LDR (immediate) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, bit (insn2, 8), 1);
	    else if (insn2_bit_8_11 == 0xe) /* LDRT */
	      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						  "ldrt", dsc);
	    else
	      /* LDR (register) */
	      return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
					       dsc, 0, 0);
	  }
	break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
8448
/* Decode a 32-bit Thumb-2 instruction (halfwords INSN1/INSN2) for
   displaced stepping and dispatch to the matching copy routine.
   PC-sensitive encodings (table branch, block transfers, shifted-reg
   data processing, B/BL/BLX, PC-relative address generation, loads)
   get dedicated handlers; everything else is copied unmodified.  Any
   decode failure is a fatal internal error.  */

static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
				    uint16_t insn2, struct regcache *regs,
				    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
	switch (bits (insn1, 9, 10))
	  {
	  case 0:
	    if (bit (insn1, 6))
	      {
		/* Load/store {dual, exclusive}, table branch.  */
		if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
		    && bits (insn2, 5, 7) == 0)
		  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
						  dsc);
		else
		  /* PC is not allowed to use in load/store {dual, exclusive}
		     instructions.  */
		  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						     "load/store dual/ex", dsc);
	      }
	    else /* load/store multiple */
	      {
		switch (bits (insn1, 7, 8))
		  {
		  case 0: case 3: /* SRS, RFE */
		    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						       "srs/rfe", dsc);
		    break;
		  case 1: case 2: /* LDM/STM/PUSH/POP */
		    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
		    break;
		  }
	      }
	    break;

	  case 1:
	    /* Data-processing (shift register).  */
	    err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
					      dsc);
	    break;
	  default: /* Coprocessor instructions.  */
	    err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	    break;
	  }
	break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
	{
	  if (bit (insn2, 14)  /* BLX/BL */
	      || bit (insn2, 12)  /* Unconditional branch */
	      || (bits (insn1, 7, 9) != 0x7))  /* Conditional branch */
	    err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
	  else
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "misc ctrl", dsc);
	}
      else
	{
	  if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
	    {
	      /* NOTE: this OP (the immediate-op field) deliberately
		 shadows the outer OP (bit 15 of INSN2).  */
	      int op = bits (insn1, 4, 8);
	      int rn = bits (insn1, 0, 3);
	      if ((op == 0 || op == 0xa) && rn == 0xf)
		/* ADR/ADD (PC-relative) needs the original PC value.  */
		err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
						    regs, dsc);
	      else
		err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						   "dp/pb", dsc);
	    }
	  else /* Data processing (modified immediate) */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "dp/mi", dsc);
	}
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
	{
	case 0:
	  if (bit (insn1, 4))
	    err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
						   regs, dsc);
	  else /* NEON Load/Store and Store single data item */
	    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					       "neon elt/struct load/store",
					       dsc);
	  break;
	case 1: /* op1 = 3, bits (9, 10) == 1 */
	  switch (bits (insn1, 7, 8))
	    {
	    case 0: case 1: /* Data processing (register) */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "dp(reg)", dsc);
	      break;
	    case 2: /* Multiply and absolute difference */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "mul/mua/diff", dsc);
	      break;
	    case 3: /* Long multiply and divide */
	      err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						 "lmul/lmua", dsc);
	      break;
	    }
	  break;
	default: /* Coprocessor instructions */
	  err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
	  break;
	}
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
		    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
8576
8577 static void
8578 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8579 CORE_ADDR to, struct regcache *regs,
8580 struct displaced_step_closure *dsc)
8581 {
8582 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8583 uint16_t insn1
8584 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8585
8586 if (debug_displaced)
8587 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8588 "at %.8lx\n", insn1, (unsigned long) from);
8589
8590 dsc->is_thumb = 1;
8591 dsc->insn_size = thumb_insn_size (insn1);
8592 if (thumb_insn_size (insn1) == 4)
8593 {
8594 uint16_t insn2
8595 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8596 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8597 }
8598 else
8599 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8600 }
8601
8602 void
8603 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8604 CORE_ADDR to, struct regcache *regs,
8605 struct displaced_step_closure *dsc)
8606 {
8607 int err = 0;
8608 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8609 uint32_t insn;
8610
8611 /* Most displaced instructions use a 1-instruction scratch space, so set this
8612 here and override below if/when necessary. */
8613 dsc->numinsns = 1;
8614 dsc->insn_addr = from;
8615 dsc->scratch_base = to;
8616 dsc->cleanup = NULL;
8617 dsc->wrote_to_pc = 0;
8618
8619 if (!displaced_in_arm_mode (regs))
8620 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8621
8622 dsc->is_thumb = 0;
8623 dsc->insn_size = 4;
8624 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8625 if (debug_displaced)
8626 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8627 "at %.8lx\n", (unsigned long) insn,
8628 (unsigned long) from);
8629
8630 if ((insn & 0xf0000000) == 0xf0000000)
8631 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8632 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8633 {
8634 case 0x0: case 0x1: case 0x2: case 0x3:
8635 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8636 break;
8637
8638 case 0x4: case 0x5: case 0x6:
8639 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8640 break;
8641
8642 case 0x7:
8643 err = arm_decode_media (gdbarch, insn, dsc);
8644 break;
8645
8646 case 0x8: case 0x9: case 0xa: case 0xb:
8647 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8648 break;
8649
8650 case 0xc: case 0xd: case 0xe: case 0xf:
8651 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8652 break;
8653 }
8654
8655 if (err)
8656 internal_error (__FILE__, __LINE__,
8657 _("arm_process_displaced_insn: Instruction decode error"));
8658 }
8659
8660 /* Actually set up the scratch space for a displaced instruction. */
8661
8662 void
8663 arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
8664 CORE_ADDR to, struct displaced_step_closure *dsc)
8665 {
8666 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8667 unsigned int i, len, offset;
8668 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8669 int size = dsc->is_thumb? 2 : 4;
8670 const gdb_byte *bkp_insn;
8671
8672 offset = 0;
8673 /* Poke modified instruction(s). */
8674 for (i = 0; i < dsc->numinsns; i++)
8675 {
8676 if (debug_displaced)
8677 {
8678 fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
8679 if (size == 4)
8680 fprintf_unfiltered (gdb_stdlog, "%.8lx",
8681 dsc->modinsn[i]);
8682 else if (size == 2)
8683 fprintf_unfiltered (gdb_stdlog, "%.4x",
8684 (unsigned short)dsc->modinsn[i]);
8685
8686 fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
8687 (unsigned long) to + offset);
8688
8689 }
8690 write_memory_unsigned_integer (to + offset, size,
8691 byte_order_for_code,
8692 dsc->modinsn[i]);
8693 offset += size;
8694 }
8695
8696 /* Choose the correct breakpoint instruction. */
8697 if (dsc->is_thumb)
8698 {
8699 bkp_insn = tdep->thumb_breakpoint;
8700 len = tdep->thumb_breakpoint_size;
8701 }
8702 else
8703 {
8704 bkp_insn = tdep->arm_breakpoint;
8705 len = tdep->arm_breakpoint_size;
8706 }
8707
8708 /* Put breakpoint afterwards. */
8709 write_memory (to + offset, bkp_insn, len);
8710
8711 if (debug_displaced)
8712 fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
8713 paddress (gdbarch, from), paddress (gdbarch, to));
8714 }
8715
8716 /* Entry point for copying an instruction into scratch space for displaced
8717 stepping. */
8718
8719 struct displaced_step_closure *
8720 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8721 CORE_ADDR from, CORE_ADDR to,
8722 struct regcache *regs)
8723 {
8724 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
8725
8726 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8727 arm_displaced_init_closure (gdbarch, from, to, dsc);
8728
8729 return dsc;
8730 }
8731
8732 /* Entry point for cleaning things up after a displaced instruction has been
8733 single-stepped. */
8734
8735 void
8736 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8737 struct displaced_step_closure *dsc,
8738 CORE_ADDR from, CORE_ADDR to,
8739 struct regcache *regs)
8740 {
8741 if (dsc->cleanup)
8742 dsc->cleanup (gdbarch, regs, dsc);
8743
8744 if (!dsc->wrote_to_pc)
8745 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8746 dsc->insn_addr + dsc->insn_size);
8747
8748 }
8749
8750 #include "bfd-in2.h"
8751 #include "libcoff.h"
8752
/* Disassemble the instruction at MEMADDR for opcodes' print_insn
   callbacks.  If the address is a Thumb location, plant a fake COFF
   Thumb symbol in INFO->symbols so the opcodes disassembler switches
   into Thumb decoding; otherwise clear the symbol list.  The fake
   symbol machinery is built lazily once and kept in static storage,
   so it is shared by all subsequent calls.  Returns the length of the
   disassembled instruction as reported by opcodes.  */

static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* One-time, statically allocated fake symbol state (see below).  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
	{
	  /* Create a fake symbol vector containing a Thumb symbol.
	     This is solely so that the code in print_insn_little_arm()
	     and print_insn_big_arm() in opcodes/arm-dis.c will detect
	     the presence of a Thumb symbol and switch to decoding
	     Thumb instructions.  */

	  fake_target.flavour = bfd_target_coff_flavour;
	  fake_bfd.xvec = &fake_target;
	  ce.u.syment.n_sclass = C_THUMBEXTFUNC;
	  csym.native = &ce;
	  csym.symbol.the_bfd = &fake_bfd;
	  csym.symbol.name = "fake";
	  asym = (asymbol *) & csym;
	}

      /* Strip the Thumb bit before handing the address to opcodes.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8794
8795 /* The following define instruction sequences that will cause ARM
8796 cpu's to take an undefined instruction trap. These are used to
8797 signal a breakpoint to GDB.
8798
8799 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8800 modes. A different instruction is required for each mode. The ARM
8801 cpu's can also be big or little endian. Thus four different
8802 instructions are needed to support all cases.
8803
8804 Note: ARMv4 defines several new instructions that will take the
8805 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8806 not in fact add the new instructions. The new undefined
8807 instructions in ARMv4 are all instructions that had no defined
8808 behaviour in earlier chips. There is no guarantee that they will
8809 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
8811
8812 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8813 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8814 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8815
   Even this may only be true if the condition predicate is true.  The
8817 following use a condition predicate of ALWAYS so it is always TRUE.
8818
8819 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8820 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap.  This can be handled by the
8822 abi-specific code during establishment of the gdbarch vector. */
8823
8824 #define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
8825 #define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
8826 #define THUMB_LE_BREAKPOINT {0xbe,0xbe}
8827 #define THUMB_BE_BREAKPOINT {0xbe,0xbe}
8828
8829 static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
8830 static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
8831 static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
8832 static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8833
8834 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8835 the program counter value to determine whether a 16-bit or 32-bit
8836 breakpoint should be used. It returns a pointer to a string of
8837 bytes that encode a breakpoint instruction, stores the length of
8838 the string to *lenptr, and adjusts the program counter (if
8839 necessary) to point to the actual memory location where the
8840 breakpoint should be inserted. */
8841
8842 static const unsigned char *
8843 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8844 {
8845 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8846 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8847
8848 if (arm_pc_is_thumb (gdbarch, *pcptr))
8849 {
8850 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8851
8852 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8853 check whether we are replacing a 32-bit instruction. */
8854 if (tdep->thumb2_breakpoint != NULL)
8855 {
8856 gdb_byte buf[2];
8857 if (target_read_memory (*pcptr, buf, 2) == 0)
8858 {
8859 unsigned short inst1;
8860 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8861 if (thumb_insn_size (inst1) == 4)
8862 {
8863 *lenptr = tdep->thumb2_breakpoint_size;
8864 return tdep->thumb2_breakpoint;
8865 }
8866 }
8867 }
8868
8869 *lenptr = tdep->thumb_breakpoint_size;
8870 return tdep->thumb_breakpoint;
8871 }
8872 else
8873 {
8874 *lenptr = tdep->arm_breakpoint_size;
8875 return tdep->arm_breakpoint;
8876 }
8877 }
8878
8879 static void
8880 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8881 int *kindptr)
8882 {
8883 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8884
8885 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8886 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8887 that this is not confused with a 32-bit ARM breakpoint. */
8888 *kindptr = 3;
8889 }
8890
8891 /* Extract from an array REGBUF containing the (raw) register state a
8892 function return value of type TYPE, and copy that, in virtual
8893 format, into VALBUF. */
8894
8895 static void
8896 arm_extract_return_value (struct type *type, struct regcache *regs,
8897 gdb_byte *valbuf)
8898 {
8899 struct gdbarch *gdbarch = get_regcache_arch (regs);
8900 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
8901
8902 if (TYPE_CODE_FLT == TYPE_CODE (type))
8903 {
8904 switch (gdbarch_tdep (gdbarch)->fp_model)
8905 {
8906 case ARM_FLOAT_FPA:
8907 {
8908 /* The value is in register F0 in internal format. We need to
8909 extract the raw value and then convert it to the desired
8910 internal type. */
8911 bfd_byte tmpbuf[FP_REGISTER_SIZE];
8912
8913 regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
8914 convert_from_extended (floatformat_from_type (type), tmpbuf,
8915 valbuf, gdbarch_byte_order (gdbarch));
8916 }
8917 break;
8918
8919 case ARM_FLOAT_SOFT_FPA:
8920 case ARM_FLOAT_SOFT_VFP:
8921 /* ARM_FLOAT_VFP can arise if this is a variadic function so
8922 not using the VFP ABI code. */
8923 case ARM_FLOAT_VFP:
8924 regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
8925 if (TYPE_LENGTH (type) > 4)
8926 regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
8927 valbuf + INT_REGISTER_SIZE);
8928 break;
8929
8930 default:
8931 internal_error (__FILE__, __LINE__,
8932 _("arm_extract_return_value: "
8933 "Floating point model not supported"));
8934 break;
8935 }
8936 }
8937 else if (TYPE_CODE (type) == TYPE_CODE_INT
8938 || TYPE_CODE (type) == TYPE_CODE_CHAR
8939 || TYPE_CODE (type) == TYPE_CODE_BOOL
8940 || TYPE_CODE (type) == TYPE_CODE_PTR
8941 || TYPE_CODE (type) == TYPE_CODE_REF
8942 || TYPE_CODE (type) == TYPE_CODE_ENUM)
8943 {
8944 /* If the type is a plain integer, then the access is
8945 straight-forward. Otherwise we have to play around a bit
8946 more. */
8947 int len = TYPE_LENGTH (type);
8948 int regno = ARM_A1_REGNUM;
8949 ULONGEST tmp;
8950
8951 while (len > 0)
8952 {
8953 /* By using store_unsigned_integer we avoid having to do
8954 anything special for small big-endian values. */
8955 regcache_cooked_read_unsigned (regs, regno++, &tmp);
8956 store_unsigned_integer (valbuf,
8957 (len > INT_REGISTER_SIZE
8958 ? INT_REGISTER_SIZE : len),
8959 byte_order, tmp);
8960 len -= INT_REGISTER_SIZE;
8961 valbuf += INT_REGISTER_SIZE;
8962 }
8963 }
8964 else
8965 {
8966 /* For a structure or union the behaviour is as if the value had
8967 been stored to word-aligned memory and then loaded into
8968 registers with 32-bit load instruction(s). */
8969 int len = TYPE_LENGTH (type);
8970 int regno = ARM_A1_REGNUM;
8971 bfd_byte tmpbuf[INT_REGISTER_SIZE];
8972
8973 while (len > 0)
8974 {
8975 regcache_cooked_read (regs, regno++, tmpbuf);
8976 memcpy (valbuf, tmpbuf,
8977 len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
8978 len -= INT_REGISTER_SIZE;
8979 valbuf += INT_REGISTER_SIZE;
8980 }
8981 }
8982 }
8983
8984
8985 /* Will a function return an aggregate type in memory or in a
8986 register? Return 0 if an aggregate type can be returned in a
8987 register, 1 if it must be returned in memory. */
8988
8989 static int
8990 arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
8991 {
8992 enum type_code code;
8993
8994 type = check_typedef (type);
8995
8996 /* Simple, non-aggregate types (ie not including vectors and
8997 complex) are always returned in a register (or registers). */
8998 code = TYPE_CODE (type);
8999 if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
9000 && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
9001 return 0;
9002
9003 if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
9004 {
9005 /* The AAPCS says all aggregates not larger than a word are returned
9006 in a register. */
9007 if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
9008 return 0;
9009
9010 return 1;
9011 }
9012 else
9013 {
9014 int nRc;
9015
9016 /* All aggregate types that won't fit in a register must be returned
9017 in memory. */
9018 if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
9019 return 1;
9020
9021 /* In the ARM ABI, "integer" like aggregate types are returned in
9022 registers. For an aggregate type to be integer like, its size
9023 must be less than or equal to INT_REGISTER_SIZE and the
9024 offset of each addressable subfield must be zero. Note that bit
9025 fields are not addressable, and all addressable subfields of
9026 unions always start at offset zero.
9027
9028 This function is based on the behaviour of GCC 2.95.1.
9029 See: gcc/arm.c: arm_return_in_memory() for details.
9030
9031 Note: All versions of GCC before GCC 2.95.2 do not set up the
9032 parameters correctly for a function returning the following
9033 structure: struct { float f;}; This should be returned in memory,
9034 not a register. Richard Earnshaw sent me a patch, but I do not
9035 know of any way to detect if a function like the above has been
9036 compiled with the correct calling convention. */
9037
9038 /* Assume all other aggregate types can be returned in a register.
9039 Run a check for structures, unions and arrays. */
9040 nRc = 0;
9041
9042 if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
9043 {
9044 int i;
9045 /* Need to check if this struct/union is "integer" like. For
9046 this to be true, its size must be less than or equal to
9047 INT_REGISTER_SIZE and the offset of each addressable
9048 subfield must be zero. Note that bit fields are not
9049 addressable, and unions always start at offset zero. If any
9050 of the subfields is a floating point type, the struct/union
9051 cannot be an integer type. */
9052
9053 /* For each field in the object, check:
9054 1) Is it FP? --> yes, nRc = 1;
9055 2) Is it addressable (bitpos != 0) and
9056 not packed (bitsize == 0)?
9057 --> yes, nRc = 1
9058 */
9059
9060 for (i = 0; i < TYPE_NFIELDS (type); i++)
9061 {
9062 enum type_code field_type_code;
9063
9064 field_type_code
9065 = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
9066 i)));
9067
9068 /* Is it a floating point type field? */
9069 if (field_type_code == TYPE_CODE_FLT)
9070 {
9071 nRc = 1;
9072 break;
9073 }
9074
9075 /* If bitpos != 0, then we have to care about it. */
9076 if (TYPE_FIELD_BITPOS (type, i) != 0)
9077 {
9078 /* Bitfields are not addressable. If the field bitsize is
9079 zero, then the field is not packed. Hence it cannot be
9080 a bitfield or any other packed type. */
9081 if (TYPE_FIELD_BITSIZE (type, i) == 0)
9082 {
9083 nRc = 1;
9084 break;
9085 }
9086 }
9087 }
9088 }
9089
9090 return nRc;
9091 }
9092 }
9093
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  The register layout mirrors
   arm_extract_return_value: FPA returns in F0 (in extended format),
   soft-float and non-CPRC VFP returns in r0 (and r1 for 8-byte
   values); everything else is laid out across consecutive core
   registers starting at r0.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:

	  /* F0 holds floats in FPA extended format; convert from the
	     virtual format before writing the register.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  /* A double-sized value spills into r1 as well.  */
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  /* unpack_long already sign/zero-extended VAL per TYPE, so a
	     signed store of the full register width is correct here.  */
	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  /* The final (partial) chunk only copies the bytes that remain;
	     the rest of TMPBUF is whatever was there before.  */
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9187
9188
9189 /* Handle function return values. */
9190
9191 static enum return_value_convention
9192 arm_return_value (struct gdbarch *gdbarch, struct value *function,
9193 struct type *valtype, struct regcache *regcache,
9194 gdb_byte *readbuf, const gdb_byte *writebuf)
9195 {
9196 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9197 struct type *func_type = function ? value_type (function) : NULL;
9198 enum arm_vfp_cprc_base_type vfp_base_type;
9199 int vfp_base_count;
9200
9201 if (arm_vfp_abi_for_function (gdbarch, func_type)
9202 && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
9203 {
9204 int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
9205 int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
9206 int i;
9207 for (i = 0; i < vfp_base_count; i++)
9208 {
9209 if (reg_char == 'q')
9210 {
9211 if (writebuf)
9212 arm_neon_quad_write (gdbarch, regcache, i,
9213 writebuf + i * unit_length);
9214
9215 if (readbuf)
9216 arm_neon_quad_read (gdbarch, regcache, i,
9217 readbuf + i * unit_length);
9218 }
9219 else
9220 {
9221 char name_buf[4];
9222 int regnum;
9223
9224 xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
9225 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9226 strlen (name_buf));
9227 if (writebuf)
9228 regcache_cooked_write (regcache, regnum,
9229 writebuf + i * unit_length);
9230 if (readbuf)
9231 regcache_cooked_read (regcache, regnum,
9232 readbuf + i * unit_length);
9233 }
9234 }
9235 return RETURN_VALUE_REGISTER_CONVENTION;
9236 }
9237
9238 if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
9239 || TYPE_CODE (valtype) == TYPE_CODE_UNION
9240 || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
9241 {
9242 if (tdep->struct_return == pcc_struct_return
9243 || arm_return_in_memory (gdbarch, valtype))
9244 return RETURN_VALUE_STRUCT_CONVENTION;
9245 }
9246 else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
9247 {
9248 if (arm_return_in_memory (gdbarch, valtype))
9249 return RETURN_VALUE_STRUCT_CONVENTION;
9250 }
9251
9252 if (writebuf)
9253 arm_store_return_value (valtype, regcache, writebuf);
9254
9255 if (readbuf)
9256 arm_extract_return_value (valtype, regcache, readbuf);
9257
9258 return RETURN_VALUE_REGISTER_CONVENTION;
9259 }
9260
9261
9262 static int
9263 arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
9264 {
9265 struct gdbarch *gdbarch = get_frame_arch (frame);
9266 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
9267 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
9268 CORE_ADDR jb_addr;
9269 gdb_byte buf[INT_REGISTER_SIZE];
9270
9271 jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);
9272
9273 if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
9274 INT_REGISTER_SIZE))
9275 return 0;
9276
9277 *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
9278 return 1;
9279 }
9280
9281 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9282 return the target PC. Otherwise return 0. */
9283
9284 CORE_ADDR
9285 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9286 {
9287 const char *name;
9288 int namelen;
9289 CORE_ADDR start_addr;
9290
9291 /* Find the starting address and name of the function containing the PC. */
9292 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9293 {
9294 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9295 check here. */
9296 start_addr = arm_skip_bx_reg (frame, pc);
9297 if (start_addr != 0)
9298 return start_addr;
9299
9300 return 0;
9301 }
9302
9303 /* If PC is in a Thumb call or return stub, return the address of the
9304 target PC, which is in a register. The thunk functions are called
9305 _call_via_xx, where x is the register name. The possible names
9306 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9307 functions, named __ARM_call_via_r[0-7]. */
9308 if (startswith (name, "_call_via_")
9309 || startswith (name, "__ARM_call_via_"))
9310 {
9311 /* Use the name suffix to determine which register contains the
9312 target PC. */
9313 static char *table[15] =
9314 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9315 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9316 };
9317 int regno;
9318 int offset = strlen (name) - 2;
9319
9320 for (regno = 0; regno <= 14; regno++)
9321 if (strcmp (&name[offset], table[regno]) == 0)
9322 return get_frame_register_unsigned (frame, regno);
9323 }
9324
9325 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9326 non-interworking calls to foo. We could decode the stubs
9327 to find the target but it's easier to use the symbol table. */
9328 namelen = strlen (name);
9329 if (name[0] == '_' && name[1] == '_'
9330 && ((namelen > 2 + strlen ("_from_thumb")
9331 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9332 || (namelen > 2 + strlen ("_from_arm")
9333 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9334 {
9335 char *target_name;
9336 int target_len = namelen - 2;
9337 struct bound_minimal_symbol minsym;
9338 struct objfile *objfile;
9339 struct obj_section *sec;
9340
9341 if (name[namelen - 1] == 'b')
9342 target_len -= strlen ("_from_thumb");
9343 else
9344 target_len -= strlen ("_from_arm");
9345
9346 target_name = (char *) alloca (target_len + 1);
9347 memcpy (target_name, name + 2, target_len);
9348 target_name[target_len] = '\0';
9349
9350 sec = find_pc_section (pc);
9351 objfile = (sec == NULL) ? NULL : sec->objfile;
9352 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9353 if (minsym.minsym != NULL)
9354 return BMSYMBOL_VALUE_ADDRESS (minsym);
9355 else
9356 return 0;
9357 }
9358
9359 return 0; /* not a stub */
9360 }
9361
9362 static void
9363 set_arm_command (char *args, int from_tty)
9364 {
9365 printf_unfiltered (_("\
9366 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9367 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9368 }
9369
/* Top-level handler for the "show arm" command prefix: display the
   current values of all "show arm" subcommands.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
9375
/* Re-select the target architecture after an ARM-specific setting
   (ABI, FP model, ...) has been changed, so the new setting takes
   effect immediately.  No-op when the current target is not ARM.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  An empty (default-initialized) INFO
     forces re-selection based on the current global settings.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
9391
9392 static void
9393 set_fp_model_sfunc (char *args, int from_tty,
9394 struct cmd_list_element *c)
9395 {
9396 int fp_model;
9397
9398 for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
9399 if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
9400 {
9401 arm_fp_model = (enum arm_float_model) fp_model;
9402 break;
9403 }
9404
9405 if (fp_model == ARM_FLOAT_LAST)
9406 internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
9407 current_fp_model);
9408
9409 arm_update_current_architecture ();
9410 }
9411
/* Show-hook for "show arm fp-model".  When the setting is "auto" and
   the target is ARM, also report which concrete model was chosen.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
9428
9429 static void
9430 arm_set_abi (char *args, int from_tty,
9431 struct cmd_list_element *c)
9432 {
9433 int arm_abi;
9434
9435 for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
9436 if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
9437 {
9438 arm_abi_global = (enum arm_abi_kind) arm_abi;
9439 break;
9440 }
9441
9442 if (arm_abi == ARM_ABI_LAST)
9443 internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
9444 arm_abi_string);
9445
9446 arm_update_current_architecture ();
9447 }
9448
/* Show-hook for "show arm abi".  When the setting is "auto" and the
   target is ARM, also report which concrete ABI was chosen.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
9464
/* Show-hook for "show arm fallback-mode": report the mode (ARM or
   Thumb) assumed when no symbol information is available.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
9474
9475 static void
9476 arm_show_force_mode (struct ui_file *file, int from_tty,
9477 struct cmd_list_element *c, const char *value)
9478 {
9479 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9480
9481 fprintf_filtered (file,
9482 _("The current execution mode assumed "
9483 "(even when symbols are available) is \"%s\".\n"),
9484 arm_force_mode_string);
9485 }
9486
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* ARGS/FROM_TTY/C are unused; the chosen style has already been
     stored by the "set" machinery.  */
  set_disassembly_style ();
}
9498 \f
9499 /* Return the ARM register name corresponding to register I. */
9500 static const char *
9501 arm_register_name (struct gdbarch *gdbarch, int i)
9502 {
9503 const int num_regs = gdbarch_num_regs (gdbarch);
9504
9505 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9506 && i >= num_regs && i < num_regs + 32)
9507 {
9508 static const char *const vfp_pseudo_names[] = {
9509 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9510 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9511 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9512 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9513 };
9514
9515 return vfp_pseudo_names[i - num_regs];
9516 }
9517
9518 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9519 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9520 {
9521 static const char *const neon_pseudo_names[] = {
9522 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9523 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9524 };
9525
9526 return neon_pseudo_names[i - num_regs - 32];
9527 }
9528
9529 if (i >= ARRAY_SIZE (arm_register_names))
9530 /* These registers are only supported on targets which supply
9531 an XML description. */
9532 return "";
9533
9534 return arm_register_names[i];
9535 }
9536
9537 static void
9538 set_disassembly_style (void)
9539 {
9540 int current;
9541
9542 /* Find the style that the user wants. */
9543 for (current = 0; current < num_disassembly_options; current++)
9544 if (disassembly_style == valid_disassembly_styles[current])
9545 break;
9546 gdb_assert (current < num_disassembly_options);
9547
9548 /* Synchronize the disassembler. */
9549 set_arm_regname_option (current);
9550 }
9551
9552 /* Test whether the coff symbol specific value corresponds to a Thumb
9553 function. */
9554
9555 static int
9556 coff_sym_is_thumb (int val)
9557 {
9558 return (val == C_THUMBEXT
9559 || val == C_THUMBSTAT
9560 || val == C_THUMBEXTFUNC
9561 || val == C_THUMBSTATFUNC
9562 || val == C_THUMBLABEL);
9563 }
9564
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  /* The ELF symbol's branch-type annotation tells us whether calls to
     it target Thumb code.  */
  if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
9579
/* Mark MSYM as Thumb if the COFF storage class VAL denotes a Thumb
   symbol.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9586
/* Destructor for the per-objfile mapping-symbol data: release the
   per-section mapping-symbol vectors.  The struct itself lives on the
   objfile obstack and needs no explicit free.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
9596
/* Record an ARM mapping symbol ($a, $t or $d) from OBJFILE into the
   per-objfile, per-section table used later to classify addresses as
   ARM code, Thumb code or data.  Other "$" symbols are ignored.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily create the per-objfile data the first time a mapping
     symbol is seen for this objfile.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: fall back to a binary search for the
	     right insertion point to keep the vector sorted.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
9646
9647 static void
9648 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9649 {
9650 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9651 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9652
9653 /* If necessary, set the T bit. */
9654 if (arm_apcs_32)
9655 {
9656 ULONGEST val, t_bit;
9657 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9658 t_bit = arm_psr_thumb_bit (gdbarch);
9659 if (arm_pc_is_thumb (gdbarch, pc))
9660 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9661 val | t_bit);
9662 else
9663 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9664 val & ~t_bit);
9665 }
9666 }
9667
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
		    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  /* q<N> maps onto d<2N> and d<2N+1>; look up d<2N> by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  status = regcache_raw_read (regcache, double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  /* The second double register fills the other half of BUF.  */
  offset = 8 - offset;
  status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
9707
/* Read a VFP/NEON pseudo register (REGNUM counted from the start of
   the pseudo-register space): s0-s31 are synthesized from halves of
   the d registers, q0-q15 from pairs of d registers.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* s<N> lives in half of d<N/2>; look the latter up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache_raw_read (regcache, double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
9746
/* Store the contents of BUF to a NEON quad register, by writing to
   two double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index
   of the quad register, in [0, 15].  */

static void
arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
		     int regnum, const gdb_byte *buf)
{
  char name_buf[4];
  int offset, double_regnum;

  /* q<N> maps onto d<2N> and d<2N+1>; look up d<2N> by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  regcache_raw_write (regcache, double_regnum, buf + offset);
  offset = 8 - offset;
  regcache_raw_write (regcache, double_regnum + 1, buf + offset);
}
9775
/* Write a VFP/NEON pseudo register (REGNUM counted from the start of
   the pseudo-register space): s0-s31 are stored into halves of the
   d registers, q0-q15 into pairs of d registers.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write: only 4 bytes of the double register
	 change, so preserve the other half.  */
      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
9811
/* Callback for user-register aliases: BATON points at the register
   number captured when the alias was registered.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  return value_of_register (*(const int *) baton, frame);
}
9818 \f
/* OS/ABI sniffer for ARM ELF binaries: detect the OSABI from the ELF
   header, consulting .note sections when the header carries the
   generic ELFOSABI_ARM value.  */

static enum gdb_osabi
arm_elf_osabi_sniffer (bfd *abfd)
{
  unsigned int elfosabi;
  enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;

  elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];

  if (elfosabi == ELFOSABI_ARM)
    /* GNU tools use this value.  Check note sections in this case,
       as well.  */
    bfd_map_over_sections (abfd,
			   generic_elf_osabi_sniff_abi_tag_sections,
			   &osabi);

  /* Anything else will be handled by the generic ELF sniffer.  */
  return osabi;
}
9837
9838 static int
9839 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9840 struct reggroup *group)
9841 {
9842 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9843 this, FPS register belongs to save_regroup, restore_reggroup, and
9844 all_reggroup, of course. */
9845 if (regnum == ARM_FPS_REGNUM)
9846 return (group == float_reggroup
9847 || group == save_reggroup
9848 || group == restore_reggroup
9849 || group == all_reggroup);
9850 else
9851 return default_register_reggroup_p (gdbarch, regnum, group);
9852 }
9853
9854 \f
9855 /* For backward-compatibility we allow two 'g' packet lengths with
9856 the remote protocol depending on whether FPA registers are
9857 supplied. M-profile targets do not have FPA registers, but some
9858 stubs already exist in the wild which use a 'g' packet which
9859 supplies them albeit with dummy values. The packet format which
9860 includes FPA registers should be considered deprecated for
9861 M-profile targets. */
9862
9863 static void
9864 arm_register_g_packet_guesses (struct gdbarch *gdbarch)
9865 {
9866 if (gdbarch_tdep (gdbarch)->is_m)
9867 {
9868 /* If we know from the executable this is an M-profile target,
9869 cater for remote targets whose register set layout is the
9870 same as the FPA layout. */
9871 register_remote_g_packet_guess (gdbarch,
9872 /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
9873 (16 * INT_REGISTER_SIZE)
9874 + (8 * FP_REGISTER_SIZE)
9875 + (2 * INT_REGISTER_SIZE),
9876 tdesc_arm_with_m_fpa_layout);
9877
9878 /* The regular M-profile layout. */
9879 register_remote_g_packet_guess (gdbarch,
9880 /* r0-r12,sp,lr,pc; xpsr */
9881 (16 * INT_REGISTER_SIZE)
9882 + INT_REGISTER_SIZE,
9883 tdesc_arm_with_m);
9884
9885 /* M-profile plus M4F VFP. */
9886 register_remote_g_packet_guess (gdbarch,
9887 /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
9888 (16 * INT_REGISTER_SIZE)
9889 + (16 * VFP_REGISTER_SIZE)
9890 + (2 * INT_REGISTER_SIZE),
9891 tdesc_arm_with_m_vfp_d16);
9892 }
9893
9894 /* Otherwise we don't have a useful guess. */
9895 }
9896
9897 \f
9898 /* Initialize the current architecture based on INFO. If possible,
9899 re-use an architecture from ARCHES, which is a list of
9900 architectures already created during this debugging session.
9901
9902 Called e.g. at program startup, when reading a core file, and when
9903 reading a binary file. */
9904
9905 static struct gdbarch *
9906 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9907 {
9908 struct gdbarch_tdep *tdep;
9909 struct gdbarch *gdbarch;
9910 struct gdbarch_list *best_arch;
9911 enum arm_abi_kind arm_abi = arm_abi_global;
9912 enum arm_float_model fp_model = arm_fp_model;
9913 struct tdesc_arch_data *tdesc_data = NULL;
9914 int i, is_m = 0;
9915 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9916 int have_wmmx_registers = 0;
9917 int have_neon = 0;
9918 int have_fpa_registers = 1;
9919 const struct target_desc *tdesc = info.target_desc;
9920
9921 /* If we have an object to base this architecture on, try to determine
9922 its ABI. */
9923
9924 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9925 {
9926 int ei_osabi, e_flags;
9927
9928 switch (bfd_get_flavour (info.abfd))
9929 {
9930 case bfd_target_aout_flavour:
9931 /* Assume it's an old APCS-style ABI. */
9932 arm_abi = ARM_ABI_APCS;
9933 break;
9934
9935 case bfd_target_coff_flavour:
9936 /* Assume it's an old APCS-style ABI. */
9937 /* XXX WinCE? */
9938 arm_abi = ARM_ABI_APCS;
9939 break;
9940
9941 case bfd_target_elf_flavour:
9942 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9943 e_flags = elf_elfheader (info.abfd)->e_flags;
9944
9945 if (ei_osabi == ELFOSABI_ARM)
9946 {
9947 /* GNU tools used to use this value, but do not for EABI
9948 objects. There's nowhere to tag an EABI version
9949 anyway, so assume APCS. */
9950 arm_abi = ARM_ABI_APCS;
9951 }
9952 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9953 {
9954 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
9955 int attr_arch, attr_profile;
9956
9957 switch (eabi_ver)
9958 {
9959 case EF_ARM_EABI_UNKNOWN:
9960 /* Assume GNU tools. */
9961 arm_abi = ARM_ABI_APCS;
9962 break;
9963
9964 case EF_ARM_EABI_VER4:
9965 case EF_ARM_EABI_VER5:
9966 arm_abi = ARM_ABI_AAPCS;
9967 /* EABI binaries default to VFP float ordering.
9968 They may also contain build attributes that can
9969 be used to identify if the VFP argument-passing
9970 ABI is in use. */
9971 if (fp_model == ARM_FLOAT_AUTO)
9972 {
9973 #ifdef HAVE_ELF
9974 switch (bfd_elf_get_obj_attr_int (info.abfd,
9975 OBJ_ATTR_PROC,
9976 Tag_ABI_VFP_args))
9977 {
9978 case AEABI_VFP_args_base:
9979 /* "The user intended FP parameter/result
9980 passing to conform to AAPCS, base
9981 variant". */
9982 fp_model = ARM_FLOAT_SOFT_VFP;
9983 break;
9984 case AEABI_VFP_args_vfp:
9985 /* "The user intended FP parameter/result
9986 passing to conform to AAPCS, VFP
9987 variant". */
9988 fp_model = ARM_FLOAT_VFP;
9989 break;
9990 case AEABI_VFP_args_toolchain:
9991 /* "The user intended FP parameter/result
9992 passing to conform to tool chain-specific
9993 conventions" - we don't know any such
9994 conventions, so leave it as "auto". */
9995 break;
9996 case AEABI_VFP_args_compatible:
9997 /* "Code is compatible with both the base
9998 and VFP variants; the user did not permit
9999 non-variadic functions to pass FP
10000 parameters/results" - leave it as
10001 "auto". */
10002 break;
10003 default:
10004 /* Attribute value not mentioned in the
10005 November 2012 ABI, so leave it as
10006 "auto". */
10007 break;
10008 }
10009 #else
10010 fp_model = ARM_FLOAT_SOFT_VFP;
10011 #endif
10012 }
10013 break;
10014
10015 default:
10016 /* Leave it as "auto". */
10017 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10018 break;
10019 }
10020
10021 #ifdef HAVE_ELF
10022 /* Detect M-profile programs. This only works if the
10023 executable file includes build attributes; GCC does
10024 copy them to the executable, but e.g. RealView does
10025 not. */
10026 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10027 Tag_CPU_arch);
10028 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10029 OBJ_ATTR_PROC,
10030 Tag_CPU_arch_profile);
10031 /* GCC specifies the profile for v6-M; RealView only
10032 specifies the profile for architectures starting with
10033 V7 (as opposed to architectures with a tag
10034 numerically greater than TAG_CPU_ARCH_V7). */
10035 if (!tdesc_has_registers (tdesc)
10036 && (attr_arch == TAG_CPU_ARCH_V6_M
10037 || attr_arch == TAG_CPU_ARCH_V6S_M
10038 || attr_profile == 'M'))
10039 is_m = 1;
10040 #endif
10041 }
10042
10043 if (fp_model == ARM_FLOAT_AUTO)
10044 {
10045 int e_flags = elf_elfheader (info.abfd)->e_flags;
10046
10047 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10048 {
10049 case 0:
10050 /* Leave it as "auto". Strictly speaking this case
10051 means FPA, but almost nobody uses that now, and
10052 many toolchains fail to set the appropriate bits
10053 for the floating-point model they use. */
10054 break;
10055 case EF_ARM_SOFT_FLOAT:
10056 fp_model = ARM_FLOAT_SOFT_FPA;
10057 break;
10058 case EF_ARM_VFP_FLOAT:
10059 fp_model = ARM_FLOAT_VFP;
10060 break;
10061 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10062 fp_model = ARM_FLOAT_SOFT_VFP;
10063 break;
10064 }
10065 }
10066
10067 if (e_flags & EF_ARM_BE8)
10068 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10069
10070 break;
10071
10072 default:
10073 /* Leave it as "auto". */
10074 break;
10075 }
10076 }
10077
10078 /* Check any target description for validity. */
10079 if (tdesc_has_registers (tdesc))
10080 {
10081 /* For most registers we require GDB's default names; but also allow
10082 the numeric names for sp / lr / pc, as a convenience. */
10083 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10084 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10085 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10086
10087 const struct tdesc_feature *feature;
10088 int valid_p;
10089
10090 feature = tdesc_find_feature (tdesc,
10091 "org.gnu.gdb.arm.core");
10092 if (feature == NULL)
10093 {
10094 feature = tdesc_find_feature (tdesc,
10095 "org.gnu.gdb.arm.m-profile");
10096 if (feature == NULL)
10097 return NULL;
10098 else
10099 is_m = 1;
10100 }
10101
10102 tdesc_data = tdesc_data_alloc ();
10103
10104 valid_p = 1;
10105 for (i = 0; i < ARM_SP_REGNUM; i++)
10106 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10107 arm_register_names[i]);
10108 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10109 ARM_SP_REGNUM,
10110 arm_sp_names);
10111 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10112 ARM_LR_REGNUM,
10113 arm_lr_names);
10114 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10115 ARM_PC_REGNUM,
10116 arm_pc_names);
10117 if (is_m)
10118 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10119 ARM_PS_REGNUM, "xpsr");
10120 else
10121 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10122 ARM_PS_REGNUM, "cpsr");
10123
10124 if (!valid_p)
10125 {
10126 tdesc_data_cleanup (tdesc_data);
10127 return NULL;
10128 }
10129
10130 feature = tdesc_find_feature (tdesc,
10131 "org.gnu.gdb.arm.fpa");
10132 if (feature != NULL)
10133 {
10134 valid_p = 1;
10135 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10136 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10137 arm_register_names[i]);
10138 if (!valid_p)
10139 {
10140 tdesc_data_cleanup (tdesc_data);
10141 return NULL;
10142 }
10143 }
10144 else
10145 have_fpa_registers = 0;
10146
10147 feature = tdesc_find_feature (tdesc,
10148 "org.gnu.gdb.xscale.iwmmxt");
10149 if (feature != NULL)
10150 {
10151 static const char *const iwmmxt_names[] = {
10152 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10153 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10154 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10155 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10156 };
10157
10158 valid_p = 1;
10159 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10160 valid_p
10161 &= tdesc_numbered_register (feature, tdesc_data, i,
10162 iwmmxt_names[i - ARM_WR0_REGNUM]);
10163
10164 /* Check for the control registers, but do not fail if they
10165 are missing. */
10166 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10167 tdesc_numbered_register (feature, tdesc_data, i,
10168 iwmmxt_names[i - ARM_WR0_REGNUM]);
10169
10170 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10171 valid_p
10172 &= tdesc_numbered_register (feature, tdesc_data, i,
10173 iwmmxt_names[i - ARM_WR0_REGNUM]);
10174
10175 if (!valid_p)
10176 {
10177 tdesc_data_cleanup (tdesc_data);
10178 return NULL;
10179 }
10180
10181 have_wmmx_registers = 1;
10182 }
10183
10184 /* If we have a VFP unit, check whether the single precision registers
10185 are present. If not, then we will synthesize them as pseudo
10186 registers. */
10187 feature = tdesc_find_feature (tdesc,
10188 "org.gnu.gdb.arm.vfp");
10189 if (feature != NULL)
10190 {
10191 static const char *const vfp_double_names[] = {
10192 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10193 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10194 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10195 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10196 };
10197
10198 /* Require the double precision registers. There must be either
10199 16 or 32. */
10200 valid_p = 1;
10201 for (i = 0; i < 32; i++)
10202 {
10203 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10204 ARM_D0_REGNUM + i,
10205 vfp_double_names[i]);
10206 if (!valid_p)
10207 break;
10208 }
10209 if (!valid_p && i == 16)
10210 valid_p = 1;
10211
10212 /* Also require FPSCR. */
10213 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10214 ARM_FPSCR_REGNUM, "fpscr");
10215 if (!valid_p)
10216 {
10217 tdesc_data_cleanup (tdesc_data);
10218 return NULL;
10219 }
10220
10221 if (tdesc_unnumbered_register (feature, "s0") == 0)
10222 have_vfp_pseudos = 1;
10223
10224 vfp_register_count = i;
10225
10226 /* If we have VFP, also check for NEON. The architecture allows
10227 NEON without VFP (integer vector operations only), but GDB
10228 does not support that. */
10229 feature = tdesc_find_feature (tdesc,
10230 "org.gnu.gdb.arm.neon");
10231 if (feature != NULL)
10232 {
10233 /* NEON requires 32 double-precision registers. */
10234 if (i != 32)
10235 {
10236 tdesc_data_cleanup (tdesc_data);
10237 return NULL;
10238 }
10239
10240 /* If there are quad registers defined by the stub, use
10241 their type; otherwise (normally) provide them with
10242 the default type. */
10243 if (tdesc_unnumbered_register (feature, "q0") == 0)
10244 have_neon_pseudos = 1;
10245
10246 have_neon = 1;
10247 }
10248 }
10249 }
10250
10251 /* If there is already a candidate, use it. */
10252 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10253 best_arch != NULL;
10254 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10255 {
10256 if (arm_abi != ARM_ABI_AUTO
10257 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10258 continue;
10259
10260 if (fp_model != ARM_FLOAT_AUTO
10261 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10262 continue;
10263
10264 /* There are various other properties in tdep that we do not
10265 need to check here: those derived from a target description,
10266 since gdbarches with a different target description are
10267 automatically disqualified. */
10268
10269 /* Do check is_m, though, since it might come from the binary. */
10270 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10271 continue;
10272
10273 /* Found a match. */
10274 break;
10275 }
10276
10277 if (best_arch != NULL)
10278 {
10279 if (tdesc_data != NULL)
10280 tdesc_data_cleanup (tdesc_data);
10281 return best_arch->gdbarch;
10282 }
10283
10284 tdep = XCNEW (struct gdbarch_tdep);
10285 gdbarch = gdbarch_alloc (&info, tdep);
10286
10287 /* Record additional information about the architecture we are defining.
10288 These are gdbarch discriminators, like the OSABI. */
10289 tdep->arm_abi = arm_abi;
10290 tdep->fp_model = fp_model;
10291 tdep->is_m = is_m;
10292 tdep->have_fpa_registers = have_fpa_registers;
10293 tdep->have_wmmx_registers = have_wmmx_registers;
10294 gdb_assert (vfp_register_count == 0
10295 || vfp_register_count == 16
10296 || vfp_register_count == 32);
10297 tdep->vfp_register_count = vfp_register_count;
10298 tdep->have_vfp_pseudos = have_vfp_pseudos;
10299 tdep->have_neon_pseudos = have_neon_pseudos;
10300 tdep->have_neon = have_neon;
10301
10302 arm_register_g_packet_guesses (gdbarch);
10303
10304 /* Breakpoints. */
10305 switch (info.byte_order_for_code)
10306 {
10307 case BFD_ENDIAN_BIG:
10308 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10309 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10310 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10311 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10312
10313 break;
10314
10315 case BFD_ENDIAN_LITTLE:
10316 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10317 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10318 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10319 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10320
10321 break;
10322
10323 default:
10324 internal_error (__FILE__, __LINE__,
10325 _("arm_gdbarch_init: bad byte order for float format"));
10326 }
10327
10328 /* On ARM targets char defaults to unsigned. */
10329 set_gdbarch_char_signed (gdbarch, 0);
10330
10331 /* Note: for displaced stepping, this includes the breakpoint, and one word
10332 of additional scratch space. This setting isn't used for anything beside
10333 displaced stepping at present. */
10334 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10335
10336 /* This should be low enough for everything. */
10337 tdep->lowest_pc = 0x20;
10338 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10339
10340 /* The default, for both APCS and AAPCS, is to return small
10341 structures in registers. */
10342 tdep->struct_return = reg_struct_return;
10343
10344 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10345 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10346
10347 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10348
10349 /* Frame handling. */
10350 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10351 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10352 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10353
10354 frame_base_set_default (gdbarch, &arm_normal_base);
10355
10356 /* Address manipulation. */
10357 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10358
10359 /* Advance PC across function entry code. */
10360 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10361
10362 /* Detect whether PC is at a point where the stack has been destroyed. */
10363 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10364
10365 /* Skip trampolines. */
10366 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10367
10368 /* The stack grows downward. */
10369 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10370
10371 /* Breakpoint manipulation. */
10372 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10373 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10374 arm_remote_breakpoint_from_pc);
10375
10376 /* Information about registers, etc. */
10377 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10378 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10379 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10380 set_gdbarch_register_type (gdbarch, arm_register_type);
10381 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10382
10383 /* This "info float" is FPA-specific. Use the generic version if we
10384 do not have FPA. */
10385 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10386 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10387
10388 /* Internal <-> external register number maps. */
10389 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10390 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10391
10392 set_gdbarch_register_name (gdbarch, arm_register_name);
10393
10394 /* Returning results. */
10395 set_gdbarch_return_value (gdbarch, arm_return_value);
10396
10397 /* Disassembly. */
10398 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10399
10400 /* Minsymbol frobbing. */
10401 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10402 set_gdbarch_coff_make_msymbol_special (gdbarch,
10403 arm_coff_make_msymbol_special);
10404 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10405
10406 /* Thumb-2 IT block support. */
10407 set_gdbarch_adjust_breakpoint_address (gdbarch,
10408 arm_adjust_breakpoint_address);
10409
10410 /* Virtual tables. */
10411 set_gdbarch_vbit_in_delta (gdbarch, 1);
10412
10413 /* Hook in the ABI-specific overrides, if they have been registered. */
10414 gdbarch_init_osabi (info, gdbarch);
10415
10416 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10417
10418 /* Add some default predicates. */
10419 if (is_m)
10420 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10421 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10422 dwarf2_append_unwinders (gdbarch);
10423 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10424 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10425
10426 /* Now we have tuned the configuration, set a few final things,
10427 based on what the OS ABI has told us. */
10428
10429 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10430 binaries are always marked. */
10431 if (tdep->arm_abi == ARM_ABI_AUTO)
10432 tdep->arm_abi = ARM_ABI_APCS;
10433
10434 /* Watchpoints are not steppable. */
10435 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10436
10437 /* We used to default to FPA for generic ARM, but almost nobody
10438 uses that now, and we now provide a way for the user to force
10439 the model. So default to the most useful variant. */
10440 if (tdep->fp_model == ARM_FLOAT_AUTO)
10441 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10442
10443 if (tdep->jb_pc >= 0)
10444 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10445
10446 /* Floating point sizes and format. */
10447 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10448 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10449 {
10450 set_gdbarch_double_format
10451 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10452 set_gdbarch_long_double_format
10453 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10454 }
10455 else
10456 {
10457 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10458 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10459 }
10460
10461 if (have_vfp_pseudos)
10462 {
10463 /* NOTE: These are the only pseudo registers used by
10464 the ARM target at the moment. If more are added, a
10465 little more care in numbering will be needed. */
10466
10467 int num_pseudos = 32;
10468 if (have_neon_pseudos)
10469 num_pseudos += 16;
10470 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10471 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10472 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10473 }
10474
10475 if (tdesc_data)
10476 {
10477 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10478
10479 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10480
10481 /* Override tdesc_register_type to adjust the types of VFP
10482 registers for NEON. */
10483 set_gdbarch_register_type (gdbarch, arm_register_type);
10484 }
10485
  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
10489 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10490 user_reg_add (gdbarch, arm_register_aliases[i].name,
10491 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10492
10493 return gdbarch;
10494 }
10495
10496 static void
10497 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10498 {
10499 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10500
10501 if (tdep == NULL)
10502 return;
10503
10504 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10505 (unsigned long) tdep->lowest_pc);
10506 }
10507
10508 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10509
10510 void
10511 _initialize_arm_tdep (void)
10512 {
10513 struct ui_file *stb;
10514 long length;
10515 struct cmd_list_element *new_set, *new_show;
10516 const char *setname;
10517 const char *setdesc;
10518 const char *const *regnames;
10519 int numregs, i, j;
10520 static char *helptext;
10521 char regdesc[1024], *rdptr = regdesc;
10522 size_t rest = sizeof (regdesc);
10523
10524 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10525
10526 arm_objfile_data_key
10527 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10528
10529 /* Add ourselves to objfile event chain. */
10530 observer_attach_new_objfile (arm_exidx_new_objfile);
10531 arm_exidx_data_key
10532 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10533
10534 /* Register an ELF OS ABI sniffer for ARM binaries. */
10535 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10536 bfd_target_elf_flavour,
10537 arm_elf_osabi_sniffer);
10538
10539 /* Initialize the standard target descriptions. */
10540 initialize_tdesc_arm_with_m ();
10541 initialize_tdesc_arm_with_m_fpa_layout ();
10542 initialize_tdesc_arm_with_m_vfp_d16 ();
10543 initialize_tdesc_arm_with_iwmmxt ();
10544 initialize_tdesc_arm_with_vfpv2 ();
10545 initialize_tdesc_arm_with_vfpv3 ();
10546 initialize_tdesc_arm_with_neon ();
10547
10548 /* Get the number of possible sets of register names defined in opcodes. */
10549 num_disassembly_options = get_arm_regname_num_options ();
10550
10551 /* Add root prefix command for all "set arm"/"show arm" commands. */
10552 add_prefix_cmd ("arm", no_class, set_arm_command,
10553 _("Various ARM-specific commands."),
10554 &setarmcmdlist, "set arm ", 0, &setlist);
10555
10556 add_prefix_cmd ("arm", no_class, show_arm_command,
10557 _("Various ARM-specific commands."),
10558 &showarmcmdlist, "show arm ", 0, &showlist);
10559
10560 /* Sync the opcode insn printer with our register viewer. */
10561 parse_arm_disassembler_option ("reg-names-std");
10562
10563 /* Initialize the array that will be passed to
10564 add_setshow_enum_cmd(). */
10565 valid_disassembly_styles = XNEWVEC (const char *,
10566 num_disassembly_options + 1);
10567 for (i = 0; i < num_disassembly_options; i++)
10568 {
10569 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10570 valid_disassembly_styles[i] = setname;
10571 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10572 rdptr += length;
10573 rest -= length;
10574 /* When we find the default names, tell the disassembler to use
10575 them. */
10576 if (!strcmp (setname, "std"))
10577 {
10578 disassembly_style = setname;
10579 set_arm_regname_option (i);
10580 }
10581 }
10582 /* Mark the end of valid options. */
10583 valid_disassembly_styles[num_disassembly_options] = NULL;
10584
10585 /* Create the help text. */
10586 stb = mem_fileopen ();
10587 fprintf_unfiltered (stb, "%s%s%s",
10588 _("The valid values are:\n"),
10589 regdesc,
10590 _("The default is \"std\"."));
10591 helptext = ui_file_xstrdup (stb, NULL);
10592 ui_file_delete (stb);
10593
10594 add_setshow_enum_cmd("disassembler", no_class,
10595 valid_disassembly_styles, &disassembly_style,
10596 _("Set the disassembly style."),
10597 _("Show the disassembly style."),
10598 helptext,
10599 set_disassembly_style_sfunc,
10600 NULL, /* FIXME: i18n: The disassembly style is
10601 \"%s\". */
10602 &setarmcmdlist, &showarmcmdlist);
10603
10604 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10605 _("Set usage of ARM 32-bit mode."),
10606 _("Show usage of ARM 32-bit mode."),
10607 _("When off, a 26-bit PC will be used."),
10608 NULL,
10609 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10610 mode is %s. */
10611 &setarmcmdlist, &showarmcmdlist);
10612
10613 /* Add a command to allow the user to force the FPU model. */
10614 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10615 _("Set the floating point type."),
10616 _("Show the floating point type."),
10617 _("auto - Determine the FP typefrom the OS-ABI.\n\
10618 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10619 fpa - FPA co-processor (GCC compiled).\n\
10620 softvfp - Software FP with pure-endian doubles.\n\
10621 vfp - VFP co-processor."),
10622 set_fp_model_sfunc, show_fp_model,
10623 &setarmcmdlist, &showarmcmdlist);
10624
10625 /* Add a command to allow the user to force the ABI. */
10626 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10627 _("Set the ABI."),
10628 _("Show the ABI."),
10629 NULL, arm_set_abi, arm_show_abi,
10630 &setarmcmdlist, &showarmcmdlist);
10631
10632 /* Add two commands to allow the user to force the assumed
10633 execution mode. */
10634 add_setshow_enum_cmd ("fallback-mode", class_support,
10635 arm_mode_strings, &arm_fallback_mode_string,
10636 _("Set the mode assumed when symbols are unavailable."),
10637 _("Show the mode assumed when symbols are unavailable."),
10638 NULL, NULL, arm_show_fallback_mode,
10639 &setarmcmdlist, &showarmcmdlist);
10640 add_setshow_enum_cmd ("force-mode", class_support,
10641 arm_mode_strings, &arm_force_mode_string,
10642 _("Set the mode assumed even when symbols are available."),
10643 _("Show the mode assumed even when symbols are available."),
10644 NULL, NULL, arm_show_force_mode,
10645 &setarmcmdlist, &showarmcmdlist);
10646
10647 /* Debugging flag. */
10648 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10649 _("Set ARM debugging."),
10650 _("Show ARM debugging."),
10651 _("When on, arm-specific debugging is enabled."),
10652 NULL,
10653 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10654 &setdebuglist, &showdebuglist);
10655 }
10656
/* ARM-reversible process record data structures.  */

/* Instruction sizes, in bytes, for the three encodings the process
   recorder must decode.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20

/* Allocate a uint32_t array of LENGTH elements into REGS and copy the
   register numbers collected in RECORD_BUF into it.  Does nothing
   when LENGTH is zero; the caller owns (and must free) the
   allocation.  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
do  \
  { \
    unsigned int reg_len = LENGTH; \
    if (reg_len) \
      { \
        REGS = XNEWVEC (uint32_t, reg_len); \
        memcpy(&REGS[0], &RECORD_BUF[0], sizeof(uint32_t)*LENGTH); \
      } \
  } \
while (0)

/* Allocate a struct arm_mem_r array of LENGTH elements into MEMS and
   copy the collected records from RECORD_BUF into it.  Does nothing
   when LENGTH is zero; the caller owns (and must free) the
   allocation.
   NOTE(review): the copy targets &MEMS->len, the first member of the
   first element -- this assumes RECORD_BUF is laid out as alternating
   len/addr words matching struct arm_mem_r; confirm at call sites.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
do  \
  { \
    unsigned int mem_len = LENGTH; \
    if (mem_len) \
      { \
        MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
        memcpy(&MEMS->len, &RECORD_BUF[0], \
               sizeof(struct arm_mem_r) * LENGTH); \
      } \
  } \
while (0)

/* Checks whether insn is already recorded or yet to be decoded. (boolean expression).  */
#define INSN_RECORDED(ARM_RECORD) \
  (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10696
/* ARM memory record structure: one (length, address) pair describing
   a block of memory a recorded instruction is about to modify.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length.  */
  uint32_t addr;   /* Memory address.  */
};

/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
10723
10724
/* Checks ARM SBZ and SBO mandatory fields.

   Verify that the LEN-bit field of INSN starting at bit position
   BIT_NUM (1-based) satisfies a "should be one" (SBO != 0) or "should
   be zero" (SBO == 0) architectural constraint.  Returns 1 when the
   constraint holds (or LEN is zero), 0 otherwise.  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t ones;

  /* An empty field trivially satisfies the constraint.  This check
     must come before the bits () extraction below: with LEN == 0 the
     unsigned "len - 1" would wrap around.  */
  if (!len)
    return 1;

  ones = bits (insn, bit_num - 1, (bit_num - 1) + (len - 1));

  /* For an SBZ check, invert so that the loop below can look for set
     bits in both cases.
     NOTE(review): in the SBZ path "ones & sbo" is always zero, so any
     nonzero ~ones fails immediately -- behavior preserved from the
     original; confirm against callers before changing.  */
  if (!sbo)
    ones = ~ones;

  while (ones)
    {
      if (!(ones & sbo))
	{
	  return 0;
	}
      ones = ones >> 1;
    }

  return 1;
}
10748
/* Result codes returned by the ARM process-record entry points.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};

/* Distinguishes the two misc-store instructions handled by
   arm_record_strx: STRH stores a halfword, STRD a doubleword.  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;

/* Which instruction-set encoding a record belongs to.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
10767
10768
10769 static int
10770 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10771 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10772 {
10773
10774 struct regcache *reg_cache = arm_insn_r->regcache;
10775 ULONGEST u_regval[2]= {0};
10776
10777 uint32_t reg_src1 = 0, reg_src2 = 0;
10778 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10779 uint32_t opcode1 = 0;
10780
10781 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10782 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10783 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10784
10785
10786 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10787 {
10788 /* 1) Handle misc store, immediate offset. */
10789 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10790 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10791 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10792 regcache_raw_read_unsigned (reg_cache, reg_src1,
10793 &u_regval[0]);
10794 if (ARM_PC_REGNUM == reg_src1)
10795 {
10796 /* If R15 was used as Rn, hence current PC+8. */
10797 u_regval[0] = u_regval[0] + 8;
10798 }
10799 offset_8 = (immed_high << 4) | immed_low;
10800 /* Calculate target store address. */
10801 if (14 == arm_insn_r->opcode)
10802 {
10803 tgt_mem_addr = u_regval[0] + offset_8;
10804 }
10805 else
10806 {
10807 tgt_mem_addr = u_regval[0] - offset_8;
10808 }
10809 if (ARM_RECORD_STRH == str_type)
10810 {
10811 record_buf_mem[0] = 2;
10812 record_buf_mem[1] = tgt_mem_addr;
10813 arm_insn_r->mem_rec_count = 1;
10814 }
10815 else if (ARM_RECORD_STRD == str_type)
10816 {
10817 record_buf_mem[0] = 4;
10818 record_buf_mem[1] = tgt_mem_addr;
10819 record_buf_mem[2] = 4;
10820 record_buf_mem[3] = tgt_mem_addr + 4;
10821 arm_insn_r->mem_rec_count = 2;
10822 }
10823 }
10824 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10825 {
10826 /* 2) Store, register offset. */
10827 /* Get Rm. */
10828 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10829 /* Get Rn. */
10830 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10831 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10832 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10833 if (15 == reg_src2)
10834 {
10835 /* If R15 was used as Rn, hence current PC+8. */
10836 u_regval[0] = u_regval[0] + 8;
10837 }
10838 /* Calculate target store address, Rn +/- Rm, register offset. */
10839 if (12 == arm_insn_r->opcode)
10840 {
10841 tgt_mem_addr = u_regval[0] + u_regval[1];
10842 }
10843 else
10844 {
10845 tgt_mem_addr = u_regval[1] - u_regval[0];
10846 }
10847 if (ARM_RECORD_STRH == str_type)
10848 {
10849 record_buf_mem[0] = 2;
10850 record_buf_mem[1] = tgt_mem_addr;
10851 arm_insn_r->mem_rec_count = 1;
10852 }
10853 else if (ARM_RECORD_STRD == str_type)
10854 {
10855 record_buf_mem[0] = 4;
10856 record_buf_mem[1] = tgt_mem_addr;
10857 record_buf_mem[2] = 4;
10858 record_buf_mem[3] = tgt_mem_addr + 4;
10859 arm_insn_r->mem_rec_count = 2;
10860 }
10861 }
10862 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10863 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10864 {
10865 /* 3) Store, immediate pre-indexed. */
10866 /* 5) Store, immediate post-indexed. */
10867 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10868 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10869 offset_8 = (immed_high << 4) | immed_low;
10870 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10871 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10872 /* Calculate target store address, Rn +/- Rm, register offset. */
10873 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10874 {
10875 tgt_mem_addr = u_regval[0] + offset_8;
10876 }
10877 else
10878 {
10879 tgt_mem_addr = u_regval[0] - offset_8;
10880 }
10881 if (ARM_RECORD_STRH == str_type)
10882 {
10883 record_buf_mem[0] = 2;
10884 record_buf_mem[1] = tgt_mem_addr;
10885 arm_insn_r->mem_rec_count = 1;
10886 }
10887 else if (ARM_RECORD_STRD == str_type)
10888 {
10889 record_buf_mem[0] = 4;
10890 record_buf_mem[1] = tgt_mem_addr;
10891 record_buf_mem[2] = 4;
10892 record_buf_mem[3] = tgt_mem_addr + 4;
10893 arm_insn_r->mem_rec_count = 2;
10894 }
10895 /* Record Rn also as it changes. */
10896 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10897 arm_insn_r->reg_rec_count = 1;
10898 }
10899 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10900 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10901 {
10902 /* 4) Store, register pre-indexed. */
10903 /* 6) Store, register post -indexed. */
10904 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10905 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10906 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10907 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10908 /* Calculate target store address, Rn +/- Rm, register offset. */
10909 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10910 {
10911 tgt_mem_addr = u_regval[0] + u_regval[1];
10912 }
10913 else
10914 {
10915 tgt_mem_addr = u_regval[1] - u_regval[0];
10916 }
10917 if (ARM_RECORD_STRH == str_type)
10918 {
10919 record_buf_mem[0] = 2;
10920 record_buf_mem[1] = tgt_mem_addr;
10921 arm_insn_r->mem_rec_count = 1;
10922 }
10923 else if (ARM_RECORD_STRD == str_type)
10924 {
10925 record_buf_mem[0] = 4;
10926 record_buf_mem[1] = tgt_mem_addr;
10927 record_buf_mem[2] = 4;
10928 record_buf_mem[3] = tgt_mem_addr + 4;
10929 arm_insn_r->mem_rec_count = 2;
10930 }
10931 /* Record Rn also as it changes. */
10932 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10933 arm_insn_r->reg_rec_count = 1;
10934 }
10935 return 0;
10936 }
10937
10938 /* Handling ARM extension space insns. */
10939
10940 static int
10941 arm_record_extension_space (insn_decode_record *arm_insn_r)
10942 {
10943 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10944 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10945 uint32_t record_buf[8], record_buf_mem[8];
10946 uint32_t reg_src1 = 0;
10947 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10948 struct regcache *reg_cache = arm_insn_r->regcache;
10949 ULONGEST u_regval = 0;
10950
10951 gdb_assert (!INSN_RECORDED(arm_insn_r));
10952 /* Handle unconditional insn extension space. */
10953
10954 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
10955 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10956 if (arm_insn_r->cond)
10957 {
10958 /* PLD has no affect on architectural state, it just affects
10959 the caches. */
10960 if (5 == ((opcode1 & 0xE0) >> 5))
10961 {
10962 /* BLX(1) */
10963 record_buf[0] = ARM_PS_REGNUM;
10964 record_buf[1] = ARM_LR_REGNUM;
10965 arm_insn_r->reg_rec_count = 2;
10966 }
10967 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
10968 }
10969
10970
10971 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
10972 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
10973 {
10974 ret = -1;
10975 /* Undefined instruction on ARM V5; need to handle if later
10976 versions define it. */
10977 }
10978
10979 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
10980 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
10981 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
10982
10983 /* Handle arithmetic insn extension space. */
10984 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
10985 && !INSN_RECORDED(arm_insn_r))
10986 {
10987 /* Handle MLA(S) and MUL(S). */
10988 if (0 <= insn_op1 && 3 >= insn_op1)
10989 {
10990 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
10991 record_buf[1] = ARM_PS_REGNUM;
10992 arm_insn_r->reg_rec_count = 2;
10993 }
10994 else if (4 <= insn_op1 && 15 >= insn_op1)
10995 {
10996 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
10997 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
10998 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
10999 record_buf[2] = ARM_PS_REGNUM;
11000 arm_insn_r->reg_rec_count = 3;
11001 }
11002 }
11003
11004 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11005 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11006 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11007
11008 /* Handle control insn extension space. */
11009
11010 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11011 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11012 {
11013 if (!bit (arm_insn_r->arm_insn,25))
11014 {
11015 if (!bits (arm_insn_r->arm_insn, 4, 7))
11016 {
11017 if ((0 == insn_op1) || (2 == insn_op1))
11018 {
11019 /* MRS. */
11020 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11021 arm_insn_r->reg_rec_count = 1;
11022 }
11023 else if (1 == insn_op1)
11024 {
11025 /* CSPR is going to be changed. */
11026 record_buf[0] = ARM_PS_REGNUM;
11027 arm_insn_r->reg_rec_count = 1;
11028 }
11029 else if (3 == insn_op1)
11030 {
11031 /* SPSR is going to be changed. */
11032 /* We need to get SPSR value, which is yet to be done. */
11033 printf_unfiltered (_("Process record does not support "
11034 "instruction 0x%0x at address %s.\n"),
11035 arm_insn_r->arm_insn,
11036 paddress (arm_insn_r->gdbarch,
11037 arm_insn_r->this_addr));
11038 return -1;
11039 }
11040 }
11041 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11042 {
11043 if (1 == insn_op1)
11044 {
11045 /* BX. */
11046 record_buf[0] = ARM_PS_REGNUM;
11047 arm_insn_r->reg_rec_count = 1;
11048 }
11049 else if (3 == insn_op1)
11050 {
11051 /* CLZ. */
11052 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11053 arm_insn_r->reg_rec_count = 1;
11054 }
11055 }
11056 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11057 {
11058 /* BLX. */
11059 record_buf[0] = ARM_PS_REGNUM;
11060 record_buf[1] = ARM_LR_REGNUM;
11061 arm_insn_r->reg_rec_count = 2;
11062 }
11063 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11064 {
11065 /* QADD, QSUB, QDADD, QDSUB */
11066 record_buf[0] = ARM_PS_REGNUM;
11067 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11068 arm_insn_r->reg_rec_count = 2;
11069 }
11070 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11071 {
11072 /* BKPT. */
11073 record_buf[0] = ARM_PS_REGNUM;
11074 record_buf[1] = ARM_LR_REGNUM;
11075 arm_insn_r->reg_rec_count = 2;
11076
11077 /* Save SPSR also;how? */
11078 printf_unfiltered (_("Process record does not support "
11079 "instruction 0x%0x at address %s.\n"),
11080 arm_insn_r->arm_insn,
11081 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11082 return -1;
11083 }
11084 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11085 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11086 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11087 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11088 )
11089 {
11090 if (0 == insn_op1 || 1 == insn_op1)
11091 {
11092 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11093 /* We dont do optimization for SMULW<y> where we
11094 need only Rd. */
11095 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11096 record_buf[1] = ARM_PS_REGNUM;
11097 arm_insn_r->reg_rec_count = 2;
11098 }
11099 else if (2 == insn_op1)
11100 {
11101 /* SMLAL<x><y>. */
11102 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11103 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11104 arm_insn_r->reg_rec_count = 2;
11105 }
11106 else if (3 == insn_op1)
11107 {
11108 /* SMUL<x><y>. */
11109 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11110 arm_insn_r->reg_rec_count = 1;
11111 }
11112 }
11113 }
11114 else
11115 {
11116 /* MSR : immediate form. */
11117 if (1 == insn_op1)
11118 {
11119 /* CSPR is going to be changed. */
11120 record_buf[0] = ARM_PS_REGNUM;
11121 arm_insn_r->reg_rec_count = 1;
11122 }
11123 else if (3 == insn_op1)
11124 {
11125 /* SPSR is going to be changed. */
11126 /* we need to get SPSR value, which is yet to be done */
11127 printf_unfiltered (_("Process record does not support "
11128 "instruction 0x%0x at address %s.\n"),
11129 arm_insn_r->arm_insn,
11130 paddress (arm_insn_r->gdbarch,
11131 arm_insn_r->this_addr));
11132 return -1;
11133 }
11134 }
11135 }
11136
11137 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11138 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11139 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11140
11141 /* Handle load/store insn extension space. */
11142
11143 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11144 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11145 && !INSN_RECORDED(arm_insn_r))
11146 {
11147 /* SWP/SWPB. */
11148 if (0 == insn_op1)
11149 {
11150 /* These insn, changes register and memory as well. */
11151 /* SWP or SWPB insn. */
11152 /* Get memory address given by Rn. */
11153 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11154 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11155 /* SWP insn ?, swaps word. */
11156 if (8 == arm_insn_r->opcode)
11157 {
11158 record_buf_mem[0] = 4;
11159 }
11160 else
11161 {
11162 /* SWPB insn, swaps only byte. */
11163 record_buf_mem[0] = 1;
11164 }
11165 record_buf_mem[1] = u_regval;
11166 arm_insn_r->mem_rec_count = 1;
11167 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11168 arm_insn_r->reg_rec_count = 1;
11169 }
11170 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11171 {
11172 /* STRH. */
11173 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11174 ARM_RECORD_STRH);
11175 }
11176 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11177 {
11178 /* LDRD. */
11179 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11180 record_buf[1] = record_buf[0] + 1;
11181 arm_insn_r->reg_rec_count = 2;
11182 }
11183 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11184 {
11185 /* STRD. */
11186 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11187 ARM_RECORD_STRD);
11188 }
11189 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11190 {
11191 /* LDRH, LDRSB, LDRSH. */
11192 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11193 arm_insn_r->reg_rec_count = 1;
11194 }
11195
11196 }
11197
11198 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11199 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11200 && !INSN_RECORDED(arm_insn_r))
11201 {
11202 ret = -1;
11203 /* Handle coprocessor insn extension space. */
11204 }
11205
11206 /* To be done for ARMv5 and later; as of now we return -1. */
11207 if (-1 == ret)
11208 printf_unfiltered (_("Process record does not support instruction x%0x "
11209 "at address %s.\n"),arm_insn_r->arm_insn,
11210 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11211
11212
11213 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11214 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11215
11216 return ret;
11217 }
11218
11219 /* Handling opcode 000 insns. */
11220
11221 static int
11222 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11223 {
11224 struct regcache *reg_cache = arm_insn_r->regcache;
11225 uint32_t record_buf[8], record_buf_mem[8];
11226 ULONGEST u_regval[2] = {0};
11227
11228 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11229 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11230 uint32_t opcode1 = 0;
11231
11232 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11233 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11234 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11235
11236 /* Data processing insn /multiply insn. */
11237 if (9 == arm_insn_r->decode
11238 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11239 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11240 {
11241 /* Handle multiply instructions. */
11242 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11243 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11244 {
11245 /* Handle MLA and MUL. */
11246 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11247 record_buf[1] = ARM_PS_REGNUM;
11248 arm_insn_r->reg_rec_count = 2;
11249 }
11250 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11251 {
11252 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11253 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11254 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11255 record_buf[2] = ARM_PS_REGNUM;
11256 arm_insn_r->reg_rec_count = 3;
11257 }
11258 }
11259 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11260 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11261 {
11262 /* Handle misc load insns, as 20th bit (L = 1). */
11263 /* LDR insn has a capability to do branching, if
11264 MOV LR, PC is precceded by LDR insn having Rn as R15
11265 in that case, it emulates branch and link insn, and hence we
11266 need to save CSPR and PC as well. I am not sure this is right
11267 place; as opcode = 010 LDR insn make this happen, if R15 was
11268 used. */
11269 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11270 if (15 != reg_dest)
11271 {
11272 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11273 arm_insn_r->reg_rec_count = 1;
11274 }
11275 else
11276 {
11277 record_buf[0] = reg_dest;
11278 record_buf[1] = ARM_PS_REGNUM;
11279 arm_insn_r->reg_rec_count = 2;
11280 }
11281 }
11282 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11283 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11284 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11285 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11286 {
11287 /* Handle MSR insn. */
11288 if (9 == arm_insn_r->opcode)
11289 {
11290 /* CSPR is going to be changed. */
11291 record_buf[0] = ARM_PS_REGNUM;
11292 arm_insn_r->reg_rec_count = 1;
11293 }
11294 else
11295 {
11296 /* SPSR is going to be changed. */
11297 /* How to read SPSR value? */
11298 printf_unfiltered (_("Process record does not support instruction "
11299 "0x%0x at address %s.\n"),
11300 arm_insn_r->arm_insn,
11301 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11302 return -1;
11303 }
11304 }
11305 else if (9 == arm_insn_r->decode
11306 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11307 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11308 {
11309 /* Handling SWP, SWPB. */
11310 /* These insn, changes register and memory as well. */
11311 /* SWP or SWPB insn. */
11312
11313 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11314 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11315 /* SWP insn ?, swaps word. */
11316 if (8 == arm_insn_r->opcode)
11317 {
11318 record_buf_mem[0] = 4;
11319 }
11320 else
11321 {
11322 /* SWPB insn, swaps only byte. */
11323 record_buf_mem[0] = 1;
11324 }
11325 record_buf_mem[1] = u_regval[0];
11326 arm_insn_r->mem_rec_count = 1;
11327 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11328 arm_insn_r->reg_rec_count = 1;
11329 }
11330 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11331 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11332 {
11333 /* Handle BLX, branch and link/exchange. */
11334 if (9 == arm_insn_r->opcode)
11335 {
11336 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11337 and R14 stores the return address. */
11338 record_buf[0] = ARM_PS_REGNUM;
11339 record_buf[1] = ARM_LR_REGNUM;
11340 arm_insn_r->reg_rec_count = 2;
11341 }
11342 }
11343 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11344 {
11345 /* Handle enhanced software breakpoint insn, BKPT. */
11346 /* CPSR is changed to be executed in ARM state, disabling normal
11347 interrupts, entering abort mode. */
11348 /* According to high vector configuration PC is set. */
11349 /* user hit breakpoint and type reverse, in
11350 that case, we need to go back with previous CPSR and
11351 Program Counter. */
11352 record_buf[0] = ARM_PS_REGNUM;
11353 record_buf[1] = ARM_LR_REGNUM;
11354 arm_insn_r->reg_rec_count = 2;
11355
11356 /* Save SPSR also; how? */
11357 printf_unfiltered (_("Process record does not support instruction "
11358 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11359 paddress (arm_insn_r->gdbarch,
11360 arm_insn_r->this_addr));
11361 return -1;
11362 }
11363 else if (11 == arm_insn_r->decode
11364 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11365 {
11366 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11367
11368 /* Handle str(x) insn */
11369 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11370 ARM_RECORD_STRH);
11371 }
11372 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11373 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11374 {
11375 /* Handle BX, branch and link/exchange. */
11376 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11377 record_buf[0] = ARM_PS_REGNUM;
11378 arm_insn_r->reg_rec_count = 1;
11379 }
11380 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11381 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11382 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11383 {
11384 /* Count leading zeros: CLZ. */
11385 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11386 arm_insn_r->reg_rec_count = 1;
11387 }
11388 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11389 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11390 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11391 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11392 )
11393 {
11394 /* Handle MRS insn. */
11395 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11396 arm_insn_r->reg_rec_count = 1;
11397 }
11398 else if (arm_insn_r->opcode <= 15)
11399 {
11400 /* Normal data processing insns. */
11401 /* Out of 11 shifter operands mode, all the insn modifies destination
11402 register, which is specified by 13-16 decode. */
11403 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11404 record_buf[1] = ARM_PS_REGNUM;
11405 arm_insn_r->reg_rec_count = 2;
11406 }
11407 else
11408 {
11409 return -1;
11410 }
11411
11412 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11413 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11414 return 0;
11415 }
11416
11417 /* Handling opcode 001 insns. */
11418
11419 static int
11420 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11421 {
11422 uint32_t record_buf[8], record_buf_mem[8];
11423
11424 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11425 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11426
11427 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11428 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11429 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11430 )
11431 {
11432 /* Handle MSR insn. */
11433 if (9 == arm_insn_r->opcode)
11434 {
11435 /* CSPR is going to be changed. */
11436 record_buf[0] = ARM_PS_REGNUM;
11437 arm_insn_r->reg_rec_count = 1;
11438 }
11439 else
11440 {
11441 /* SPSR is going to be changed. */
11442 }
11443 }
11444 else if (arm_insn_r->opcode <= 15)
11445 {
11446 /* Normal data processing insns. */
11447 /* Out of 11 shifter operands mode, all the insn modifies destination
11448 register, which is specified by 13-16 decode. */
11449 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11450 record_buf[1] = ARM_PS_REGNUM;
11451 arm_insn_r->reg_rec_count = 2;
11452 }
11453 else
11454 {
11455 return -1;
11456 }
11457
11458 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11459 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11460 return 0;
11461 }
11462
11463 /* Handle ARM mode instructions with opcode 010. */
11464
11465 static int
11466 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11467 {
11468 struct regcache *reg_cache = arm_insn_r->regcache;
11469
11470 uint32_t reg_base , reg_dest;
11471 uint32_t offset_12, tgt_mem_addr;
11472 uint32_t record_buf[8], record_buf_mem[8];
11473 unsigned char wback;
11474 ULONGEST u_regval;
11475
11476 /* Calculate wback. */
11477 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11478 || (bit (arm_insn_r->arm_insn, 21) == 1);
11479
11480 arm_insn_r->reg_rec_count = 0;
11481 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11482
11483 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11484 {
11485 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11486 and LDRT. */
11487
11488 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11489 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11490
11491 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11492 preceeds a LDR instruction having R15 as reg_base, it
11493 emulates a branch and link instruction, and hence we need to save
11494 CPSR and PC as well. */
11495 if (ARM_PC_REGNUM == reg_dest)
11496 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11497
11498 /* If wback is true, also save the base register, which is going to be
11499 written to. */
11500 if (wback)
11501 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11502 }
11503 else
11504 {
11505 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11506
11507 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11508 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11509
11510 /* Handle bit U. */
11511 if (bit (arm_insn_r->arm_insn, 23))
11512 {
11513 /* U == 1: Add the offset. */
11514 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11515 }
11516 else
11517 {
11518 /* U == 0: subtract the offset. */
11519 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11520 }
11521
11522 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11523 bytes. */
11524 if (bit (arm_insn_r->arm_insn, 22))
11525 {
11526 /* STRB and STRBT: 1 byte. */
11527 record_buf_mem[0] = 1;
11528 }
11529 else
11530 {
11531 /* STR and STRT: 4 bytes. */
11532 record_buf_mem[0] = 4;
11533 }
11534
11535 /* Handle bit P. */
11536 if (bit (arm_insn_r->arm_insn, 24))
11537 record_buf_mem[1] = tgt_mem_addr;
11538 else
11539 record_buf_mem[1] = (uint32_t) u_regval;
11540
11541 arm_insn_r->mem_rec_count = 1;
11542
11543 /* If wback is true, also save the base register, which is going to be
11544 written to. */
11545 if (wback)
11546 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11547 }
11548
11549 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11550 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11551 return 0;
11552 }
11553
11554 /* Handling opcode 011 insns. */
11555
11556 static int
11557 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11558 {
11559 struct regcache *reg_cache = arm_insn_r->regcache;
11560
11561 uint32_t shift_imm = 0;
11562 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11563 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11564 uint32_t record_buf[8], record_buf_mem[8];
11565
11566 LONGEST s_word;
11567 ULONGEST u_regval[2];
11568
11569 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11570 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11571
11572 /* Handle enhanced store insns and LDRD DSP insn,
11573 order begins according to addressing modes for store insns
11574 STRH insn. */
11575
11576 /* LDR or STR? */
11577 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11578 {
11579 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11580 /* LDR insn has a capability to do branching, if
11581 MOV LR, PC is precedded by LDR insn having Rn as R15
11582 in that case, it emulates branch and link insn, and hence we
11583 need to save CSPR and PC as well. */
11584 if (15 != reg_dest)
11585 {
11586 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11587 arm_insn_r->reg_rec_count = 1;
11588 }
11589 else
11590 {
11591 record_buf[0] = reg_dest;
11592 record_buf[1] = ARM_PS_REGNUM;
11593 arm_insn_r->reg_rec_count = 2;
11594 }
11595 }
11596 else
11597 {
11598 if (! bits (arm_insn_r->arm_insn, 4, 11))
11599 {
11600 /* Store insn, register offset and register pre-indexed,
11601 register post-indexed. */
11602 /* Get Rm. */
11603 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11604 /* Get Rn. */
11605 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11606 regcache_raw_read_unsigned (reg_cache, reg_src1
11607 , &u_regval[0]);
11608 regcache_raw_read_unsigned (reg_cache, reg_src2
11609 , &u_regval[1]);
11610 if (15 == reg_src2)
11611 {
11612 /* If R15 was used as Rn, hence current PC+8. */
11613 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11614 u_regval[0] = u_regval[0] + 8;
11615 }
11616 /* Calculate target store address, Rn +/- Rm, register offset. */
11617 /* U == 1. */
11618 if (bit (arm_insn_r->arm_insn, 23))
11619 {
11620 tgt_mem_addr = u_regval[0] + u_regval[1];
11621 }
11622 else
11623 {
11624 tgt_mem_addr = u_regval[1] - u_regval[0];
11625 }
11626
11627 switch (arm_insn_r->opcode)
11628 {
11629 /* STR. */
11630 case 8:
11631 case 12:
11632 /* STR. */
11633 case 9:
11634 case 13:
11635 /* STRT. */
11636 case 1:
11637 case 5:
11638 /* STR. */
11639 case 0:
11640 case 4:
11641 record_buf_mem[0] = 4;
11642 break;
11643
11644 /* STRB. */
11645 case 10:
11646 case 14:
11647 /* STRB. */
11648 case 11:
11649 case 15:
11650 /* STRBT. */
11651 case 3:
11652 case 7:
11653 /* STRB. */
11654 case 2:
11655 case 6:
11656 record_buf_mem[0] = 1;
11657 break;
11658
11659 default:
11660 gdb_assert_not_reached ("no decoding pattern found");
11661 break;
11662 }
11663 record_buf_mem[1] = tgt_mem_addr;
11664 arm_insn_r->mem_rec_count = 1;
11665
11666 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11667 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11668 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11669 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11670 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11671 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11672 )
11673 {
11674 /* Rn is going to be changed in pre-indexed mode and
11675 post-indexed mode as well. */
11676 record_buf[0] = reg_src2;
11677 arm_insn_r->reg_rec_count = 1;
11678 }
11679 }
11680 else
11681 {
11682 /* Store insn, scaled register offset; scaled pre-indexed. */
11683 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11684 /* Get Rm. */
11685 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11686 /* Get Rn. */
11687 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11688 /* Get shift_imm. */
11689 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11690 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11691 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11692 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11693 /* Offset_12 used as shift. */
11694 switch (offset_12)
11695 {
11696 case 0:
11697 /* Offset_12 used as index. */
11698 offset_12 = u_regval[0] << shift_imm;
11699 break;
11700
11701 case 1:
11702 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11703 break;
11704
11705 case 2:
11706 if (!shift_imm)
11707 {
11708 if (bit (u_regval[0], 31))
11709 {
11710 offset_12 = 0xFFFFFFFF;
11711 }
11712 else
11713 {
11714 offset_12 = 0;
11715 }
11716 }
11717 else
11718 {
11719 /* This is arithmetic shift. */
11720 offset_12 = s_word >> shift_imm;
11721 }
11722 break;
11723
11724 case 3:
11725 if (!shift_imm)
11726 {
11727 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11728 &u_regval[1]);
11729 /* Get C flag value and shift it by 31. */
11730 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11731 | (u_regval[0]) >> 1);
11732 }
11733 else
11734 {
11735 offset_12 = (u_regval[0] >> shift_imm) \
11736 | (u_regval[0] <<
11737 (sizeof(uint32_t) - shift_imm));
11738 }
11739 break;
11740
11741 default:
11742 gdb_assert_not_reached ("no decoding pattern found");
11743 break;
11744 }
11745
11746 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11747 /* bit U set. */
11748 if (bit (arm_insn_r->arm_insn, 23))
11749 {
11750 tgt_mem_addr = u_regval[1] + offset_12;
11751 }
11752 else
11753 {
11754 tgt_mem_addr = u_regval[1] - offset_12;
11755 }
11756
11757 switch (arm_insn_r->opcode)
11758 {
11759 /* STR. */
11760 case 8:
11761 case 12:
11762 /* STR. */
11763 case 9:
11764 case 13:
11765 /* STRT. */
11766 case 1:
11767 case 5:
11768 /* STR. */
11769 case 0:
11770 case 4:
11771 record_buf_mem[0] = 4;
11772 break;
11773
11774 /* STRB. */
11775 case 10:
11776 case 14:
11777 /* STRB. */
11778 case 11:
11779 case 15:
11780 /* STRBT. */
11781 case 3:
11782 case 7:
11783 /* STRB. */
11784 case 2:
11785 case 6:
11786 record_buf_mem[0] = 1;
11787 break;
11788
11789 default:
11790 gdb_assert_not_reached ("no decoding pattern found");
11791 break;
11792 }
11793 record_buf_mem[1] = tgt_mem_addr;
11794 arm_insn_r->mem_rec_count = 1;
11795
11796 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11797 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11798 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11799 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11800 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11801 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11802 )
11803 {
11804 /* Rn is going to be changed in register scaled pre-indexed
11805 mode,and scaled post indexed mode. */
11806 record_buf[0] = reg_src2;
11807 arm_insn_r->reg_rec_count = 1;
11808 }
11809 }
11810 }
11811
11812 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11813 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11814 return 0;
11815 }
11816
11817 /* Handle ARM mode instructions with opcode 100. */
11818
11819 static int
11820 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11821 {
11822 struct regcache *reg_cache = arm_insn_r->regcache;
11823 uint32_t register_count = 0, register_bits;
11824 uint32_t reg_base, addr_mode;
11825 uint32_t record_buf[24], record_buf_mem[48];
11826 uint32_t wback;
11827 ULONGEST u_regval;
11828
11829 /* Fetch the list of registers. */
11830 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11831 arm_insn_r->reg_rec_count = 0;
11832
11833 /* Fetch the base register that contains the address we are loading data
11834 to. */
11835 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11836
11837 /* Calculate wback. */
11838 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11839
11840 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11841 {
11842 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11843
11844 /* Find out which registers are going to be loaded from memory. */
11845 while (register_bits)
11846 {
11847 if (register_bits & 0x00000001)
11848 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11849 register_bits = register_bits >> 1;
11850 register_count++;
11851 }
11852
11853
11854 /* If wback is true, also save the base register, which is going to be
11855 written to. */
11856 if (wback)
11857 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11858
11859 /* Save the CPSR register. */
11860 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11861 }
11862 else
11863 {
11864 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11865
11866 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11867
11868 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11869
11870 /* Find out how many registers are going to be stored to memory. */
11871 while (register_bits)
11872 {
11873 if (register_bits & 0x00000001)
11874 register_count++;
11875 register_bits = register_bits >> 1;
11876 }
11877
11878 switch (addr_mode)
11879 {
11880 /* STMDA (STMED): Decrement after. */
11881 case 0:
11882 record_buf_mem[1] = (uint32_t) u_regval
11883 - register_count * INT_REGISTER_SIZE + 4;
11884 break;
11885 /* STM (STMIA, STMEA): Increment after. */
11886 case 1:
11887 record_buf_mem[1] = (uint32_t) u_regval;
11888 break;
11889 /* STMDB (STMFD): Decrement before. */
11890 case 2:
11891 record_buf_mem[1] = (uint32_t) u_regval
11892 - register_count * INT_REGISTER_SIZE;
11893 break;
11894 /* STMIB (STMFA): Increment before. */
11895 case 3:
11896 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11897 break;
11898 default:
11899 gdb_assert_not_reached ("no decoding pattern found");
11900 break;
11901 }
11902
11903 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11904 arm_insn_r->mem_rec_count = 1;
11905
11906 /* If wback is true, also save the base register, which is going to be
11907 written to. */
11908 if (wback)
11909 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11910 }
11911
11912 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11913 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11914 return 0;
11915 }
11916
11917 /* Handling opcode 101 insns. */
11918
11919 static int
11920 arm_record_b_bl (insn_decode_record *arm_insn_r)
11921 {
11922 uint32_t record_buf[8];
11923
11924 /* Handle B, BL, BLX(1) insns. */
11925 /* B simply branches so we do nothing here. */
11926 /* Note: BLX(1) doesnt fall here but instead it falls into
11927 extension space. */
11928 if (bit (arm_insn_r->arm_insn, 24))
11929 {
11930 record_buf[0] = ARM_LR_REGNUM;
11931 arm_insn_r->reg_rec_count = 1;
11932 }
11933
11934 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11935
11936 return 0;
11937 }
11938
11939 /* Handling opcode 110 insns. */
11940
11941 static int
11942 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11943 {
11944 printf_unfiltered (_("Process record does not support instruction "
11945 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11946 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11947
11948 return -1;
11949 }
11950
/* Record handler for vector data transfer instructions (8/16/32-bit
   transfers between ARM core registers and the VFP/NEON extension
   registers: VMOV, VMRS, VMSR, VDUP).  Records the register about to be
   written and returns 0.  */

static int
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
{
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  /* num_regs is used as the base index of the pseudo (single-precision)
     registers below.  */
  const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  /* NOTE(review): reg_v is taken from bits 21-23 (the same field as
     bits_a, the opcode "A" field) yet is used below as a Vn/Vd register
     number; the ARM encodings place Vn/Vd in bits 16-19 -- confirm
     against the ARM ARM before relying on the recorded register.  */
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  bit_l = bit (arm_insn_r->arm_insn, 20);   /* L: 1 = to ARM core reg.  */
  bit_c = bit (arm_insn_r->arm_insn, 8);    /* C: scalar transfer.  */

  /* Handle VMOV instruction.  */
  if (bit_l && bit_c)
    {
      /* Transfer from a scalar to an ARM core register: Rt changes.  */
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
	{
	  /* NOTE(review): bit 20 is bit_l, which is known to be 1 in
	     this branch, so the else arm below looks unreachable --
	     verify before simplifying.  */
	  if (bit (arm_insn_r->arm_insn, 20))
	    record_buf[0] = reg_t;
	  else
	    record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
			    (reg_v << 1));

	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMRS instruction.  */
      else if (bits_a == 0x07)
	{
	  /* VMRS with Rt == 15 writes the APSR flags instead of R15.  */
	  if (reg_t == 15)
	    reg_t = ARM_PS_REGNUM;

	  record_buf[0] = reg_t;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
	{
	  /* NOTE(review): bit 20 is bit_l == 0 here, so only the else
	     arm (recording the extension register) can execute --
	     verify before simplifying.  */
	  if (bit (arm_insn_r->arm_insn, 20))
	    record_buf[0] = reg_t;
	  else
	    record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
			    (reg_v << 1));

	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMSR instruction.  */
      else if (bits_a == 0x07)
	{
	  record_buf[0] = ARM_FPSCR_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && bit_c)
    {
      /* Handle VMOV instruction.  */
      if (!(bits_a & 0x04))
	{
	  /* Core register to scalar: one D register is modified.  */
	  record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
			  + ARM_D0_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VDUP instruction.  */
      else
	{
	  /* Bit 21 (Q) selects a quadword destination: two consecutive
	     D registers change instead of one.  */
	  if (bit (arm_insn_r->arm_insn, 21))
	    {
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else
	    {
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
12046
12047 /* Record handler for extension register load/store instructions. */
12048
12049 static int
12050 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12051 {
12052 uint32_t opcode, single_reg;
12053 uint8_t op_vldm_vstm;
12054 uint32_t record_buf[8], record_buf_mem[128];
12055 ULONGEST u_regval = 0;
12056
12057 struct regcache *reg_cache = arm_insn_r->regcache;
12058 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12059
12060 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12061 single_reg = bit (arm_insn_r->arm_insn, 8);
12062 op_vldm_vstm = opcode & 0x1b;
12063
12064 /* Handle VMOV instructions. */
12065 if ((opcode & 0x1e) == 0x04)
12066 {
12067 if (bit (arm_insn_r->arm_insn, 4))
12068 {
12069 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12070 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12071 arm_insn_r->reg_rec_count = 2;
12072 }
12073 else
12074 {
12075 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12076 | bit (arm_insn_r->arm_insn, 5);
12077
12078 if (!single_reg)
12079 {
12080 record_buf[0] = num_regs + reg_m;
12081 record_buf[1] = num_regs + reg_m + 1;
12082 arm_insn_r->reg_rec_count = 2;
12083 }
12084 else
12085 {
12086 record_buf[0] = reg_m + ARM_D0_REGNUM;
12087 arm_insn_r->reg_rec_count = 1;
12088 }
12089 }
12090 }
12091 /* Handle VSTM and VPUSH instructions. */
12092 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12093 || op_vldm_vstm == 0x12)
12094 {
12095 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12096 uint32_t memory_index = 0;
12097
12098 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12099 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12100 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12101 imm_off32 = imm_off8 << 24;
12102 memory_count = imm_off8;
12103
12104 if (bit (arm_insn_r->arm_insn, 23))
12105 start_address = u_regval;
12106 else
12107 start_address = u_regval - imm_off32;
12108
12109 if (bit (arm_insn_r->arm_insn, 21))
12110 {
12111 record_buf[0] = reg_rn;
12112 arm_insn_r->reg_rec_count = 1;
12113 }
12114
12115 while (memory_count > 0)
12116 {
12117 if (!single_reg)
12118 {
12119 record_buf_mem[memory_index] = start_address;
12120 record_buf_mem[memory_index + 1] = 4;
12121 start_address = start_address + 4;
12122 memory_index = memory_index + 2;
12123 }
12124 else
12125 {
12126 record_buf_mem[memory_index] = start_address;
12127 record_buf_mem[memory_index + 1] = 4;
12128 record_buf_mem[memory_index + 2] = start_address + 4;
12129 record_buf_mem[memory_index + 3] = 4;
12130 start_address = start_address + 8;
12131 memory_index = memory_index + 4;
12132 }
12133 memory_count--;
12134 }
12135 arm_insn_r->mem_rec_count = (memory_index >> 1);
12136 }
12137 /* Handle VLDM instructions. */
12138 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12139 || op_vldm_vstm == 0x13)
12140 {
12141 uint32_t reg_count, reg_vd;
12142 uint32_t reg_index = 0;
12143
12144 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12145 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12146
12147 if (single_reg)
12148 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12149 else
12150 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12151
12152 if (bit (arm_insn_r->arm_insn, 21))
12153 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12154
12155 while (reg_count > 0)
12156 {
12157 if (single_reg)
12158 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12159 else
12160 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12161
12162 reg_count--;
12163 }
12164 arm_insn_r->reg_rec_count = reg_index;
12165 }
12166 /* VSTR Vector store register. */
12167 else if ((opcode & 0x13) == 0x10)
12168 {
12169 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12170 uint32_t memory_index = 0;
12171
12172 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12173 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12174 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12175 imm_off32 = imm_off8 << 24;
12176 memory_count = imm_off8;
12177
12178 if (bit (arm_insn_r->arm_insn, 23))
12179 start_address = u_regval + imm_off32;
12180 else
12181 start_address = u_regval - imm_off32;
12182
12183 if (single_reg)
12184 {
12185 record_buf_mem[memory_index] = start_address;
12186 record_buf_mem[memory_index + 1] = 4;
12187 arm_insn_r->mem_rec_count = 1;
12188 }
12189 else
12190 {
12191 record_buf_mem[memory_index] = start_address;
12192 record_buf_mem[memory_index + 1] = 4;
12193 record_buf_mem[memory_index + 2] = start_address + 4;
12194 record_buf_mem[memory_index + 3] = 4;
12195 arm_insn_r->mem_rec_count = 2;
12196 }
12197 }
12198 /* VLDR Vector load register. */
12199 else if ((opcode & 0x13) == 0x11)
12200 {
12201 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12202
12203 if (!single_reg)
12204 {
12205 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12206 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12207 }
12208 else
12209 {
12210 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12211 record_buf[0] = num_regs + reg_vd;
12212 }
12213 arm_insn_r->reg_rec_count = 1;
12214 }
12215
12216 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12217 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12218 return 0;
12219 }
12220
12221 /* Record handler for arm/thumb mode VFP data processing instructions. */
12222
12223 static int
12224 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12225 {
12226 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12227 uint32_t record_buf[4];
12228 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12229 enum insn_types curr_insn_type = INSN_INV;
12230
12231 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12232 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12233 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12234 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12235 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12236 bit_d = bit (arm_insn_r->arm_insn, 22);
12237 opc1 = opc1 & 0x04;
12238
12239 /* Handle VMLA, VMLS. */
12240 if (opc1 == 0x00)
12241 {
12242 if (bit (arm_insn_r->arm_insn, 10))
12243 {
12244 if (bit (arm_insn_r->arm_insn, 6))
12245 curr_insn_type = INSN_T0;
12246 else
12247 curr_insn_type = INSN_T1;
12248 }
12249 else
12250 {
12251 if (dp_op_sz)
12252 curr_insn_type = INSN_T1;
12253 else
12254 curr_insn_type = INSN_T2;
12255 }
12256 }
12257 /* Handle VNMLA, VNMLS, VNMUL. */
12258 else if (opc1 == 0x01)
12259 {
12260 if (dp_op_sz)
12261 curr_insn_type = INSN_T1;
12262 else
12263 curr_insn_type = INSN_T2;
12264 }
12265 /* Handle VMUL. */
12266 else if (opc1 == 0x02 && !(opc3 & 0x01))
12267 {
12268 if (bit (arm_insn_r->arm_insn, 10))
12269 {
12270 if (bit (arm_insn_r->arm_insn, 6))
12271 curr_insn_type = INSN_T0;
12272 else
12273 curr_insn_type = INSN_T1;
12274 }
12275 else
12276 {
12277 if (dp_op_sz)
12278 curr_insn_type = INSN_T1;
12279 else
12280 curr_insn_type = INSN_T2;
12281 }
12282 }
12283 /* Handle VADD, VSUB. */
12284 else if (opc1 == 0x03)
12285 {
12286 if (!bit (arm_insn_r->arm_insn, 9))
12287 {
12288 if (bit (arm_insn_r->arm_insn, 6))
12289 curr_insn_type = INSN_T0;
12290 else
12291 curr_insn_type = INSN_T1;
12292 }
12293 else
12294 {
12295 if (dp_op_sz)
12296 curr_insn_type = INSN_T1;
12297 else
12298 curr_insn_type = INSN_T2;
12299 }
12300 }
12301 /* Handle VDIV. */
12302 else if (opc1 == 0x0b)
12303 {
12304 if (dp_op_sz)
12305 curr_insn_type = INSN_T1;
12306 else
12307 curr_insn_type = INSN_T2;
12308 }
12309 /* Handle all other vfp data processing instructions. */
12310 else if (opc1 == 0x0b)
12311 {
12312 /* Handle VMOV. */
12313 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12314 {
12315 if (bit (arm_insn_r->arm_insn, 4))
12316 {
12317 if (bit (arm_insn_r->arm_insn, 6))
12318 curr_insn_type = INSN_T0;
12319 else
12320 curr_insn_type = INSN_T1;
12321 }
12322 else
12323 {
12324 if (dp_op_sz)
12325 curr_insn_type = INSN_T1;
12326 else
12327 curr_insn_type = INSN_T2;
12328 }
12329 }
12330 /* Handle VNEG and VABS. */
12331 else if ((opc2 == 0x01 && opc3 == 0x01)
12332 || (opc2 == 0x00 && opc3 == 0x03))
12333 {
12334 if (!bit (arm_insn_r->arm_insn, 11))
12335 {
12336 if (bit (arm_insn_r->arm_insn, 6))
12337 curr_insn_type = INSN_T0;
12338 else
12339 curr_insn_type = INSN_T1;
12340 }
12341 else
12342 {
12343 if (dp_op_sz)
12344 curr_insn_type = INSN_T1;
12345 else
12346 curr_insn_type = INSN_T2;
12347 }
12348 }
12349 /* Handle VSQRT. */
12350 else if (opc2 == 0x01 && opc3 == 0x03)
12351 {
12352 if (dp_op_sz)
12353 curr_insn_type = INSN_T1;
12354 else
12355 curr_insn_type = INSN_T2;
12356 }
12357 /* Handle VCVT. */
12358 else if (opc2 == 0x07 && opc3 == 0x03)
12359 {
12360 if (!dp_op_sz)
12361 curr_insn_type = INSN_T1;
12362 else
12363 curr_insn_type = INSN_T2;
12364 }
12365 else if (opc3 & 0x01)
12366 {
12367 /* Handle VCVT. */
12368 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12369 {
12370 if (!bit (arm_insn_r->arm_insn, 18))
12371 curr_insn_type = INSN_T2;
12372 else
12373 {
12374 if (dp_op_sz)
12375 curr_insn_type = INSN_T1;
12376 else
12377 curr_insn_type = INSN_T2;
12378 }
12379 }
12380 /* Handle VCVT. */
12381 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12382 {
12383 if (dp_op_sz)
12384 curr_insn_type = INSN_T1;
12385 else
12386 curr_insn_type = INSN_T2;
12387 }
12388 /* Handle VCVTB, VCVTT. */
12389 else if ((opc2 & 0x0e) == 0x02)
12390 curr_insn_type = INSN_T2;
12391 /* Handle VCMP, VCMPE. */
12392 else if ((opc2 & 0x0e) == 0x04)
12393 curr_insn_type = INSN_T3;
12394 }
12395 }
12396
12397 switch (curr_insn_type)
12398 {
12399 case INSN_T0:
12400 reg_vd = reg_vd | (bit_d << 4);
12401 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12402 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12403 arm_insn_r->reg_rec_count = 2;
12404 break;
12405
12406 case INSN_T1:
12407 reg_vd = reg_vd | (bit_d << 4);
12408 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12409 arm_insn_r->reg_rec_count = 1;
12410 break;
12411
12412 case INSN_T2:
12413 reg_vd = (reg_vd << 1) | bit_d;
12414 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12415 arm_insn_r->reg_rec_count = 1;
12416 break;
12417
12418 case INSN_T3:
12419 record_buf[0] = ARM_FPSCR_REGNUM;
12420 arm_insn_r->reg_rec_count = 1;
12421 break;
12422
12423 default:
12424 gdb_assert_not_reached ("no decoding pattern found");
12425 break;
12426 }
12427
12428 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12429 return 0;
12430 }
12431
12432 /* Handling opcode 110 insns. */
12433
12434 static int
12435 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12436 {
12437 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12438
12439 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12440 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12441 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12442
12443 if ((coproc & 0x0e) == 0x0a)
12444 {
12445 /* Handle extension register ld/st instructions. */
12446 if (!(op1 & 0x20))
12447 return arm_record_exreg_ld_st_insn (arm_insn_r);
12448
12449 /* 64-bit transfers between arm core and extension registers. */
12450 if ((op1 & 0x3e) == 0x04)
12451 return arm_record_exreg_ld_st_insn (arm_insn_r);
12452 }
12453 else
12454 {
12455 /* Handle coprocessor ld/st instructions. */
12456 if (!(op1 & 0x3a))
12457 {
12458 /* Store. */
12459 if (!op1_ebit)
12460 return arm_record_unsupported_insn (arm_insn_r);
12461 else
12462 /* Load. */
12463 return arm_record_unsupported_insn (arm_insn_r);
12464 }
12465
12466 /* Move to coprocessor from two arm core registers. */
12467 if (op1 == 0x4)
12468 return arm_record_unsupported_insn (arm_insn_r);
12469
12470 /* Move to two arm core registers from coprocessor. */
12471 if (op1 == 0x5)
12472 {
12473 uint32_t reg_t[2];
12474
12475 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12476 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12477 arm_insn_r->reg_rec_count = 2;
12478
12479 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12480 return 0;
12481 }
12482 }
12483 return arm_record_unsupported_insn (arm_insn_r);
12484 }
12485
12486 /* Handling opcode 111 insns. */
12487
12488 static int
12489 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12490 {
12491 uint32_t op, op1_sbit, op1_ebit, coproc;
12492 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12493 struct regcache *reg_cache = arm_insn_r->regcache;
12494 ULONGEST u_regval = 0;
12495
12496 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12497 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12498 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12499 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12500 op = bit (arm_insn_r->arm_insn, 4);
12501
12502 /* Handle arm SWI/SVC system call instructions. */
12503 if (op1_sbit)
12504 {
12505 if (tdep->arm_syscall_record != NULL)
12506 {
12507 ULONGEST svc_operand, svc_number;
12508
12509 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12510
12511 if (svc_operand) /* OABI. */
12512 svc_number = svc_operand - 0x900000;
12513 else /* EABI. */
12514 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12515
12516 return tdep->arm_syscall_record (reg_cache, svc_number);
12517 }
12518 else
12519 {
12520 printf_unfiltered (_("no syscall record support\n"));
12521 return -1;
12522 }
12523 }
12524
12525 if ((coproc & 0x0e) == 0x0a)
12526 {
12527 /* VFP data-processing instructions. */
12528 if (!op1_sbit && !op)
12529 return arm_record_vfp_data_proc_insn (arm_insn_r);
12530
12531 /* Advanced SIMD, VFP instructions. */
12532 if (!op1_sbit && op)
12533 return arm_record_vdata_transfer_insn (arm_insn_r);
12534 }
12535 else
12536 {
12537 /* Coprocessor data operations. */
12538 if (!op1_sbit && !op)
12539 return arm_record_unsupported_insn (arm_insn_r);
12540
12541 /* Move to Coprocessor from ARM core register. */
12542 if (!op1_sbit && !op1_ebit && op)
12543 return arm_record_unsupported_insn (arm_insn_r);
12544
12545 /* Move to arm core register from coprocessor. */
12546 if (!op1_sbit && op1_ebit && op)
12547 {
12548 uint32_t record_buf[1];
12549
12550 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12551 if (record_buf[0] == 15)
12552 record_buf[0] = ARM_PS_REGNUM;
12553
12554 arm_insn_r->reg_rec_count = 1;
12555 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12556 record_buf);
12557 return 0;
12558 }
12559 }
12560
12561 return arm_record_unsupported_insn (arm_insn_r);
12562 }
12563
12564 /* Handling opcode 000 insns. */
12565
12566 static int
12567 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12568 {
12569 uint32_t record_buf[8];
12570 uint32_t reg_src1 = 0;
12571
12572 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12573
12574 record_buf[0] = ARM_PS_REGNUM;
12575 record_buf[1] = reg_src1;
12576 thumb_insn_r->reg_rec_count = 2;
12577
12578 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12579
12580 return 0;
12581 }
12582
12583
12584 /* Handling opcode 001 insns. */
12585
12586 static int
12587 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12588 {
12589 uint32_t record_buf[8];
12590 uint32_t reg_src1 = 0;
12591
12592 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12593
12594 record_buf[0] = ARM_PS_REGNUM;
12595 record_buf[1] = reg_src1;
12596 thumb_insn_r->reg_rec_count = 2;
12597
12598 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12599
12600 return 0;
12601 }
12602
12603 /* Handling opcode 010 insns. */
12604
12605 static int
12606 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12607 {
12608 struct regcache *reg_cache = thumb_insn_r->regcache;
12609 uint32_t record_buf[8], record_buf_mem[8];
12610
12611 uint32_t reg_src1 = 0, reg_src2 = 0;
12612 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12613
12614 ULONGEST u_regval[2] = {0};
12615
12616 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12617
12618 if (bit (thumb_insn_r->arm_insn, 12))
12619 {
12620 /* Handle load/store register offset. */
12621 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12622 if (opcode2 >= 12 && opcode2 <= 15)
12623 {
12624 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12625 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12626 record_buf[0] = reg_src1;
12627 thumb_insn_r->reg_rec_count = 1;
12628 }
12629 else if (opcode2 >= 8 && opcode2 <= 10)
12630 {
12631 /* STR(2), STRB(2), STRH(2) . */
12632 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12633 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12634 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12635 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12636 if (8 == opcode2)
12637 record_buf_mem[0] = 4; /* STR (2). */
12638 else if (10 == opcode2)
12639 record_buf_mem[0] = 1; /* STRB (2). */
12640 else if (9 == opcode2)
12641 record_buf_mem[0] = 2; /* STRH (2). */
12642 record_buf_mem[1] = u_regval[0] + u_regval[1];
12643 thumb_insn_r->mem_rec_count = 1;
12644 }
12645 }
12646 else if (bit (thumb_insn_r->arm_insn, 11))
12647 {
12648 /* Handle load from literal pool. */
12649 /* LDR(3). */
12650 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12651 record_buf[0] = reg_src1;
12652 thumb_insn_r->reg_rec_count = 1;
12653 }
12654 else if (opcode1)
12655 {
12656 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12657 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12658 if ((3 == opcode2) && (!opcode3))
12659 {
12660 /* Branch with exchange. */
12661 record_buf[0] = ARM_PS_REGNUM;
12662 thumb_insn_r->reg_rec_count = 1;
12663 }
12664 else
12665 {
12666 /* Format 8; special data processing insns. */
12667 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12668 record_buf[0] = ARM_PS_REGNUM;
12669 record_buf[1] = reg_src1;
12670 thumb_insn_r->reg_rec_count = 2;
12671 }
12672 }
12673 else
12674 {
12675 /* Format 5; data processing insns. */
12676 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12677 if (bit (thumb_insn_r->arm_insn, 7))
12678 {
12679 reg_src1 = reg_src1 + 8;
12680 }
12681 record_buf[0] = ARM_PS_REGNUM;
12682 record_buf[1] = reg_src1;
12683 thumb_insn_r->reg_rec_count = 2;
12684 }
12685
12686 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12687 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12688 record_buf_mem);
12689
12690 return 0;
12691 }
12692
12693 /* Handling opcode 001 insns. */
12694
12695 static int
12696 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12697 {
12698 struct regcache *reg_cache = thumb_insn_r->regcache;
12699 uint32_t record_buf[8], record_buf_mem[8];
12700
12701 uint32_t reg_src1 = 0;
12702 uint32_t opcode = 0, immed_5 = 0;
12703
12704 ULONGEST u_regval = 0;
12705
12706 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12707
12708 if (opcode)
12709 {
12710 /* LDR(1). */
12711 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12712 record_buf[0] = reg_src1;
12713 thumb_insn_r->reg_rec_count = 1;
12714 }
12715 else
12716 {
12717 /* STR(1). */
12718 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12719 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12720 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12721 record_buf_mem[0] = 4;
12722 record_buf_mem[1] = u_regval + (immed_5 * 4);
12723 thumb_insn_r->mem_rec_count = 1;
12724 }
12725
12726 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12727 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12728 record_buf_mem);
12729
12730 return 0;
12731 }
12732
12733 /* Handling opcode 100 insns. */
12734
12735 static int
12736 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12737 {
12738 struct regcache *reg_cache = thumb_insn_r->regcache;
12739 uint32_t record_buf[8], record_buf_mem[8];
12740
12741 uint32_t reg_src1 = 0;
12742 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12743
12744 ULONGEST u_regval = 0;
12745
12746 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12747
12748 if (3 == opcode)
12749 {
12750 /* LDR(4). */
12751 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12752 record_buf[0] = reg_src1;
12753 thumb_insn_r->reg_rec_count = 1;
12754 }
12755 else if (1 == opcode)
12756 {
12757 /* LDRH(1). */
12758 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12759 record_buf[0] = reg_src1;
12760 thumb_insn_r->reg_rec_count = 1;
12761 }
12762 else if (2 == opcode)
12763 {
12764 /* STR(3). */
12765 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12766 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12767 record_buf_mem[0] = 4;
12768 record_buf_mem[1] = u_regval + (immed_8 * 4);
12769 thumb_insn_r->mem_rec_count = 1;
12770 }
12771 else if (0 == opcode)
12772 {
12773 /* STRH(1). */
12774 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12775 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12776 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12777 record_buf_mem[0] = 2;
12778 record_buf_mem[1] = u_regval + (immed_5 * 2);
12779 thumb_insn_r->mem_rec_count = 1;
12780 }
12781
12782 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12783 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12784 record_buf_mem);
12785
12786 return 0;
12787 }
12788
12789 /* Handling opcode 101 insns. */
12790
12791 static int
12792 thumb_record_misc (insn_decode_record *thumb_insn_r)
12793 {
12794 struct regcache *reg_cache = thumb_insn_r->regcache;
12795
12796 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12797 uint32_t register_bits = 0, register_count = 0;
12798 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12799 uint32_t record_buf[24], record_buf_mem[48];
12800 uint32_t reg_src1;
12801
12802 ULONGEST u_regval = 0;
12803
12804 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12805 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12806 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12807
12808 if (14 == opcode2)
12809 {
12810 /* POP. */
12811 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12812 while (register_bits)
12813 {
12814 if (register_bits & 0x00000001)
12815 record_buf[index++] = register_count;
12816 register_bits = register_bits >> 1;
12817 register_count++;
12818 }
12819 record_buf[index++] = ARM_PS_REGNUM;
12820 record_buf[index++] = ARM_SP_REGNUM;
12821 thumb_insn_r->reg_rec_count = index;
12822 }
12823 else if (10 == opcode2)
12824 {
12825 /* PUSH. */
12826 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12827 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12828 while (register_bits)
12829 {
12830 if (register_bits & 0x00000001)
12831 register_count++;
12832 register_bits = register_bits >> 1;
12833 }
12834 start_address = u_regval - \
12835 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12836 thumb_insn_r->mem_rec_count = register_count;
12837 while (register_count)
12838 {
12839 record_buf_mem[(register_count * 2) - 1] = start_address;
12840 record_buf_mem[(register_count * 2) - 2] = 4;
12841 start_address = start_address + 4;
12842 register_count--;
12843 }
12844 record_buf[0] = ARM_SP_REGNUM;
12845 thumb_insn_r->reg_rec_count = 1;
12846 }
12847 else if (0x1E == opcode1)
12848 {
12849 /* BKPT insn. */
12850 /* Handle enhanced software breakpoint insn, BKPT. */
12851 /* CPSR is changed to be executed in ARM state, disabling normal
12852 interrupts, entering abort mode. */
12853 /* According to high vector configuration PC is set. */
12854 /* User hits breakpoint and type reverse, in that case, we need to go back with
12855 previous CPSR and Program Counter. */
12856 record_buf[0] = ARM_PS_REGNUM;
12857 record_buf[1] = ARM_LR_REGNUM;
12858 thumb_insn_r->reg_rec_count = 2;
12859 /* We need to save SPSR value, which is not yet done. */
12860 printf_unfiltered (_("Process record does not support instruction "
12861 "0x%0x at address %s.\n"),
12862 thumb_insn_r->arm_insn,
12863 paddress (thumb_insn_r->gdbarch,
12864 thumb_insn_r->this_addr));
12865 return -1;
12866 }
12867 else if ((0 == opcode) || (1 == opcode))
12868 {
12869 /* ADD(5), ADD(6). */
12870 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12871 record_buf[0] = reg_src1;
12872 thumb_insn_r->reg_rec_count = 1;
12873 }
12874 else if (2 == opcode)
12875 {
12876 /* ADD(7), SUB(4). */
12877 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12878 record_buf[0] = ARM_SP_REGNUM;
12879 thumb_insn_r->reg_rec_count = 1;
12880 }
12881
12882 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12883 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12884 record_buf_mem);
12885
12886 return 0;
12887 }
12888
12889 /* Handling opcode 110 insns. */
12890
12891 static int
12892 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12893 {
12894 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12895 struct regcache *reg_cache = thumb_insn_r->regcache;
12896
12897 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12898 uint32_t reg_src1 = 0;
12899 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12900 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12901 uint32_t record_buf[24], record_buf_mem[48];
12902
12903 ULONGEST u_regval = 0;
12904
12905 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12906 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12907
12908 if (1 == opcode2)
12909 {
12910
12911 /* LDMIA. */
12912 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12913 /* Get Rn. */
12914 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12915 while (register_bits)
12916 {
12917 if (register_bits & 0x00000001)
12918 record_buf[index++] = register_count;
12919 register_bits = register_bits >> 1;
12920 register_count++;
12921 }
12922 record_buf[index++] = reg_src1;
12923 thumb_insn_r->reg_rec_count = index;
12924 }
12925 else if (0 == opcode2)
12926 {
12927 /* It handles both STMIA. */
12928 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12929 /* Get Rn. */
12930 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12931 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12932 while (register_bits)
12933 {
12934 if (register_bits & 0x00000001)
12935 register_count++;
12936 register_bits = register_bits >> 1;
12937 }
12938 start_address = u_regval;
12939 thumb_insn_r->mem_rec_count = register_count;
12940 while (register_count)
12941 {
12942 record_buf_mem[(register_count * 2) - 1] = start_address;
12943 record_buf_mem[(register_count * 2) - 2] = 4;
12944 start_address = start_address + 4;
12945 register_count--;
12946 }
12947 }
12948 else if (0x1F == opcode1)
12949 {
12950 /* Handle arm syscall insn. */
12951 if (tdep->arm_syscall_record != NULL)
12952 {
12953 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12954 ret = tdep->arm_syscall_record (reg_cache, u_regval);
12955 }
12956 else
12957 {
12958 printf_unfiltered (_("no syscall record support\n"));
12959 return -1;
12960 }
12961 }
12962
12963 /* B (1), conditional branch is automatically taken care in process_record,
12964 as PC is saved there. */
12965
12966 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12967 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12968 record_buf_mem);
12969
12970 return ret;
12971 }
12972
12973 /* Handling opcode 111 insns. */
12974
12975 static int
12976 thumb_record_branch (insn_decode_record *thumb_insn_r)
12977 {
12978 uint32_t record_buf[8];
12979 uint32_t bits_h = 0;
12980
12981 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
12982
12983 if (2 == bits_h || 3 == bits_h)
12984 {
12985 /* BL */
12986 record_buf[0] = ARM_LR_REGNUM;
12987 thumb_insn_r->reg_rec_count = 1;
12988 }
12989 else if (1 == bits_h)
12990 {
12991 /* BLX(1). */
12992 record_buf[0] = ARM_PS_REGNUM;
12993 record_buf[1] = ARM_LR_REGNUM;
12994 thumb_insn_r->reg_rec_count = 2;
12995 }
12996
12997 /* B(2) is automatically taken care in process_record, as PC is
12998 saved there. */
12999
13000 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13001
13002 return 0;
13003 }
13004
/* Handler for thumb2 load/store multiple instructions (LDM/STM in
   all addressing variants, plus RFE/SRS).  Records the registers and
   memory words the instruction will modify.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions.
	     Every register named in the list, plus the base register
	     (write-back) and the flags, may change.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  Count the listed
	     registers to know how many memory words are clobbered.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address calculation for STM/STMIA/STMEA
		 (increment-after): stores begin at the base value.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address calculation for STMDB/STMFD
		 (decrement-before): stores end at the base value.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  /* Base register (write-back) and flags may also change.  */
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
13098
13099 /* Handler for thumb2 load/store (dual/exclusive) and table branch
13100 instructions. */
13101
13102 static int
13103 thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
13104 {
13105 struct regcache *reg_cache = thumb2_insn_r->regcache;
13106
13107 uint32_t reg_rd, reg_rn, offset_imm;
13108 uint32_t reg_dest1, reg_dest2;
13109 uint32_t address, offset_addr;
13110 uint32_t record_buf[8], record_buf_mem[8];
13111 uint32_t op1, op2, op3;
13112 LONGEST s_word;
13113
13114 ULONGEST u_regval[2];
13115
13116 op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
13117 op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
13118 op3 = bits (thumb2_insn_r->arm_insn, 4, 7);
13119
13120 if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
13121 {
13122 if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
13123 {
13124 reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
13125 record_buf[0] = reg_dest1;
13126 record_buf[1] = ARM_PS_REGNUM;
13127 thumb2_insn_r->reg_rec_count = 2;
13128 }
13129
13130 if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
13131 {
13132 reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13133 record_buf[2] = reg_dest2;
13134 thumb2_insn_r->reg_rec_count = 3;
13135 }
13136 }
13137 else
13138 {
13139 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13140 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);
13141
13142 if (0 == op1 && 0 == op2)
13143 {
13144 /* Handle STREX. */
13145 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13146 address = u_regval[0] + (offset_imm * 4);
13147 record_buf_mem[0] = 4;
13148 record_buf_mem[1] = address;
13149 thumb2_insn_r->mem_rec_count = 1;
13150 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13151 record_buf[0] = reg_rd;
13152 thumb2_insn_r->reg_rec_count = 1;
13153 }
13154 else if (1 == op1 && 0 == op2)
13155 {
13156 reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
13157 record_buf[0] = reg_rd;
13158 thumb2_insn_r->reg_rec_count = 1;
13159 address = u_regval[0];
13160 record_buf_mem[1] = address;
13161
13162 if (4 == op3)
13163 {
13164 /* Handle STREXB. */
13165 record_buf_mem[0] = 1;
13166 thumb2_insn_r->mem_rec_count = 1;
13167 }
13168 else if (5 == op3)
13169 {
13170 /* Handle STREXH. */
13171 record_buf_mem[0] = 2 ;
13172 thumb2_insn_r->mem_rec_count = 1;
13173 }
13174 else if (7 == op3)
13175 {
13176 /* Handle STREXD. */
13177 address = u_regval[0];
13178 record_buf_mem[0] = 4;
13179 record_buf_mem[2] = 4;
13180 record_buf_mem[3] = address + 4;
13181 thumb2_insn_r->mem_rec_count = 2;
13182 }
13183 }
13184 else
13185 {
13186 offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
13187
13188 if (bit (thumb2_insn_r->arm_insn, 24))
13189 {
13190 if (bit (thumb2_insn_r->arm_insn, 23))
13191 offset_addr = u_regval[0] + (offset_imm * 4);
13192 else
13193 offset_addr = u_regval[0] - (offset_imm * 4);
13194
13195 address = offset_addr;
13196 }
13197 else
13198 address = u_regval[0];
13199
13200 record_buf_mem[0] = 4;
13201 record_buf_mem[1] = address;
13202 record_buf_mem[2] = 4;
13203 record_buf_mem[3] = address + 4;
13204 thumb2_insn_r->mem_rec_count = 2;
13205 record_buf[0] = reg_rn;
13206 thumb2_insn_r->reg_rec_count = 1;
13207 }
13208 }
13209
13210 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13211 record_buf);
13212 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13213 record_buf_mem);
13214 return ARM_RECORD_SUCCESS;
13215 }
13216
13217 /* Handler for thumb2 data processing (shift register and modified immediate)
13218 instructions. */
13219
13220 static int
13221 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13222 {
13223 uint32_t reg_rd, op;
13224 uint32_t record_buf[8];
13225
13226 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13227 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13228
13229 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13230 {
13231 record_buf[0] = ARM_PS_REGNUM;
13232 thumb2_insn_r->reg_rec_count = 1;
13233 }
13234 else
13235 {
13236 record_buf[0] = reg_rd;
13237 record_buf[1] = ARM_PS_REGNUM;
13238 thumb2_insn_r->reg_rec_count = 2;
13239 }
13240
13241 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13242 record_buf);
13243 return ARM_RECORD_SUCCESS;
13244 }
13245
13246 /* Generic handler for thumb2 instructions which effect destination and PS
13247 registers. */
13248
13249 static int
13250 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13251 {
13252 uint32_t reg_rd;
13253 uint32_t record_buf[8];
13254
13255 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13256
13257 record_buf[0] = reg_rd;
13258 record_buf[1] = ARM_PS_REGNUM;
13259 thumb2_insn_r->reg_rec_count = 2;
13260
13261 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13262 record_buf);
13263 return ARM_RECORD_SUCCESS;
13264 }
13265
13266 /* Handler for thumb2 branch and miscellaneous control instructions. */
13267
13268 static int
13269 thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
13270 {
13271 uint32_t op, op1, op2;
13272 uint32_t record_buf[8];
13273
13274 op = bits (thumb2_insn_r->arm_insn, 20, 26);
13275 op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
13276 op2 = bits (thumb2_insn_r->arm_insn, 8, 11);
13277
13278 /* Handle MSR insn. */
13279 if (!(op1 & 0x2) && 0x38 == op)
13280 {
13281 if (!(op2 & 0x3))
13282 {
13283 /* CPSR is going to be changed. */
13284 record_buf[0] = ARM_PS_REGNUM;
13285 thumb2_insn_r->reg_rec_count = 1;
13286 }
13287 else
13288 {
13289 arm_record_unsupported_insn(thumb2_insn_r);
13290 return -1;
13291 }
13292 }
13293 else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
13294 {
13295 /* BLX. */
13296 record_buf[0] = ARM_PS_REGNUM;
13297 record_buf[1] = ARM_LR_REGNUM;
13298 thumb2_insn_r->reg_rec_count = 2;
13299 }
13300
13301 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13302 record_buf);
13303 return ARM_RECORD_SUCCESS;
13304 }
13305
/* Handler for thumb2 store single data item instructions (STRB/STRH/STR,
   immediate and register forms).  Computes the effective store address
   from Rn (plus any immediate or shifted-register offset), records the
   stored bytes (1, 2 or 4 depending on op1), and records Rn itself to
   cover the writeback forms.  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  /* u_regval[0] holds Rn, u_regval[1] holds Rm (register-offset form).  */
  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: 12-bit unsigned immediate offset.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Handle STRB (register): address = Rn + (Rm << imm2).  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* 8-bit immediate form; bit 10 selects pre-indexed addressing,
	     bit 9 selects add vs. subtract of the offset.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    /* Post-indexed: store goes to the unmodified base address.  */
	    address = u_regval[0];
	}
    }

  /* op1 selects the access width (bit 23 of the insn contributes the top
     bit of op1, hence the paired case labels).  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	/* Loads and other op1 values should never be routed here.  */
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  /* One memory record: (length, address) pair consumed by MEM_ALLOC.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  /* Rn is recorded unconditionally so the writeback forms are covered.  */
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
13395
13396 /* Handler for thumb2 load memory hints instructions. */
13397
13398 static int
13399 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13400 {
13401 uint32_t record_buf[8];
13402 uint32_t reg_rt, reg_rn;
13403
13404 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13405 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13406
13407 if (ARM_PC_REGNUM != reg_rt)
13408 {
13409 record_buf[0] = reg_rt;
13410 record_buf[1] = reg_rn;
13411 record_buf[2] = ARM_PS_REGNUM;
13412 thumb2_insn_r->reg_rec_count = 3;
13413
13414 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13415 record_buf);
13416 return ARM_RECORD_SUCCESS;
13417 }
13418
13419 return ARM_RECORD_FAILURE;
13420 }
13421
13422 /* Handler for thumb2 load word instructions. */
13423
13424 static int
13425 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13426 {
13427 uint32_t opcode1 = 0, opcode2 = 0;
13428 uint32_t record_buf[8];
13429
13430 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13431 record_buf[1] = ARM_PS_REGNUM;
13432 thumb2_insn_r->reg_rec_count = 2;
13433
13434 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13435 record_buf);
13436 return ARM_RECORD_SUCCESS;
13437 }
13438
13439 /* Handler for thumb2 long multiply, long multiply accumulate, and
13440 divide instructions. */
13441
13442 static int
13443 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13444 {
13445 uint32_t opcode1 = 0, opcode2 = 0;
13446 uint32_t record_buf[8];
13447 uint32_t reg_src1 = 0;
13448
13449 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13450 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13451
13452 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13453 {
13454 /* Handle SMULL, UMULL, SMULAL. */
13455 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13456 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13457 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13458 record_buf[2] = ARM_PS_REGNUM;
13459 thumb2_insn_r->reg_rec_count = 3;
13460 }
13461 else if (1 == opcode1 || 3 == opcode2)
13462 {
13463 /* Handle SDIV and UDIV. */
13464 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13465 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13466 record_buf[2] = ARM_PS_REGNUM;
13467 thumb2_insn_r->reg_rec_count = 3;
13468 }
13469 else
13470 return ARM_RECORD_FAILURE;
13471
13472 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13473 record_buf);
13474 return ARM_RECORD_SUCCESS;
13475 }
13476
13477 /* Record handler for thumb32 coprocessor instructions. */
13478
13479 static int
13480 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13481 {
13482 if (bit (thumb2_insn_r->arm_insn, 25))
13483 return arm_record_coproc_data_proc (thumb2_insn_r);
13484 else
13485 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13486 }
13487
13488 /* Record handler for advance SIMD structure load/store instructions. */
13489
13490 static int
13491 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13492 {
13493 struct regcache *reg_cache = thumb2_insn_r->regcache;
13494 uint32_t l_bit, a_bit, b_bits;
13495 uint32_t record_buf[128], record_buf_mem[128];
13496 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13497 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13498 uint8_t f_ebytes;
13499
13500 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13501 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13502 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13503 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13504 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13505 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13506 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13507 f_esize = 8 * f_ebytes;
13508 f_elem = 8 / f_ebytes;
13509
13510 if (!l_bit)
13511 {
13512 ULONGEST u_regval = 0;
13513 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13514 address = u_regval;
13515
13516 if (!a_bit)
13517 {
13518 /* Handle VST1. */
13519 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13520 {
13521 if (b_bits == 0x07)
13522 bf_regs = 1;
13523 else if (b_bits == 0x0a)
13524 bf_regs = 2;
13525 else if (b_bits == 0x06)
13526 bf_regs = 3;
13527 else if (b_bits == 0x02)
13528 bf_regs = 4;
13529 else
13530 bf_regs = 0;
13531
13532 for (index_r = 0; index_r < bf_regs; index_r++)
13533 {
13534 for (index_e = 0; index_e < f_elem; index_e++)
13535 {
13536 record_buf_mem[index_m++] = f_ebytes;
13537 record_buf_mem[index_m++] = address;
13538 address = address + f_ebytes;
13539 thumb2_insn_r->mem_rec_count += 1;
13540 }
13541 }
13542 }
13543 /* Handle VST2. */
13544 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13545 {
13546 if (b_bits == 0x09 || b_bits == 0x08)
13547 bf_regs = 1;
13548 else if (b_bits == 0x03)
13549 bf_regs = 2;
13550 else
13551 bf_regs = 0;
13552
13553 for (index_r = 0; index_r < bf_regs; index_r++)
13554 for (index_e = 0; index_e < f_elem; index_e++)
13555 {
13556 for (loop_t = 0; loop_t < 2; loop_t++)
13557 {
13558 record_buf_mem[index_m++] = f_ebytes;
13559 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13560 thumb2_insn_r->mem_rec_count += 1;
13561 }
13562 address = address + (2 * f_ebytes);
13563 }
13564 }
13565 /* Handle VST3. */
13566 else if ((b_bits & 0x0e) == 0x04)
13567 {
13568 for (index_e = 0; index_e < f_elem; index_e++)
13569 {
13570 for (loop_t = 0; loop_t < 3; loop_t++)
13571 {
13572 record_buf_mem[index_m++] = f_ebytes;
13573 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13574 thumb2_insn_r->mem_rec_count += 1;
13575 }
13576 address = address + (3 * f_ebytes);
13577 }
13578 }
13579 /* Handle VST4. */
13580 else if (!(b_bits & 0x0e))
13581 {
13582 for (index_e = 0; index_e < f_elem; index_e++)
13583 {
13584 for (loop_t = 0; loop_t < 4; loop_t++)
13585 {
13586 record_buf_mem[index_m++] = f_ebytes;
13587 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13588 thumb2_insn_r->mem_rec_count += 1;
13589 }
13590 address = address + (4 * f_ebytes);
13591 }
13592 }
13593 }
13594 else
13595 {
13596 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13597
13598 if (bft_size == 0x00)
13599 f_ebytes = 1;
13600 else if (bft_size == 0x01)
13601 f_ebytes = 2;
13602 else if (bft_size == 0x02)
13603 f_ebytes = 4;
13604 else
13605 f_ebytes = 0;
13606
13607 /* Handle VST1. */
13608 if (!(b_bits & 0x0b) || b_bits == 0x08)
13609 thumb2_insn_r->mem_rec_count = 1;
13610 /* Handle VST2. */
13611 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13612 thumb2_insn_r->mem_rec_count = 2;
13613 /* Handle VST3. */
13614 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13615 thumb2_insn_r->mem_rec_count = 3;
13616 /* Handle VST4. */
13617 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13618 thumb2_insn_r->mem_rec_count = 4;
13619
13620 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13621 {
13622 record_buf_mem[index_m] = f_ebytes;
13623 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13624 }
13625 }
13626 }
13627 else
13628 {
13629 if (!a_bit)
13630 {
13631 /* Handle VLD1. */
13632 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13633 thumb2_insn_r->reg_rec_count = 1;
13634 /* Handle VLD2. */
13635 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13636 thumb2_insn_r->reg_rec_count = 2;
13637 /* Handle VLD3. */
13638 else if ((b_bits & 0x0e) == 0x04)
13639 thumb2_insn_r->reg_rec_count = 3;
13640 /* Handle VLD4. */
13641 else if (!(b_bits & 0x0e))
13642 thumb2_insn_r->reg_rec_count = 4;
13643 }
13644 else
13645 {
13646 /* Handle VLD1. */
13647 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13648 thumb2_insn_r->reg_rec_count = 1;
13649 /* Handle VLD2. */
13650 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13651 thumb2_insn_r->reg_rec_count = 2;
13652 /* Handle VLD3. */
13653 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13654 thumb2_insn_r->reg_rec_count = 3;
13655 /* Handle VLD4. */
13656 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13657 thumb2_insn_r->reg_rec_count = 4;
13658
13659 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13660 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13661 }
13662 }
13663
13664 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13665 {
13666 record_buf[index_r] = reg_rn;
13667 thumb2_insn_r->reg_rec_count += 1;
13668 }
13669
13670 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13671 record_buf);
13672 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13673 record_buf_mem);
13674 return 0;
13675 }
13676
13677 /* Decodes thumb2 instruction type and invokes its record handler. */
13678
13679 static unsigned int
13680 thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
13681 {
13682 uint32_t op, op1, op2;
13683
13684 op = bit (thumb2_insn_r->arm_insn, 15);
13685 op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
13686 op2 = bits (thumb2_insn_r->arm_insn, 20, 26);
13687
13688 if (op1 == 0x01)
13689 {
13690 if (!(op2 & 0x64 ))
13691 {
13692 /* Load/store multiple instruction. */
13693 return thumb2_record_ld_st_multiple (thumb2_insn_r);
13694 }
13695 else if (!((op2 & 0x64) ^ 0x04))
13696 {
13697 /* Load/store (dual/exclusive) and table branch instruction. */
13698 return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
13699 }
13700 else if (!((op2 & 0x20) ^ 0x20))
13701 {
13702 /* Data-processing (shifted register). */
13703 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13704 }
13705 else if (op2 & 0x40)
13706 {
13707 /* Co-processor instructions. */
13708 return thumb2_record_coproc_insn (thumb2_insn_r);
13709 }
13710 }
13711 else if (op1 == 0x02)
13712 {
13713 if (op)
13714 {
13715 /* Branches and miscellaneous control instructions. */
13716 return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
13717 }
13718 else if (op2 & 0x20)
13719 {
13720 /* Data-processing (plain binary immediate) instruction. */
13721 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13722 }
13723 else
13724 {
13725 /* Data-processing (modified immediate). */
13726 return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
13727 }
13728 }
13729 else if (op1 == 0x03)
13730 {
13731 if (!(op2 & 0x71 ))
13732 {
13733 /* Store single data item. */
13734 return thumb2_record_str_single_data (thumb2_insn_r);
13735 }
13736 else if (!((op2 & 0x71) ^ 0x10))
13737 {
13738 /* Advanced SIMD or structure load/store instructions. */
13739 return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
13740 }
13741 else if (!((op2 & 0x67) ^ 0x01))
13742 {
13743 /* Load byte, memory hints instruction. */
13744 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13745 }
13746 else if (!((op2 & 0x67) ^ 0x03))
13747 {
13748 /* Load halfword, memory hints instruction. */
13749 return thumb2_record_ld_mem_hints (thumb2_insn_r);
13750 }
13751 else if (!((op2 & 0x67) ^ 0x05))
13752 {
13753 /* Load word instruction. */
13754 return thumb2_record_ld_word (thumb2_insn_r);
13755 }
13756 else if (!((op2 & 0x70) ^ 0x20))
13757 {
13758 /* Data-processing (register) instruction. */
13759 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13760 }
13761 else if (!((op2 & 0x78) ^ 0x30))
13762 {
13763 /* Multiply, multiply accumulate, abs diff instruction. */
13764 return thumb2_record_ps_dest_generic (thumb2_insn_r);
13765 }
13766 else if (!((op2 & 0x78) ^ 0x38))
13767 {
13768 /* Long multiply, long multiply accumulate, and divide. */
13769 return thumb2_record_lmul_lmla_div (thumb2_insn_r);
13770 }
13771 else if (op2 & 0x40)
13772 {
13773 /* Co-processor instructions. */
13774 return thumb2_record_coproc_insn (thumb2_insn_r);
13775 }
13776 }
13777
13778 return -1;
13779 }
13780
13781 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13782 and positive val on fauilure. */
13783
13784 static int
13785 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13786 {
13787 gdb_byte buf[insn_size];
13788
13789 memset (&buf[0], 0, insn_size);
13790
13791 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13792 return 1;
13793 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13794 insn_size,
13795 gdbarch_byte_order_for_code (insn_record->gdbarch));
13796 return 0;
13797 }
13798
13799 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13800
13801 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13802 dispatch it. */
13803
13804 static int
13805 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13806 uint32_t insn_size)
13807 {
13808
13809 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13810 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13811 {
13812 arm_record_data_proc_misc_ld_str, /* 000. */
13813 arm_record_data_proc_imm, /* 001. */
13814 arm_record_ld_st_imm_offset, /* 010. */
13815 arm_record_ld_st_reg_offset, /* 011. */
13816 arm_record_ld_st_multiple, /* 100. */
13817 arm_record_b_bl, /* 101. */
13818 arm_record_asimd_vfp_coproc, /* 110. */
13819 arm_record_coproc_data_proc /* 111. */
13820 };
13821
13822 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13823 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13824 { \
13825 thumb_record_shift_add_sub, /* 000. */
13826 thumb_record_add_sub_cmp_mov, /* 001. */
13827 thumb_record_ld_st_reg_offset, /* 010. */
13828 thumb_record_ld_st_imm_offset, /* 011. */
13829 thumb_record_ld_st_stack, /* 100. */
13830 thumb_record_misc, /* 101. */
13831 thumb_record_ldm_stm_swi, /* 110. */
13832 thumb_record_branch /* 111. */
13833 };
13834
13835 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13836 uint32_t insn_id = 0;
13837
13838 if (extract_arm_insn (arm_record, insn_size))
13839 {
13840 if (record_debug)
13841 {
13842 printf_unfiltered (_("Process record: error reading memory at "
13843 "addr %s len = %d.\n"),
13844 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13845 }
13846 return -1;
13847 }
13848 else if (ARM_RECORD == record_type)
13849 {
13850 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13851 insn_id = bits (arm_record->arm_insn, 25, 27);
13852 ret = arm_record_extension_space (arm_record);
13853 /* If this insn has fallen into extension space
13854 then we need not decode it anymore. */
13855 if (ret != -1 && !INSN_RECORDED(arm_record))
13856 {
13857 ret = arm_handle_insn[insn_id] (arm_record);
13858 }
13859 }
13860 else if (THUMB_RECORD == record_type)
13861 {
13862 /* As thumb does not have condition codes, we set negative. */
13863 arm_record->cond = -1;
13864 insn_id = bits (arm_record->arm_insn, 13, 15);
13865 ret = thumb_handle_insn[insn_id] (arm_record);
13866 }
13867 else if (THUMB2_RECORD == record_type)
13868 {
13869 /* As thumb does not have condition codes, we set negative. */
13870 arm_record->cond = -1;
13871
13872 /* Swap first half of 32bit thumb instruction with second half. */
13873 arm_record->arm_insn
13874 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13875
13876 insn_id = thumb2_record_decode_insn_handler (arm_record);
13877
13878 if (insn_id != ARM_RECORD_SUCCESS)
13879 {
13880 arm_record_unsupported_insn (arm_record);
13881 ret = -1;
13882 }
13883 }
13884 else
13885 {
13886 /* Throw assertion. */
13887 gdb_assert_not_reached ("not a valid instruction, could not decode");
13888 }
13889
13890 return ret;
13891 }
13892
13893
13894 /* Cleans up local record registers and memory allocations. */
13895
13896 static void
13897 deallocate_reg_mem (insn_decode_record *record)
13898 {
13899 xfree (record->arm_regs);
13900 xfree (record->arm_mems);
13901 }
13902
13903
13904 /* Parse the current instruction and record the values of the registers and
13905 memory that will be changed in current instruction to record_arch_list".
13906 Return -1 if something is wrong. */
13907
13908 int
13909 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13910 CORE_ADDR insn_addr)
13911 {
13912
13913 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13914 uint32_t no_of_rec = 0;
13915 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13916 ULONGEST t_bit = 0, insn_id = 0;
13917
13918 ULONGEST u_regval = 0;
13919
13920 insn_decode_record arm_record;
13921
13922 memset (&arm_record, 0, sizeof (insn_decode_record));
13923 arm_record.regcache = regcache;
13924 arm_record.this_addr = insn_addr;
13925 arm_record.gdbarch = gdbarch;
13926
13927
13928 if (record_debug > 1)
13929 {
13930 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13931 "addr = %s\n",
13932 paddress (gdbarch, arm_record.this_addr));
13933 }
13934
13935 if (extract_arm_insn (&arm_record, 2))
13936 {
13937 if (record_debug)
13938 {
13939 printf_unfiltered (_("Process record: error reading memory at "
13940 "addr %s len = %d.\n"),
13941 paddress (arm_record.gdbarch,
13942 arm_record.this_addr), 2);
13943 }
13944 return -1;
13945 }
13946
13947 /* Check the insn, whether it is thumb or arm one. */
13948
13949 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13950 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13951
13952
13953 if (!(u_regval & t_bit))
13954 {
13955 /* We are decoding arm insn. */
13956 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
13957 }
13958 else
13959 {
13960 insn_id = bits (arm_record.arm_insn, 11, 15);
13961 /* is it thumb2 insn? */
13962 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
13963 {
13964 ret = decode_insn (&arm_record, THUMB2_RECORD,
13965 THUMB2_INSN_SIZE_BYTES);
13966 }
13967 else
13968 {
13969 /* We are decoding thumb insn. */
13970 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
13971 }
13972 }
13973
13974 if (0 == ret)
13975 {
13976 /* Record registers. */
13977 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
13978 if (arm_record.arm_regs)
13979 {
13980 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
13981 {
13982 if (record_full_arch_list_add_reg
13983 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
13984 ret = -1;
13985 }
13986 }
13987 /* Record memories. */
13988 if (arm_record.arm_mems)
13989 {
13990 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
13991 {
13992 if (record_full_arch_list_add_mem
13993 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
13994 arm_record.arm_mems[no_of_rec].len))
13995 ret = -1;
13996 }
13997 }
13998
13999 if (record_full_arch_list_add_end ())
14000 ret = -1;
14001 }
14002
14003
14004 deallocate_reg_mem (&arm_record);
14005
14006 return ret;
14007 }
14008