]> git.ipfire.org Git - thirdparty/binutils-gdb.git/blob - gdb/arm-tdep.c
Fix bug in arm_push_dummy_call by -fsanitize=address
[thirdparty/binutils-gdb.git] / gdb / arm-tdep.c
1 /* Common target dependent code for GDB on ARM systems.
2
3 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "defs.h"
21
22 #include <ctype.h> /* XXX for isupper (). */
23
24 #include "frame.h"
25 #include "inferior.h"
26 #include "infrun.h"
27 #include "gdbcmd.h"
28 #include "gdbcore.h"
29 #include "dis-asm.h" /* For register styles. */
30 #include "regcache.h"
31 #include "reggroups.h"
32 #include "doublest.h"
33 #include "value.h"
34 #include "arch-utils.h"
35 #include "osabi.h"
36 #include "frame-unwind.h"
37 #include "frame-base.h"
38 #include "trad-frame.h"
39 #include "objfiles.h"
40 #include "dwarf2-frame.h"
41 #include "gdbtypes.h"
42 #include "prologue-value.h"
43 #include "remote.h"
44 #include "target-descriptions.h"
45 #include "user-regs.h"
46 #include "observer.h"
47
48 #include "arch/arm.h"
49 #include "arm-tdep.h"
50 #include "gdb/sim-arm.h"
51
52 #include "elf-bfd.h"
53 #include "coff/internal.h"
54 #include "elf/arm.h"
55
56 #include "vec.h"
57
58 #include "record.h"
59 #include "record-full.h"
60
61 #include "features/arm-with-m.c"
62 #include "features/arm-with-m-fpa-layout.c"
63 #include "features/arm-with-m-vfp-d16.c"
64 #include "features/arm-with-iwmmxt.c"
65 #include "features/arm-with-vfpv2.c"
66 #include "features/arm-with-vfpv3.c"
67 #include "features/arm-with-neon.c"
68
/* Nonzero means print ARM-specific debugging output ("set debug arm").  */
static int arm_debug;

/* Macros for setting and testing a bit in a minimal symbol that marks
   it as Thumb function.  The MSB of the minimal symbol's "info" field
   is used for this purpose.

   MSYMBOL_SET_SPECIAL	Actually sets the "special" bit.
   MSYMBOL_IS_SPECIAL	Tests the "special" bit in a minimal symbol.  */

#define MSYMBOL_SET_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym) = 1

#define MSYMBOL_IS_SPECIAL(msym)				\
	MSYMBOL_TARGET_FLAG_1 (msym)

/* Per-objfile data used for mapping symbols.  */
static const struct objfile_data *arm_objfile_data_key;

/* One mapping symbol: a section-relative address plus a one-character
   kind.  arm_pc_is_thumb treats type 't' as marking Thumb code.  */
struct arm_mapping_symbol
{
  /* Section-relative address from which this mapping symbol applies.  */
  bfd_vma value;
  /* Symbol kind character.  */
  char type;
};
typedef struct arm_mapping_symbol arm_mapping_symbol_s;
DEF_VEC_O(arm_mapping_symbol_s);

/* Per-objfile storage: one vector of mapping symbols per BFD section,
   indexed by section index (see arm_find_mapping_symbol).  */
struct arm_per_objfile
{
  VEC(arm_mapping_symbol_s) **section_maps;
};

/* The list of available "set arm ..." and "show arm ..." commands.  */
static struct cmd_list_element *setarmcmdlist = NULL;
static struct cmd_list_element *showarmcmdlist = NULL;

/* The type of floating-point to use.  Keep this in sync with enum
   arm_float_model, and the help string in _initialize_arm_tdep.  */
static const char *const fp_model_strings[] =
{
  "auto",
  "softfpa",
  "fpa",
  "softvfp",
  "vfp",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_float_model arm_fp_model = ARM_FLOAT_AUTO;
static const char *current_fp_model = "auto";

/* The ABI to use.  Keep this in sync with arm_abi_kind.  */
static const char *const arm_abi_strings[] =
{
  "auto",
  "APCS",
  "AAPCS",
  NULL
};

/* A variable that can be configured by the user.  */
static enum arm_abi_kind arm_abi_global = ARM_ABI_AUTO;
static const char *arm_abi_string = "auto";

/* The execution mode to assume.  */
static const char *const arm_mode_strings[] =
{
  "auto",
  "arm",
  "thumb",
  NULL
};

/* User-configurable mode settings; consulted by arm_pc_is_thumb when
   no better information (mapping symbols, minimal symbols) exists.  */
static const char *arm_fallback_mode_string = "auto";
static const char *arm_force_mode_string = "auto";

/* Internal override of the execution mode.  -1 means no override,
   0 means override to ARM mode, 1 means override to Thumb mode.
   The effect is the same as if arm_force_mode has been set by the
   user (except the internal override has precedence over a user's
   arm_force_mode override).  */
static int arm_override_mode = -1;

/* Number of different reg name sets (options).  */
static int num_disassembly_options;
154
/* The standard register names, and all the valid aliases for them.  Note
   that `fp', `sp' and `pc' are not added in this alias list, because they
   have been added as builtin user registers in
   std-regs.c:_initialize_frame_reg.  */
static const struct
{
  const char *name;
  int regnum;
} arm_register_aliases[] = {
  /* Basic register numbers.  */
  { "r0", 0 },
  { "r1", 1 },
  { "r2", 2 },
  { "r3", 3 },
  { "r4", 4 },
  { "r5", 5 },
  { "r6", 6 },
  { "r7", 7 },
  { "r8", 8 },
  { "r9", 9 },
  { "r10", 10 },
  { "r11", 11 },
  { "r12", 12 },
  { "r13", 13 },
  { "r14", 14 },
  { "r15", 15 },
  /* Synonyms (argument and variable registers).  */
  { "a1", 0 },
  { "a2", 1 },
  { "a3", 2 },
  { "a4", 3 },
  { "v1", 4 },
  { "v2", 5 },
  { "v3", 6 },
  { "v4", 7 },
  { "v5", 8 },
  { "v6", 9 },
  { "v7", 10 },
  { "v8", 11 },
  /* Other platform-specific names for r9.  */
  { "sb", 9 },
  { "tr", 9 },
  /* Special names.  */
  { "ip", 12 },
  { "lr", 14 },
  /* Names used by GCC (not listed in the ARM EABI).  */
  { "sl", 10 },
  /* A special name from the older ATPCS.  */
  { "wr", 7 },
};

/* Canonical names for the core registers, indexed by GDB register
   number (trailing comments give the register numbers).  */
static const char *const arm_register_names[] =
{"r0",  "r1",  "r2",  "r3",	/*  0  1  2  3 */
 "r4",  "r5",  "r6",  "r7",	/*  4  5  6  7 */
 "r8",  "r9",  "r10", "r11",	/*  8  9 10 11 */
 "r12", "sp",  "lr",  "pc",	/* 12 13 14 15 */
 "f0",  "f1",  "f2",  "f3",	/* 16 17 18 19 */
 "f4",  "f5",  "f6",  "f7",	/* 20 21 22 23 */
 "fps", "cpsr" };		/* 24 25       */

/* Valid register name styles.  */
static const char **valid_disassembly_styles;

/* Disassembly style to use.  Default to "std" register names.  */
static const char *disassembly_style;
220
/* This is used to keep the bfd arch_info in sync with the disassembly
   style.  */
static void set_disassembly_style_sfunc(char *, int,
					struct cmd_list_element *);
static void set_disassembly_style (void);

/* Conversions between the FPA extended format and host doubles.  */
static void convert_from_extended (const struct floatformat *, const void *,
				   void *, int);
static void convert_to_extended (const struct floatformat *, void *,
				 const void *, int);

/* Pseudo-register access for NEON quad registers.  */
static enum register_status arm_neon_quad_read (struct gdbarch *gdbarch,
						struct regcache *regcache,
						int regnum, gdb_byte *buf);
static void arm_neon_quad_write (struct gdbarch *gdbarch,
				 struct regcache *regcache,
				 int regnum, const gdb_byte *buf);

/* Per-frame cache built by the prologue analyzers and consumed by the
   prologue-based unwinder.  */
struct arm_prologue_cache
{
  /* The stack pointer at the time this frame was created; i.e. the
     caller's stack pointer when this function was called.  It is used
     to identify this frame.  */
  CORE_ADDR prev_sp;

  /* The frame base for this frame is just prev_sp - frame size.
     FRAMESIZE is the distance from the frame pointer to the
     initial stack pointer.  */

  int framesize;

  /* The register used to hold the frame pointer for this frame.  */
  int framereg;

  /* Saved register offsets.  */
  struct trad_frame_saved_reg *saved_regs;
};

static CORE_ADDR arm_analyze_prologue (struct gdbarch *gdbarch,
				       CORE_ADDR prologue_start,
				       CORE_ADDR prologue_end,
				       struct arm_prologue_cache *cache);

/* Architecture version for displaced stepping.  This effects the behaviour of
   certain instructions, and really should not be hard-wired.  */

#define DISPLACED_STEPPING_ARCH_VERSION	5

/* Set to true if the 32-bit mode is in use.  */

int arm_apcs_32 = 1;
272
273 /* Return the bit mask in ARM_PS_REGNUM that indicates Thumb mode. */
274
275 int
276 arm_psr_thumb_bit (struct gdbarch *gdbarch)
277 {
278 if (gdbarch_tdep (gdbarch)->is_m)
279 return XPSR_T;
280 else
281 return CPSR_T;
282 }
283
284 /* Determine if FRAME is executing in Thumb mode. */
285
286 int
287 arm_frame_is_thumb (struct frame_info *frame)
288 {
289 CORE_ADDR cpsr;
290 ULONGEST t_bit = arm_psr_thumb_bit (get_frame_arch (frame));
291
292 /* Every ARM frame unwinder can unwind the T bit of the CPSR, either
293 directly (from a signal frame or dummy frame) or by interpreting
294 the saved LR (from a prologue or DWARF frame). So consult it and
295 trust the unwinders. */
296 cpsr = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
297
298 return (cpsr & t_bit) != 0;
299 }
300
301 /* Callback for VEC_lower_bound. */
302
303 static inline int
304 arm_compare_mapping_symbols (const struct arm_mapping_symbol *lhs,
305 const struct arm_mapping_symbol *rhs)
306 {
307 return lhs->value < rhs->value;
308 }
309
/* Search for the mapping symbol covering MEMADDR.  If one is found,
   return its type.  Otherwise, return 0.  If START is non-NULL,
   set *START to the location of the mapping symbol.  */

static char
arm_find_mapping_symbol (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  /* If there are mapping symbols, consult them.  */
  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_per_objfile *data;
      VEC(arm_mapping_symbol_s) *map;
      /* Mapping symbols are stored section-relative, so convert
	 MEMADDR before searching.  */
      struct arm_mapping_symbol map_key = { memaddr - obj_section_addr (sec),
					    0 };
      unsigned int idx;

      data = (struct arm_per_objfile *) objfile_data (sec->objfile,
						      arm_objfile_data_key);
      if (data != NULL)
	{
	  /* One sorted vector of mapping symbols per BFD section.  */
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_mapping_symbol_s, map))
	    {
	      struct arm_mapping_symbol *map_sym;

	      idx = VEC_lower_bound (arm_mapping_symbol_s, map, &map_key,
				     arm_compare_mapping_symbols);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 mapping symbol covers this address.  */
	      if (idx < VEC_length (arm_mapping_symbol_s, map))
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx);
		  if (map_sym->value == map_key.value)
		    {
		      if (start)
			*start = map_sym->value + obj_section_addr (sec);
		      return map_sym->type;
		    }
		}

	      /* Fall back to the symbol just before the insertion
		 point, if any; it covers MEMADDR.  */
	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_mapping_symbol_s, map, idx - 1);
		  if (start)
		    *start = map_sym->value + obj_section_addr (sec);
		  return map_sym->type;
		}
	    }
	}
    }

  /* No mapping symbol covers MEMADDR.  */
  return 0;
}
369
/* Determine if the program counter specified in MEMADDR is in a Thumb
   function.  This function should be called for addresses unrelated to
   any executing frame; otherwise, prefer arm_frame_is_thumb.

   Return 1 for Thumb, 0 for ARM.  The heuristics below are checked in
   strict priority order: displaced-stepping remap, low address bit,
   internal override, user force-mode, M-profile, mapping symbols,
   minimal-symbol "special" bit, user fallback-mode, live CPSR.  */

int
arm_pc_is_thumb (struct gdbarch *gdbarch, CORE_ADDR memaddr)
{
  struct bound_minimal_symbol sym;
  char type;
  struct displaced_step_closure* dsc
    = get_displaced_step_closure_by_addr(memaddr);

  /* If checking the mode of displaced instruction in copy area, the mode
     should be determined by instruction on the original address.  */
  if (dsc)
    {
      if (debug_displaced)
	fprintf_unfiltered (gdb_stdlog,
			    "displaced: check mode of %.8lx instead of %.8lx\n",
			    (unsigned long) dsc->insn_addr,
			    (unsigned long) memaddr);
      memaddr = dsc->insn_addr;
    }

  /* If bit 0 of the address is set, assume this is a Thumb address.  */
  if (IS_THUMB_ADDR (memaddr))
    return 1;

  /* Respect internal mode override if active.  */
  if (arm_override_mode != -1)
    return arm_override_mode;

  /* If the user wants to override the symbol table, let him.  */
  if (strcmp (arm_force_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_force_mode_string, "thumb") == 0)
    return 1;

  /* ARM v6-M and v7-M are always in Thumb mode.  */
  if (gdbarch_tdep (gdbarch)->is_m)
    return 1;

  /* If there are mapping symbols, consult them.  */
  type = arm_find_mapping_symbol (memaddr, NULL);
  if (type)
    return type == 't';

  /* Thumb functions have a "special" bit set in minimal symbols.  */
  sym = lookup_minimal_symbol_by_pc (memaddr);
  if (sym.minsym)
    return (MSYMBOL_IS_SPECIAL (sym.minsym));

  /* If the user wants to override the fallback mode, let them.  */
  if (strcmp (arm_fallback_mode_string, "arm") == 0)
    return 0;
  if (strcmp (arm_fallback_mode_string, "thumb") == 0)
    return 1;

  /* If we couldn't find any symbol, but we're talking to a running
     target, then trust the current value of $cpsr.  This lets
     "display/i $pc" always show the correct mode (though if there is
     a symbol table we will not reach here, so it still may not be
     displayed in the mode it will be executed).  */
  if (target_has_registers)
    return arm_frame_is_thumb (get_current_frame ());

  /* Otherwise we're out of luck; we assume ARM.  */
  return 0;
}
439
440 /* Remove useless bits from addresses in a running program. */
441 static CORE_ADDR
442 arm_addr_bits_remove (struct gdbarch *gdbarch, CORE_ADDR val)
443 {
444 /* On M-profile devices, do not strip the low bit from EXC_RETURN
445 (the magic exception return address). */
446 if (gdbarch_tdep (gdbarch)->is_m
447 && (val & 0xfffffff0) == 0xfffffff0)
448 return val;
449
450 if (arm_apcs_32)
451 return UNMAKE_THUMB_ADDR (val);
452 else
453 return (val & 0x03fffffc);
454 }
455
/* Return 1 if PC is the start of a compiler helper function which
   can be safely ignored during prologue skipping.  IS_THUMB is true
   if the function is known to be a Thumb function due to the way it
   is being called.  */
static int
skip_prologue_function (struct gdbarch *gdbarch, CORE_ADDR pc, int is_thumb)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  struct bound_minimal_symbol msym;

  msym = lookup_minimal_symbol_by_pc (pc);
  if (msym.minsym != NULL
      && BMSYMBOL_VALUE_ADDRESS (msym) == pc
      && MSYMBOL_LINKAGE_NAME (msym.minsym) != NULL)
    {
      const char *name = MSYMBOL_LINKAGE_NAME (msym.minsym);

      /* The GNU linker's Thumb call stub to foo is named
	 __foo_from_thumb.  Skip the stub's "__" prefix so the
	 startswith checks below match the stubbed function's name
	 (the trailing "_from_thumb" does not affect a prefix test).  */
      if (strstr (name, "_from_thumb") != NULL)
	name += 2;

      /* On soft-float targets, __truncdfsf2 is called to convert promoted
	 arguments to their argument types in non-prototyped
	 functions.  */
      if (startswith (name, "__truncdfsf2"))
	return 1;
      if (startswith (name, "__aeabi_d2f"))
	return 1;

      /* Internal functions related to thread-local storage.  */
      if (startswith (name, "__tls_get_addr"))
	return 1;
      if (startswith (name, "__aeabi_read_tp"))
	return 1;
    }
  else
    {
      /* If we run against a stripped glibc, we may be unable to identify
	 special functions by name.  Check for one important case,
	 __aeabi_read_tp, by comparing the *code* against the default
	 implementation (this is hand-written ARM assembler in glibc).  */

      if (!is_thumb
	  && read_memory_unsigned_integer (pc, 4, byte_order_for_code)
	     == 0xe3e00a0f /* mov r0, #0xffff0fff */
	  && read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code)
	     == 0xe240f01f) /* sub pc, r0, #31 */
	return 1;
    }

  /* Not a recognized helper; do not skip it.  */
  return 0;
}
509
/* Support routines for instruction parsing.  */

/* Mask covering bits 0..X inclusive.  */
#define submask(x) ((1L << ((x) + 1)) - 1)
/* Bit ST of OBJ.  */
#define bit(obj,st) (((obj) >> (st)) & 1)
/* Bits ST..FN (inclusive) of OBJ.  */
#define bits(obj,st,fn) (((obj) >> (st)) & submask ((fn) - (st)))
/* Bits ST..FN of OBJ, sign-extended from bit FN.  */
#define sbits(obj,st,fn) \
  ((long) (bits(obj,st,fn) | ((long) bit(obj,fn) * ~ submask (fn - st))))
/* Destination of an ARM-mode branch: PC+8 plus the sign-extended,
   word-scaled 24-bit offset in INSTR.  */
#define BranchDest(addr,instr) \
  ((CORE_ADDR) (((unsigned long) (addr)) + 8 + (sbits (instr, 0, 23) << 2)))

/* Extract the immediate from instruction movw/movt of encoding T.  INSN1 is
   the first 16-bit of instruction, and INSN2 is the second 16-bit of
   instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_T(insn1, insn2) \
  ((bits ((insn1), 0, 3) << 12)	      \
   | (bits ((insn1), 10, 10) << 11)   \
   | (bits ((insn2), 12, 14) << 8)    \
   | bits ((insn2), 0, 7))

/* Extract the immediate from instruction movw/movt of encoding A.  INSN is
   the 32-bit instruction.  */
#define EXTRACT_MOVW_MOVT_IMM_A(insn) \
  ((bits ((insn), 16, 19) << 12) \
   | bits ((insn), 0, 11))
533
/* Decode a 12-bit Thumb modified immediate; implements the
   ThumbExpandImmediate pseudo-operation.  */

static unsigned int
thumb_expand_immediate (unsigned int imm)
{
  unsigned int rot = imm >> 7;		/* Top five bits of IMM.  */
  unsigned int byte = imm & 0xff;	/* Low eight bits of IMM.  */

  if (rot >= 8)
    {
      /* Rotated form: 1bcdefgh rotated right by ROT.  The pattern
	 occupies only the low byte and ROT >= 8, so the rotation
	 reduces to a single left shift.  */
      return (0x80 | (imm & 0x7f)) << (32 - rot);
    }

  /* Replicated-byte forms, selected by the top four bits.  */
  if (rot < 2)
    return byte;				/* 00 00 00 vv */
  if (rot < 4)
    return byte | (byte << 16);			/* 00 vv 00 vv */
  if (rot < 6)
    return (byte << 8) | (byte << 24);		/* vv 00 vv 00 */
  return byte | (byte << 8)
    | (byte << 16) | (byte << 24);		/* vv vv vv vv */
}
557
/* Return 1 if the 16-bit Thumb instruction INST might change
   control flow, 0 otherwise.  */

static int
thumb_instruction_changes_pc (unsigned short inst)
{
  return ((inst & 0xff00) == 0xbd00	/* pop {rlist, pc} */
	  || (inst & 0xf000) == 0xd000	/* conditional branch */
	  || (inst & 0xf800) == 0xe000	/* unconditional branch */
	  || (inst & 0xff00) == 0x4700	/* bx REG, blx REG */
	  || (inst & 0xff87) == 0x4687	/* mov pc, REG */
	  || (inst & 0xf500) == 0xb100);	/* CBNZ or CBZ.  */
}
584
/* Return 1 if the 32-bit Thumb instruction in INST1 and INST2
   might change control flow, 0 otherwise.  INST1 is the first
   halfword and INST2 the second, as read from the instruction
   stream.  */

static int
thumb2_instruction_changes_pc (unsigned short inst1, unsigned short inst2)
{
  if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
    {
      /* Branches and miscellaneous control instructions.  */

      if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	{
	  /* B, BL, BLX.  */
	  return 1;
	}
      else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	{
	  /* SUBS PC, LR, #imm8.  */
	  return 1;
	}
      else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	{
	  /* Conditional branch.  */
	  return 1;
	}

      /* Other miscellaneous control instructions do not touch PC.  */
      return 0;
    }

  if ((inst1 & 0xfe50) == 0xe810)
    {
      /* Load multiple or RFE.  Bits 7 and 8 of INST1 select the
	 addressing variant; for LDM forms, bit 15 of INST2 says
	 whether PC is in the register list.  */

      if (bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* LDMIA or POP */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (!bit (inst1, 7) && bit (inst1, 8))
	{
	  /* LDMDB */
	  if (bit (inst2, 15))
	    return 1;
	}
      else if (bit (inst1, 7) && bit (inst1, 8))
	{
	  /* RFEIA */
	  return 1;
	}
      else if (!bit (inst1, 7) && !bit (inst1, 8))
	{
	  /* RFEDB */
	  return 1;
	}

      return 0;
    }

  if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
    {
      /* MOV PC or MOVS PC.  */
      return 1;
    }

  if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
    {
      /* LDR PC.  The checks below distinguish the addressing modes
	 that actually load PC from encodings that do not.  */
      if (bits (inst1, 0, 3) == 15)
	return 1;
      if (bit (inst1, 7))
	return 1;
      if (bit (inst2, 11))
	return 1;
      if ((inst2 & 0x0fc0) == 0x0000)
	return 1;

      return 0;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
    {
      /* TBB.  */
      return 1;
    }

  if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
    {
      /* TBH.  */
      return 1;
    }

  /* Nothing PC-modifying recognized.  */
  return 0;
}
679
/* Return 1 if the 16-bit Thumb instruction INSN restores SP in
   epilogue, 0 otherwise.  */

static int
thumb_instruction_restores_sp (unsigned short insn)
{
  if (insn == 0x46bd)			/* mov sp, r7 */
    return 1;
  if ((insn & 0xff80) == 0xb000)	/* add sp, imm */
    return 1;
  if ((insn & 0xfe00) == 0xbc00)	/* pop <registers> */
    return 1;

  return 0;
}
690
691 /* Analyze a Thumb prologue, looking for a recognizable stack frame
692 and frame pointer. Scan until we encounter a store that could
693 clobber the stack frame unexpectedly, or an unknown instruction.
694 Return the last address which is definitely safe to skip for an
695 initial breakpoint. */
696
697 static CORE_ADDR
698 thumb_analyze_prologue (struct gdbarch *gdbarch,
699 CORE_ADDR start, CORE_ADDR limit,
700 struct arm_prologue_cache *cache)
701 {
702 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
703 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
704 int i;
705 pv_t regs[16];
706 struct pv_area *stack;
707 struct cleanup *back_to;
708 CORE_ADDR offset;
709 CORE_ADDR unrecognized_pc = 0;
710
711 for (i = 0; i < 16; i++)
712 regs[i] = pv_register (i, 0);
713 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
714 back_to = make_cleanup_free_pv_area (stack);
715
716 while (start < limit)
717 {
718 unsigned short insn;
719
720 insn = read_memory_unsigned_integer (start, 2, byte_order_for_code);
721
722 if ((insn & 0xfe00) == 0xb400) /* push { rlist } */
723 {
724 int regno;
725 int mask;
726
727 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
728 break;
729
730 /* Bits 0-7 contain a mask for registers R0-R7. Bit 8 says
731 whether to save LR (R14). */
732 mask = (insn & 0xff) | ((insn & 0x100) << 6);
733
734 /* Calculate offsets of saved R0-R7 and LR. */
735 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
736 if (mask & (1 << regno))
737 {
738 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
739 -4);
740 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
741 }
742 }
743 else if ((insn & 0xff80) == 0xb080) /* sub sp, #imm */
744 {
745 offset = (insn & 0x7f) << 2; /* get scaled offset */
746 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM],
747 -offset);
748 }
749 else if (thumb_instruction_restores_sp (insn))
750 {
751 /* Don't scan past the epilogue. */
752 break;
753 }
754 else if ((insn & 0xf800) == 0xa800) /* add Rd, sp, #imm */
755 regs[bits (insn, 8, 10)] = pv_add_constant (regs[ARM_SP_REGNUM],
756 (insn & 0xff) << 2);
757 else if ((insn & 0xfe00) == 0x1c00 /* add Rd, Rn, #imm */
758 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
759 regs[bits (insn, 0, 2)] = pv_add_constant (regs[bits (insn, 3, 5)],
760 bits (insn, 6, 8));
761 else if ((insn & 0xf800) == 0x3000 /* add Rd, #imm */
762 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
763 regs[bits (insn, 8, 10)] = pv_add_constant (regs[bits (insn, 8, 10)],
764 bits (insn, 0, 7));
765 else if ((insn & 0xfe00) == 0x1800 /* add Rd, Rn, Rm */
766 && pv_is_register (regs[bits (insn, 6, 8)], ARM_SP_REGNUM)
767 && pv_is_constant (regs[bits (insn, 3, 5)]))
768 regs[bits (insn, 0, 2)] = pv_add (regs[bits (insn, 3, 5)],
769 regs[bits (insn, 6, 8)]);
770 else if ((insn & 0xff00) == 0x4400 /* add Rd, Rm */
771 && pv_is_constant (regs[bits (insn, 3, 6)]))
772 {
773 int rd = (bit (insn, 7) << 3) + bits (insn, 0, 2);
774 int rm = bits (insn, 3, 6);
775 regs[rd] = pv_add (regs[rd], regs[rm]);
776 }
777 else if ((insn & 0xff00) == 0x4600) /* mov hi, lo or mov lo, hi */
778 {
779 int dst_reg = (insn & 0x7) + ((insn & 0x80) >> 4);
780 int src_reg = (insn & 0x78) >> 3;
781 regs[dst_reg] = regs[src_reg];
782 }
783 else if ((insn & 0xf800) == 0x9000) /* str rd, [sp, #off] */
784 {
785 /* Handle stores to the stack. Normally pushes are used,
786 but with GCC -mtpcs-frame, there may be other stores
787 in the prologue to create the frame. */
788 int regno = (insn >> 8) & 0x7;
789 pv_t addr;
790
791 offset = (insn & 0xff) << 2;
792 addr = pv_add_constant (regs[ARM_SP_REGNUM], offset);
793
794 if (pv_area_store_would_trash (stack, addr))
795 break;
796
797 pv_area_store (stack, addr, 4, regs[regno]);
798 }
799 else if ((insn & 0xf800) == 0x6000) /* str rd, [rn, #off] */
800 {
801 int rd = bits (insn, 0, 2);
802 int rn = bits (insn, 3, 5);
803 pv_t addr;
804
805 offset = bits (insn, 6, 10) << 2;
806 addr = pv_add_constant (regs[rn], offset);
807
808 if (pv_area_store_would_trash (stack, addr))
809 break;
810
811 pv_area_store (stack, addr, 4, regs[rd]);
812 }
813 else if (((insn & 0xf800) == 0x7000 /* strb Rd, [Rn, #off] */
814 || (insn & 0xf800) == 0x8000) /* strh Rd, [Rn, #off] */
815 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM))
816 /* Ignore stores of argument registers to the stack. */
817 ;
818 else if ((insn & 0xf800) == 0xc800 /* ldmia Rn!, { registers } */
819 && pv_is_register (regs[bits (insn, 8, 10)], ARM_SP_REGNUM))
820 /* Ignore block loads from the stack, potentially copying
821 parameters from memory. */
822 ;
823 else if ((insn & 0xf800) == 0x9800 /* ldr Rd, [Rn, #immed] */
824 || ((insn & 0xf800) == 0x6800 /* ldr Rd, [sp, #immed] */
825 && pv_is_register (regs[bits (insn, 3, 5)], ARM_SP_REGNUM)))
826 /* Similarly ignore single loads from the stack. */
827 ;
828 else if ((insn & 0xffc0) == 0x0000 /* lsls Rd, Rm, #0 */
829 || (insn & 0xffc0) == 0x1c00) /* add Rd, Rn, #0 */
830 /* Skip register copies, i.e. saves to another register
831 instead of the stack. */
832 ;
833 else if ((insn & 0xf800) == 0x2000) /* movs Rd, #imm */
834 /* Recognize constant loads; even with small stacks these are necessary
835 on Thumb. */
836 regs[bits (insn, 8, 10)] = pv_constant (bits (insn, 0, 7));
837 else if ((insn & 0xf800) == 0x4800) /* ldr Rd, [pc, #imm] */
838 {
839 /* Constant pool loads, for the same reason. */
840 unsigned int constant;
841 CORE_ADDR loc;
842
843 loc = start + 4 + bits (insn, 0, 7) * 4;
844 constant = read_memory_unsigned_integer (loc, 4, byte_order);
845 regs[bits (insn, 8, 10)] = pv_constant (constant);
846 }
847 else if (thumb_insn_size (insn) == 4) /* 32-bit Thumb-2 instructions. */
848 {
849 unsigned short inst2;
850
851 inst2 = read_memory_unsigned_integer (start + 2, 2,
852 byte_order_for_code);
853
854 if ((insn & 0xf800) == 0xf000 && (inst2 & 0xe800) == 0xe800)
855 {
856 /* BL, BLX. Allow some special function calls when
857 skipping the prologue; GCC generates these before
858 storing arguments to the stack. */
859 CORE_ADDR nextpc;
860 int j1, j2, imm1, imm2;
861
862 imm1 = sbits (insn, 0, 10);
863 imm2 = bits (inst2, 0, 10);
864 j1 = bit (inst2, 13);
865 j2 = bit (inst2, 11);
866
867 offset = ((imm1 << 12) + (imm2 << 1));
868 offset ^= ((!j2) << 22) | ((!j1) << 23);
869
870 nextpc = start + 4 + offset;
871 /* For BLX make sure to clear the low bits. */
872 if (bit (inst2, 12) == 0)
873 nextpc = nextpc & 0xfffffffc;
874
875 if (!skip_prologue_function (gdbarch, nextpc,
876 bit (inst2, 12) != 0))
877 break;
878 }
879
880 else if ((insn & 0xffd0) == 0xe900 /* stmdb Rn{!},
881 { registers } */
882 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
883 {
884 pv_t addr = regs[bits (insn, 0, 3)];
885 int regno;
886
887 if (pv_area_store_would_trash (stack, addr))
888 break;
889
890 /* Calculate offsets of saved registers. */
891 for (regno = ARM_LR_REGNUM; regno >= 0; regno--)
892 if (inst2 & (1 << regno))
893 {
894 addr = pv_add_constant (addr, -4);
895 pv_area_store (stack, addr, 4, regs[regno]);
896 }
897
898 if (insn & 0x0020)
899 regs[bits (insn, 0, 3)] = addr;
900 }
901
902 else if ((insn & 0xff50) == 0xe940 /* strd Rt, Rt2,
903 [Rn, #+/-imm]{!} */
904 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
905 {
906 int regno1 = bits (inst2, 12, 15);
907 int regno2 = bits (inst2, 8, 11);
908 pv_t addr = regs[bits (insn, 0, 3)];
909
910 offset = inst2 & 0xff;
911 if (insn & 0x0080)
912 addr = pv_add_constant (addr, offset);
913 else
914 addr = pv_add_constant (addr, -offset);
915
916 if (pv_area_store_would_trash (stack, addr))
917 break;
918
919 pv_area_store (stack, addr, 4, regs[regno1]);
920 pv_area_store (stack, pv_add_constant (addr, 4),
921 4, regs[regno2]);
922
923 if (insn & 0x0020)
924 regs[bits (insn, 0, 3)] = addr;
925 }
926
927 else if ((insn & 0xfff0) == 0xf8c0 /* str Rt,[Rn,+/-#imm]{!} */
928 && (inst2 & 0x0c00) == 0x0c00
929 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
930 {
931 int regno = bits (inst2, 12, 15);
932 pv_t addr = regs[bits (insn, 0, 3)];
933
934 offset = inst2 & 0xff;
935 if (inst2 & 0x0200)
936 addr = pv_add_constant (addr, offset);
937 else
938 addr = pv_add_constant (addr, -offset);
939
940 if (pv_area_store_would_trash (stack, addr))
941 break;
942
943 pv_area_store (stack, addr, 4, regs[regno]);
944
945 if (inst2 & 0x0100)
946 regs[bits (insn, 0, 3)] = addr;
947 }
948
949 else if ((insn & 0xfff0) == 0xf8c0 /* str.w Rt,[Rn,#imm] */
950 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
951 {
952 int regno = bits (inst2, 12, 15);
953 pv_t addr;
954
955 offset = inst2 & 0xfff;
956 addr = pv_add_constant (regs[bits (insn, 0, 3)], offset);
957
958 if (pv_area_store_would_trash (stack, addr))
959 break;
960
961 pv_area_store (stack, addr, 4, regs[regno]);
962 }
963
964 else if ((insn & 0xffd0) == 0xf880 /* str{bh}.w Rt,[Rn,#imm] */
965 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
966 /* Ignore stores of argument registers to the stack. */
967 ;
968
969 else if ((insn & 0xffd0) == 0xf800 /* str{bh} Rt,[Rn,#+/-imm] */
970 && (inst2 & 0x0d00) == 0x0c00
971 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
972 /* Ignore stores of argument registers to the stack. */
973 ;
974
975 else if ((insn & 0xffd0) == 0xe890 /* ldmia Rn[!],
976 { registers } */
977 && (inst2 & 0x8000) == 0x0000
978 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
979 /* Ignore block loads from the stack, potentially copying
980 parameters from memory. */
981 ;
982
983 else if ((insn & 0xffb0) == 0xe950 /* ldrd Rt, Rt2,
984 [Rn, #+/-imm] */
985 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
986 /* Similarly ignore dual loads from the stack. */
987 ;
988
989 else if ((insn & 0xfff0) == 0xf850 /* ldr Rt,[Rn,#+/-imm] */
990 && (inst2 & 0x0d00) == 0x0c00
991 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
992 /* Similarly ignore single loads from the stack. */
993 ;
994
995 else if ((insn & 0xfff0) == 0xf8d0 /* ldr.w Rt,[Rn,#imm] */
996 && pv_is_register (regs[bits (insn, 0, 3)], ARM_SP_REGNUM))
997 /* Similarly ignore single loads from the stack. */
998 ;
999
1000 else if ((insn & 0xfbf0) == 0xf100 /* add.w Rd, Rn, #imm */
1001 && (inst2 & 0x8000) == 0x0000)
1002 {
1003 unsigned int imm = ((bits (insn, 10, 10) << 11)
1004 | (bits (inst2, 12, 14) << 8)
1005 | bits (inst2, 0, 7));
1006
1007 regs[bits (inst2, 8, 11)]
1008 = pv_add_constant (regs[bits (insn, 0, 3)],
1009 thumb_expand_immediate (imm));
1010 }
1011
1012 else if ((insn & 0xfbf0) == 0xf200 /* addw Rd, Rn, #imm */
1013 && (inst2 & 0x8000) == 0x0000)
1014 {
1015 unsigned int imm = ((bits (insn, 10, 10) << 11)
1016 | (bits (inst2, 12, 14) << 8)
1017 | bits (inst2, 0, 7));
1018
1019 regs[bits (inst2, 8, 11)]
1020 = pv_add_constant (regs[bits (insn, 0, 3)], imm);
1021 }
1022
1023 else if ((insn & 0xfbf0) == 0xf1a0 /* sub.w Rd, Rn, #imm */
1024 && (inst2 & 0x8000) == 0x0000)
1025 {
1026 unsigned int imm = ((bits (insn, 10, 10) << 11)
1027 | (bits (inst2, 12, 14) << 8)
1028 | bits (inst2, 0, 7));
1029
1030 regs[bits (inst2, 8, 11)]
1031 = pv_add_constant (regs[bits (insn, 0, 3)],
1032 - (CORE_ADDR) thumb_expand_immediate (imm));
1033 }
1034
1035 else if ((insn & 0xfbf0) == 0xf2a0 /* subw Rd, Rn, #imm */
1036 && (inst2 & 0x8000) == 0x0000)
1037 {
1038 unsigned int imm = ((bits (insn, 10, 10) << 11)
1039 | (bits (inst2, 12, 14) << 8)
1040 | bits (inst2, 0, 7));
1041
1042 regs[bits (inst2, 8, 11)]
1043 = pv_add_constant (regs[bits (insn, 0, 3)], - (CORE_ADDR) imm);
1044 }
1045
1046 else if ((insn & 0xfbff) == 0xf04f) /* mov.w Rd, #const */
1047 {
1048 unsigned int imm = ((bits (insn, 10, 10) << 11)
1049 | (bits (inst2, 12, 14) << 8)
1050 | bits (inst2, 0, 7));
1051
1052 regs[bits (inst2, 8, 11)]
1053 = pv_constant (thumb_expand_immediate (imm));
1054 }
1055
1056 else if ((insn & 0xfbf0) == 0xf240) /* movw Rd, #const */
1057 {
1058 unsigned int imm
1059 = EXTRACT_MOVW_MOVT_IMM_T (insn, inst2);
1060
1061 regs[bits (inst2, 8, 11)] = pv_constant (imm);
1062 }
1063
1064 else if (insn == 0xea5f /* mov.w Rd,Rm */
1065 && (inst2 & 0xf0f0) == 0)
1066 {
1067 int dst_reg = (inst2 & 0x0f00) >> 8;
1068 int src_reg = inst2 & 0xf;
1069 regs[dst_reg] = regs[src_reg];
1070 }
1071
1072 else if ((insn & 0xff7f) == 0xf85f) /* ldr.w Rt,<label> */
1073 {
1074 /* Constant pool loads. */
1075 unsigned int constant;
1076 CORE_ADDR loc;
1077
1078 offset = bits (inst2, 0, 11);
1079 if (insn & 0x0080)
1080 loc = start + 4 + offset;
1081 else
1082 loc = start + 4 - offset;
1083
1084 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1085 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1086 }
1087
1088 else if ((insn & 0xff7f) == 0xe95f) /* ldrd Rt,Rt2,<label> */
1089 {
1090 /* Constant pool loads. */
1091 unsigned int constant;
1092 CORE_ADDR loc;
1093
1094 offset = bits (inst2, 0, 7) << 2;
1095 if (insn & 0x0080)
1096 loc = start + 4 + offset;
1097 else
1098 loc = start + 4 - offset;
1099
1100 constant = read_memory_unsigned_integer (loc, 4, byte_order);
1101 regs[bits (inst2, 12, 15)] = pv_constant (constant);
1102
1103 constant = read_memory_unsigned_integer (loc + 4, 4, byte_order);
1104 regs[bits (inst2, 8, 11)] = pv_constant (constant);
1105 }
1106
1107 else if (thumb2_instruction_changes_pc (insn, inst2))
1108 {
1109 /* Don't scan past anything that might change control flow. */
1110 break;
1111 }
1112 else
1113 {
1114 /* The optimizer might shove anything into the prologue,
1115 so we just skip what we don't recognize. */
1116 unrecognized_pc = start;
1117 }
1118
1119 start += 2;
1120 }
1121 else if (thumb_instruction_changes_pc (insn))
1122 {
1123 /* Don't scan past anything that might change control flow. */
1124 break;
1125 }
1126 else
1127 {
1128 /* The optimizer might shove anything into the prologue,
1129 so we just skip what we don't recognize. */
1130 unrecognized_pc = start;
1131 }
1132
1133 start += 2;
1134 }
1135
1136 if (arm_debug)
1137 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1138 paddress (gdbarch, start));
1139
1140 if (unrecognized_pc == 0)
1141 unrecognized_pc = start;
1142
1143 if (cache == NULL)
1144 {
1145 do_cleanups (back_to);
1146 return unrecognized_pc;
1147 }
1148
1149 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1150 {
1151 /* Frame pointer is fp. Frame size is constant. */
1152 cache->framereg = ARM_FP_REGNUM;
1153 cache->framesize = -regs[ARM_FP_REGNUM].k;
1154 }
1155 else if (pv_is_register (regs[THUMB_FP_REGNUM], ARM_SP_REGNUM))
1156 {
1157 /* Frame pointer is r7. Frame size is constant. */
1158 cache->framereg = THUMB_FP_REGNUM;
1159 cache->framesize = -regs[THUMB_FP_REGNUM].k;
1160 }
1161 else
1162 {
1163 /* Try the stack pointer... this is a bit desperate. */
1164 cache->framereg = ARM_SP_REGNUM;
1165 cache->framesize = -regs[ARM_SP_REGNUM].k;
1166 }
1167
1168 for (i = 0; i < 16; i++)
1169 if (pv_area_find_reg (stack, gdbarch, i, &offset))
1170 cache->saved_regs[i].addr = offset;
1171
1172 do_cleanups (back_to);
1173 return unrecognized_pc;
1174 }
1175
1176
1177 /* Try to analyze the instructions starting from PC, which load symbol
1178 __stack_chk_guard. Return the address of instruction after loading this
1179 symbol, set the dest register number to *BASEREG, and set the size of
1180 instructions for loading symbol in OFFSET. Return 0 if instructions are
1181 not recognized. */
1182
static CORE_ADDR
arm_analyze_load_stack_chk_guard(CORE_ADDR pc, struct gdbarch *gdbarch,
				 unsigned int *destreg, int *offset)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  unsigned int low, high, address;

  /* ADDRESS stays zero (the "not recognized" result) unless one of
     the load sequences below matches.  */
  address = 0;
  if (is_thumb)
    {
      unsigned short insn1
	= read_memory_unsigned_integer (pc, 2, byte_order_for_code);

      if ((insn1 & 0xf800) == 0x4800) /* ldr Rd, #immed */
	{
	  /* PC-relative literal load: one 16-bit instruction.  */
	  *destreg = bits (insn1, 8, 10);
	  *offset = 2;
	  /* The Thumb PC base is aligned down to 4 bytes and biased
	     by 4; the 8-bit immediate is in words.  */
	  address = (pc & 0xfffffffc) + 4 + (bits (insn1, 0, 7) << 2);
	  /* Fetch the literal itself, which holds the symbol address.  */
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);
	}
      else if ((insn1 & 0xfbf0) == 0xf240) /* movw Rd, #const */
	{
	  /* Possible movw/movt pair: two 32-bit Thumb-2 instructions
	     (four halfwords in total).  */
	  unsigned short insn2
	    = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

	  low = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);

	  insn1
	    = read_memory_unsigned_integer (pc + 4, 2, byte_order_for_code);
	  insn2
	    = read_memory_unsigned_integer (pc + 6, 2, byte_order_for_code);

	  /* movt Rd, #const */
	  if ((insn1 & 0xfbc0) == 0xf2c0)
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_T (insn1, insn2);
	      *destreg = bits (insn2, 8, 11);
	      *offset = 8;
	      /* movw supplies the low half, movt the high half.  */
	      address = (high << 16 | low);
	    }
	}
    }
  else
    {
      unsigned int insn
	= read_memory_unsigned_integer (pc, 4, byte_order_for_code);

      if ((insn & 0x0e5f0000) == 0x041f0000) /* ldr Rd, [PC, #immed] */
	{
	  /* PC-relative literal load; the ARM-mode PC reads as the
	     instruction address plus 8.  */
	  address = bits (insn, 0, 11) + pc + 8;
	  address = read_memory_unsigned_integer (address, 4,
						  byte_order_for_code);

	  *destreg = bits (insn, 12, 15);
	  *offset = 4;
	}
      else if ((insn & 0x0ff00000) == 0x03000000) /* movw Rd, #const */
	{
	  low = EXTRACT_MOVW_MOVT_IMM_A (insn);

	  insn
	    = read_memory_unsigned_integer (pc + 4, 4, byte_order_for_code);

	  if ((insn & 0x0ff00000) == 0x03400000)	/* movt Rd, #const */
	    {
	      high = EXTRACT_MOVW_MOVT_IMM_A (insn);
	      *destreg = bits (insn, 12, 15);
	      *offset = 8;
	      address = (high << 16 | low);
	    }
	}
    }

  return address;
}
1260
1261 /* Try to skip a sequence of instructions used for stack protector. If PC
1262 points to the first instruction of this sequence, return the address of
1263 first instruction after this sequence, otherwise, return original PC.
1264
1265 On arm, this sequence of instructions is composed of mainly three steps,
1266 Step 1: load symbol __stack_chk_guard,
1267 Step 2: load from address of __stack_chk_guard,
1268 Step 3: store it to somewhere else.
1269
1270 Usually, instructions on step 2 and step 3 are the same on various ARM
1271 architectures. On step 2, it is one instruction 'ldr Rx, [Rn, #0]', and
1272 on step 3, it is also one instruction 'str Rx, [r7, #immd]'. However,
1273 instructions in step 1 vary from different ARM architectures. On ARMv7,
1274 they are,
1275
1276 movw Rn, #:lower16:__stack_chk_guard
1277 movt Rn, #:upper16:__stack_chk_guard
1278
1279 On ARMv5t, it is,
1280
1281 ldr Rn, .Label
1282 ....
   .Label:
1284 .word __stack_chk_guard
1285
1286 Since ldr/str is a very popular instruction, we can't use them as
1287 'fingerprint' or 'signature' of stack protector sequence. Here we choose
1288 sequence {movw/movt, ldr}/ldr/str plus symbol __stack_chk_guard, if not
   stripped, as the 'fingerprint' of a stack protector code sequence.  */
1290
static CORE_ADDR
arm_skip_stack_protector(CORE_ADDR pc, struct gdbarch *gdbarch)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int basereg;
  struct bound_minimal_symbol stack_chk_guard;
  int offset;
  int is_thumb = arm_pc_is_thumb (gdbarch, pc);
  CORE_ADDR addr;

  /* Try to parse the instructions in Step 1.  On success ADDR is the
     loaded address, BASEREG the register it was loaded into, and
     OFFSET the byte size of the loading sequence.  */
  addr = arm_analyze_load_stack_chk_guard (pc, gdbarch,
					   &basereg, &offset);
  if (!addr)
    return pc;

  stack_chk_guard = lookup_minimal_symbol_by_pc (addr);
  /* ADDR must correspond to a symbol whose name is __stack_chk_guard.
     Otherwise, this sequence cannot be for stack protector.  */
  if (stack_chk_guard.minsym == NULL
      || !startswith (MSYMBOL_LINKAGE_NAME (stack_chk_guard.minsym), "__stack_chk_guard"))
    return pc;

  if (is_thumb)
    {
      unsigned int destreg;
      unsigned short insn
	= read_memory_unsigned_integer (pc + offset, 2, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6800)
	return pc;
      /* The load must read through the register that received the
	 address of __stack_chk_guard in Step 1.  */
      if (bits (insn, 3, 5) != basereg)
	return pc;
      destreg = bits (insn, 0, 2);

      insn = read_memory_unsigned_integer (pc + offset + 2, 2,
					   byte_order_for_code);
      /* Step 3: str Rd, [Rn, #immed], encoding T1.  */
      if ((insn & 0xf800) != 0x6000)
	return pc;
      /* The store must spill the same register the guard value was
	 loaded into.  */
      if (destreg != bits (insn, 0, 2))
	return pc;
    }
  else
    {
      unsigned int destreg;
      unsigned int insn
	= read_memory_unsigned_integer (pc + offset, 4, byte_order_for_code);

      /* Step 2: ldr Rd, [Rn, #immed], encoding A1.  */
      if ((insn & 0x0e500000) != 0x04100000)
	return pc;
      /* The load base must be the register set up in Step 1.  */
      if (bits (insn, 16, 19) != basereg)
	return pc;
      destreg = bits (insn, 12, 15);
      /* Step 3: str Rd, [Rn, #immed], encoding A1.  */
      insn = read_memory_unsigned_integer (pc + offset + 4,
					   4, byte_order_for_code);
      if ((insn & 0x0e500000) != 0x04000000)
	return pc;
      /* Same register loaded in Step 2 must be stored in Step 3.  */
      if (bits (insn, 12, 15) != destreg)
	return pc;
    }
  /* The size of total two instructions ldr/str is 4 on Thumb-2, while 8
     on arm.  */
  if (is_thumb)
    return pc + offset + 4;
  else
    return pc + offset + 8;
}
1362
1363 /* Advance the PC across any function entry prologue instructions to
1364 reach some "real" code.
1365
1366 The APCS (ARM Procedure Call Standard) defines the following
1367 prologue:
1368
1369 mov ip, sp
1370 [stmfd sp!, {a1,a2,a3,a4}]
1371 stmfd sp!, {...,fp,ip,lr,pc}
1372 [stfe f7, [sp, #-12]!]
1373 [stfe f6, [sp, #-12]!]
1374 [stfe f5, [sp, #-12]!]
1375 [stfe f4, [sp, #-12]!]
1376 sub fp, ip, #nn @@ nn == 20 or 4 depending on second insn. */
1377
1378 static CORE_ADDR
1379 arm_skip_prologue (struct gdbarch *gdbarch, CORE_ADDR pc)
1380 {
1381 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1382 unsigned long inst;
1383 CORE_ADDR func_addr, limit_pc;
1384
1385 /* See if we can determine the end of the prologue via the symbol table.
1386 If so, then return either PC, or the PC after the prologue, whichever
1387 is greater. */
1388 if (find_pc_partial_function (pc, NULL, &func_addr, NULL))
1389 {
1390 CORE_ADDR post_prologue_pc
1391 = skip_prologue_using_sal (gdbarch, func_addr);
1392 struct compunit_symtab *cust = find_pc_compunit_symtab (func_addr);
1393
1394 if (post_prologue_pc)
1395 post_prologue_pc
1396 = arm_skip_stack_protector (post_prologue_pc, gdbarch);
1397
1398
1399 /* GCC always emits a line note before the prologue and another
1400 one after, even if the two are at the same address or on the
1401 same line. Take advantage of this so that we do not need to
1402 know every instruction that might appear in the prologue. We
1403 will have producer information for most binaries; if it is
1404 missing (e.g. for -gstabs), assuming the GNU tools. */
1405 if (post_prologue_pc
1406 && (cust == NULL
1407 || COMPUNIT_PRODUCER (cust) == NULL
1408 || startswith (COMPUNIT_PRODUCER (cust), "GNU ")
1409 || startswith (COMPUNIT_PRODUCER (cust), "clang ")))
1410 return post_prologue_pc;
1411
1412 if (post_prologue_pc != 0)
1413 {
1414 CORE_ADDR analyzed_limit;
1415
1416 /* For non-GCC compilers, make sure the entire line is an
1417 acceptable prologue; GDB will round this function's
1418 return value up to the end of the following line so we
1419 can not skip just part of a line (and we do not want to).
1420
1421 RealView does not treat the prologue specially, but does
1422 associate prologue code with the opening brace; so this
1423 lets us skip the first line if we think it is the opening
1424 brace. */
1425 if (arm_pc_is_thumb (gdbarch, func_addr))
1426 analyzed_limit = thumb_analyze_prologue (gdbarch, func_addr,
1427 post_prologue_pc, NULL);
1428 else
1429 analyzed_limit = arm_analyze_prologue (gdbarch, func_addr,
1430 post_prologue_pc, NULL);
1431
1432 if (analyzed_limit != post_prologue_pc)
1433 return func_addr;
1434
1435 return post_prologue_pc;
1436 }
1437 }
1438
1439 /* Can't determine prologue from the symbol table, need to examine
1440 instructions. */
1441
1442 /* Find an upper limit on the function prologue using the debug
1443 information. If the debug information could not be used to provide
1444 that bound, then use an arbitrary large number as the upper bound. */
1445 /* Like arm_scan_prologue, stop no later than pc + 64. */
1446 limit_pc = skip_prologue_using_sal (gdbarch, pc);
1447 if (limit_pc == 0)
1448 limit_pc = pc + 64; /* Magic. */
1449
1450
1451 /* Check if this is Thumb code. */
1452 if (arm_pc_is_thumb (gdbarch, pc))
1453 return thumb_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1454 else
1455 return arm_analyze_prologue (gdbarch, pc, limit_pc, NULL);
1456 }
1457
1458 /* *INDENT-OFF* */
1459 /* Function: thumb_scan_prologue (helper function for arm_scan_prologue)
1460 This function decodes a Thumb function prologue to determine:
1461 1) the size of the stack frame
1462 2) which registers are saved on it
1463 3) the offsets of saved regs
1464 4) the offset from the stack pointer to the frame pointer
1465
1466 A typical Thumb function prologue would create this stack frame
1467 (offsets relative to FP)
1468 old SP -> 24 stack parameters
1469 20 LR
1470 16 R7
1471 R7 -> 0 local variables (16 bytes)
1472 SP -> -12 additional stack space (12 bytes)
1473 The frame size would thus be 36 bytes, and the frame offset would be
1474 12 bytes. The frame register is R7.
1475
1476 The comments for thumb_skip_prolog() describe the algorithm we use
1477 to detect the end of the prolog. */
1478 /* *INDENT-ON* */
1479
1480 static void
1481 thumb_scan_prologue (struct gdbarch *gdbarch, CORE_ADDR prev_pc,
1482 CORE_ADDR block_addr, struct arm_prologue_cache *cache)
1483 {
1484 CORE_ADDR prologue_start;
1485 CORE_ADDR prologue_end;
1486
1487 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1488 &prologue_end))
1489 {
1490 /* See comment in arm_scan_prologue for an explanation of
1491 this heuristics. */
1492 if (prologue_end > prologue_start + 64)
1493 {
1494 prologue_end = prologue_start + 64;
1495 }
1496 }
1497 else
1498 /* We're in the boondocks: we have no idea where the start of the
1499 function is. */
1500 return;
1501
1502 prologue_end = min (prologue_end, prev_pc);
1503
1504 thumb_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1505 }
1506
1507 /* Return 1 if THIS_INSTR might change control flow, 0 otherwise. */
1508
static int
arm_instruction_changes_pc (uint32_t this_instr)
{
  /* Condition field 0xf (NV) selects the unconditional instruction
     space, which has its own encoding layout.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    /* Unconditional instructions.  */
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	/* Branch with Link and change to Thumb.  */
	return 1;
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	return 0;
      default:
	return 0;
      }
  else
    /* Conditional instructions: dispatch on the major opcode field.  */
    switch (bits (this_instr, 25, 27))
      {
      case 0x0:
	if (bits (this_instr, 23, 24) == 2 && bit (this_instr, 20) == 0)
	  {
	    /* Multiplies and extra load/stores.  */
	    if (bit (this_instr, 4) == 1 && bit (this_instr, 7) == 1)
	      /* Neither multiplies nor extension load/stores are allowed
		 to modify PC.  */
	      return 0;

	    /* Otherwise, miscellaneous instructions.  */

	    /* BX <reg>, BXJ <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff2
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      return 1;

	    /* Other miscellaneous instructions are unpredictable if they
	       modify PC.  */
	    return 0;
	  }
	/* Data processing instruction.  Fall through.  */

      case 0x1:
	/* A data-processing instruction writes the PC when Rd is 15.  */
	if (bits (this_instr, 12, 15) == 15)
	  return 1;
	else
	  return 0;

      case 0x2:
      case 0x3:
	/* Media instructions and architecturally undefined instructions.  */
	if (bits (this_instr, 25, 27) == 3 && bit (this_instr, 4) == 1)
	  return 0;

	/* Stores.  */
	if (bit (this_instr, 20) == 0)
	  return 0;

	/* Loads.  */
	if (bits (this_instr, 12, 15) == ARM_PC_REGNUM)
	  return 1;
	else
	  return 0;

      case 0x4:
	/* Load/store multiple.  Only a load (bit 20) whose register
	   list includes r15 (bit 15) can change the PC.  */
	if (bit (this_instr, 20) == 1 && bit (this_instr, 15) == 1)
	  return 1;
	else
	  return 0;

      case 0x5:
	/* Branch and branch with link.  */
	return 1;

      case 0x6:
      case 0x7:
	/* Coprocessor transfers or SWIs can not affect PC.  */
	return 0;

      default:
	internal_error (__FILE__, __LINE__, _("bad value in switch"));
      }
}
1598
1599 /* Return 1 if the ARM instruction INSN restores SP in epilogue, 0
1600 otherwise. */
1601
1602 static int
1603 arm_instruction_restores_sp (unsigned int insn)
1604 {
1605 if (bits (insn, 28, 31) != INST_NV)
1606 {
1607 if ((insn & 0x0df0f000) == 0x0080d000
1608 /* ADD SP (register or immediate). */
1609 || (insn & 0x0df0f000) == 0x0040d000
1610 /* SUB SP (register or immediate). */
1611 || (insn & 0x0ffffff0) == 0x01a0d000
1612 /* MOV SP. */
1613 || (insn & 0x0fff0000) == 0x08bd0000
1614 /* POP (LDMIA). */
1615 || (insn & 0x0fff0000) == 0x049d0000)
1616 /* POP of a single register. */
1617 return 1;
1618 }
1619
1620 return 0;
1621 }
1622
1623 /* Analyze an ARM mode prologue starting at PROLOGUE_START and
1624 continuing no further than PROLOGUE_END. If CACHE is non-NULL,
1625 fill it in. Return the first address not recognized as a prologue
1626 instruction.
1627
1628 We recognize all the instructions typically found in ARM prologues,
1629 plus harmless instructions which can be skipped (either for analysis
1630 purposes, or a more restrictive set that can be skipped when finding
1631 the end of the prologue). */
1632
1633 static CORE_ADDR
1634 arm_analyze_prologue (struct gdbarch *gdbarch,
1635 CORE_ADDR prologue_start, CORE_ADDR prologue_end,
1636 struct arm_prologue_cache *cache)
1637 {
1638 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1639 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
1640 int regno;
1641 CORE_ADDR offset, current_pc;
1642 pv_t regs[ARM_FPS_REGNUM];
1643 struct pv_area *stack;
1644 struct cleanup *back_to;
1645 CORE_ADDR unrecognized_pc = 0;
1646
1647 /* Search the prologue looking for instructions that set up the
1648 frame pointer, adjust the stack pointer, and save registers.
1649
1650 Be careful, however, and if it doesn't look like a prologue,
1651 don't try to scan it. If, for instance, a frameless function
1652 begins with stmfd sp!, then we will tell ourselves there is
1653 a frame, which will confuse stack traceback, as well as "finish"
1654 and other operations that rely on a knowledge of the stack
1655 traceback. */
1656
1657 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1658 regs[regno] = pv_register (regno, 0);
1659 stack = make_pv_area (ARM_SP_REGNUM, gdbarch_addr_bit (gdbarch));
1660 back_to = make_cleanup_free_pv_area (stack);
1661
1662 for (current_pc = prologue_start;
1663 current_pc < prologue_end;
1664 current_pc += 4)
1665 {
1666 unsigned int insn
1667 = read_memory_unsigned_integer (current_pc, 4, byte_order_for_code);
1668
1669 if (insn == 0xe1a0c00d) /* mov ip, sp */
1670 {
1671 regs[ARM_IP_REGNUM] = regs[ARM_SP_REGNUM];
1672 continue;
1673 }
1674 else if ((insn & 0xfff00000) == 0xe2800000 /* add Rd, Rn, #n */
1675 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1676 {
1677 unsigned imm = insn & 0xff; /* immediate value */
1678 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1679 int rd = bits (insn, 12, 15);
1680 imm = (imm >> rot) | (imm << (32 - rot));
1681 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], imm);
1682 continue;
1683 }
1684 else if ((insn & 0xfff00000) == 0xe2400000 /* sub Rd, Rn, #n */
1685 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1686 {
1687 unsigned imm = insn & 0xff; /* immediate value */
1688 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1689 int rd = bits (insn, 12, 15);
1690 imm = (imm >> rot) | (imm << (32 - rot));
1691 regs[rd] = pv_add_constant (regs[bits (insn, 16, 19)], -imm);
1692 continue;
1693 }
1694 else if ((insn & 0xffff0fff) == 0xe52d0004) /* str Rd,
1695 [sp, #-4]! */
1696 {
1697 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1698 break;
1699 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1700 pv_area_store (stack, regs[ARM_SP_REGNUM], 4,
1701 regs[bits (insn, 12, 15)]);
1702 continue;
1703 }
1704 else if ((insn & 0xffff0000) == 0xe92d0000)
1705 /* stmfd sp!, {..., fp, ip, lr, pc}
1706 or
1707 stmfd sp!, {a1, a2, a3, a4} */
1708 {
1709 int mask = insn & 0xffff;
1710
1711 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1712 break;
1713
1714 /* Calculate offsets of saved registers. */
1715 for (regno = ARM_PC_REGNUM; regno >= 0; regno--)
1716 if (mask & (1 << regno))
1717 {
1718 regs[ARM_SP_REGNUM]
1719 = pv_add_constant (regs[ARM_SP_REGNUM], -4);
1720 pv_area_store (stack, regs[ARM_SP_REGNUM], 4, regs[regno]);
1721 }
1722 }
1723 else if ((insn & 0xffff0000) == 0xe54b0000 /* strb rx,[r11,#-n] */
1724 || (insn & 0xffff00f0) == 0xe14b00b0 /* strh rx,[r11,#-n] */
1725 || (insn & 0xffffc000) == 0xe50b0000) /* str rx,[r11,#-n] */
1726 {
1727 /* No need to add this to saved_regs -- it's just an arg reg. */
1728 continue;
1729 }
1730 else if ((insn & 0xffff0000) == 0xe5cd0000 /* strb rx,[sp,#n] */
1731 || (insn & 0xffff00f0) == 0xe1cd00b0 /* strh rx,[sp,#n] */
1732 || (insn & 0xffffc000) == 0xe58d0000) /* str rx,[sp,#n] */
1733 {
1734 /* No need to add this to saved_regs -- it's just an arg reg. */
1735 continue;
1736 }
1737 else if ((insn & 0xfff00000) == 0xe8800000 /* stm Rn,
1738 { registers } */
1739 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1740 {
1741 /* No need to add this to saved_regs -- it's just arg regs. */
1742 continue;
1743 }
1744 else if ((insn & 0xfffff000) == 0xe24cb000) /* sub fp, ip #n */
1745 {
1746 unsigned imm = insn & 0xff; /* immediate value */
1747 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1748 imm = (imm >> rot) | (imm << (32 - rot));
1749 regs[ARM_FP_REGNUM] = pv_add_constant (regs[ARM_IP_REGNUM], -imm);
1750 }
1751 else if ((insn & 0xfffff000) == 0xe24dd000) /* sub sp, sp #n */
1752 {
1753 unsigned imm = insn & 0xff; /* immediate value */
1754 unsigned rot = (insn & 0xf00) >> 7; /* rotate amount */
1755 imm = (imm >> rot) | (imm << (32 - rot));
1756 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -imm);
1757 }
1758 else if ((insn & 0xffff7fff) == 0xed6d0103 /* stfe f?,
1759 [sp, -#c]! */
1760 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1761 {
1762 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1763 break;
1764
1765 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1766 regno = ARM_F0_REGNUM + ((insn >> 12) & 0x07);
1767 pv_area_store (stack, regs[ARM_SP_REGNUM], 12, regs[regno]);
1768 }
1769 else if ((insn & 0xffbf0fff) == 0xec2d0200 /* sfmfd f0, 4,
1770 [sp!] */
1771 && gdbarch_tdep (gdbarch)->have_fpa_registers)
1772 {
1773 int n_saved_fp_regs;
1774 unsigned int fp_start_reg, fp_bound_reg;
1775
1776 if (pv_area_store_would_trash (stack, regs[ARM_SP_REGNUM]))
1777 break;
1778
1779 if ((insn & 0x800) == 0x800) /* N0 is set */
1780 {
1781 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1782 n_saved_fp_regs = 3;
1783 else
1784 n_saved_fp_regs = 1;
1785 }
1786 else
1787 {
1788 if ((insn & 0x40000) == 0x40000) /* N1 is set */
1789 n_saved_fp_regs = 2;
1790 else
1791 n_saved_fp_regs = 4;
1792 }
1793
1794 fp_start_reg = ARM_F0_REGNUM + ((insn >> 12) & 0x7);
1795 fp_bound_reg = fp_start_reg + n_saved_fp_regs;
1796 for (; fp_start_reg < fp_bound_reg; fp_start_reg++)
1797 {
1798 regs[ARM_SP_REGNUM] = pv_add_constant (regs[ARM_SP_REGNUM], -12);
1799 pv_area_store (stack, regs[ARM_SP_REGNUM], 12,
1800 regs[fp_start_reg++]);
1801 }
1802 }
1803 else if ((insn & 0xff000000) == 0xeb000000 && cache == NULL) /* bl */
1804 {
1805 /* Allow some special function calls when skipping the
1806 prologue; GCC generates these before storing arguments to
1807 the stack. */
1808 CORE_ADDR dest = BranchDest (current_pc, insn);
1809
1810 if (skip_prologue_function (gdbarch, dest, 0))
1811 continue;
1812 else
1813 break;
1814 }
1815 else if ((insn & 0xf0000000) != 0xe0000000)
1816 break; /* Condition not true, exit early. */
1817 else if (arm_instruction_changes_pc (insn))
1818 /* Don't scan past anything that might change control flow. */
1819 break;
1820 else if (arm_instruction_restores_sp (insn))
1821 {
1822 /* Don't scan past the epilogue. */
1823 break;
1824 }
1825 else if ((insn & 0xfe500000) == 0xe8100000 /* ldm */
1826 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1827 /* Ignore block loads from the stack, potentially copying
1828 parameters from memory. */
1829 continue;
1830 else if ((insn & 0xfc500000) == 0xe4100000
1831 && pv_is_register (regs[bits (insn, 16, 19)], ARM_SP_REGNUM))
1832 /* Similarly ignore single loads from the stack. */
1833 continue;
1834 else if ((insn & 0xffff0ff0) == 0xe1a00000)
1835 /* MOV Rd, Rm. Skip register copies, i.e. saves to another
1836 register instead of the stack. */
1837 continue;
1838 else
1839 {
1840 /* The optimizer might shove anything into the prologue, if
1841 we build up cache (cache != NULL) from scanning prologue,
1842 we just skip what we don't recognize and scan further to
1843 make cache as complete as possible. However, if we skip
1844 prologue, we'll stop immediately on unrecognized
1845 instruction. */
1846 unrecognized_pc = current_pc;
1847 if (cache != NULL)
1848 continue;
1849 else
1850 break;
1851 }
1852 }
1853
1854 if (unrecognized_pc == 0)
1855 unrecognized_pc = current_pc;
1856
1857 if (cache)
1858 {
1859 int framereg, framesize;
1860
1861 /* The frame size is just the distance from the frame register
1862 to the original stack pointer. */
1863 if (pv_is_register (regs[ARM_FP_REGNUM], ARM_SP_REGNUM))
1864 {
1865 /* Frame pointer is fp. */
1866 framereg = ARM_FP_REGNUM;
1867 framesize = -regs[ARM_FP_REGNUM].k;
1868 }
1869 else
1870 {
1871 /* Try the stack pointer... this is a bit desperate. */
1872 framereg = ARM_SP_REGNUM;
1873 framesize = -regs[ARM_SP_REGNUM].k;
1874 }
1875
1876 cache->framereg = framereg;
1877 cache->framesize = framesize;
1878
1879 for (regno = 0; regno < ARM_FPS_REGNUM; regno++)
1880 if (pv_area_find_reg (stack, gdbarch, regno, &offset))
1881 cache->saved_regs[regno].addr = offset;
1882 }
1883
1884 if (arm_debug)
1885 fprintf_unfiltered (gdb_stdlog, "Prologue scan stopped at %s\n",
1886 paddress (gdbarch, unrecognized_pc));
1887
1888 do_cleanups (back_to);
1889 return unrecognized_pc;
1890 }
1891
1892 static void
1893 arm_scan_prologue (struct frame_info *this_frame,
1894 struct arm_prologue_cache *cache)
1895 {
1896 struct gdbarch *gdbarch = get_frame_arch (this_frame);
1897 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
1898 int regno;
1899 CORE_ADDR prologue_start, prologue_end, current_pc;
1900 CORE_ADDR prev_pc = get_frame_pc (this_frame);
1901 CORE_ADDR block_addr = get_frame_address_in_block (this_frame);
1902 pv_t regs[ARM_FPS_REGNUM];
1903 struct pv_area *stack;
1904 struct cleanup *back_to;
1905 CORE_ADDR offset;
1906
1907 /* Assume there is no frame until proven otherwise. */
1908 cache->framereg = ARM_SP_REGNUM;
1909 cache->framesize = 0;
1910
1911 /* Check for Thumb prologue. */
1912 if (arm_frame_is_thumb (this_frame))
1913 {
1914 thumb_scan_prologue (gdbarch, prev_pc, block_addr, cache);
1915 return;
1916 }
1917
1918 /* Find the function prologue. If we can't find the function in
1919 the symbol table, peek in the stack frame to find the PC. */
1920 if (find_pc_partial_function (block_addr, NULL, &prologue_start,
1921 &prologue_end))
1922 {
1923 /* One way to find the end of the prologue (which works well
1924 for unoptimized code) is to do the following:
1925
1926 struct symtab_and_line sal = find_pc_line (prologue_start, 0);
1927
1928 if (sal.line == 0)
1929 prologue_end = prev_pc;
1930 else if (sal.end < prologue_end)
1931 prologue_end = sal.end;
1932
1933 This mechanism is very accurate so long as the optimizer
1934 doesn't move any instructions from the function body into the
1935 prologue. If this happens, sal.end will be the last
1936 instruction in the first hunk of prologue code just before
1937 the first instruction that the scheduler has moved from
1938 the body to the prologue.
1939
1940 In order to make sure that we scan all of the prologue
1941 instructions, we use a slightly less accurate mechanism which
1942 may scan more than necessary. To help compensate for this
1943 lack of accuracy, the prologue scanning loop below contains
1944 several clauses which'll cause the loop to terminate early if
1945 an implausible prologue instruction is encountered.
1946
1947 The expression
1948
1949 prologue_start + 64
1950
1951 is a suitable endpoint since it accounts for the largest
1952 possible prologue plus up to five instructions inserted by
1953 the scheduler. */
1954
1955 if (prologue_end > prologue_start + 64)
1956 {
1957 prologue_end = prologue_start + 64; /* See above. */
1958 }
1959 }
1960 else
1961 {
1962 /* We have no symbol information. Our only option is to assume this
1963 function has a standard stack frame and the normal frame register.
1964 Then, we can find the value of our frame pointer on entrance to
1965 the callee (or at the present moment if this is the innermost frame).
1966 The value stored there should be the address of the stmfd + 8. */
1967 CORE_ADDR frame_loc;
1968 LONGEST return_value;
1969
1970 frame_loc = get_frame_register_unsigned (this_frame, ARM_FP_REGNUM);
1971 if (!safe_read_memory_integer (frame_loc, 4, byte_order, &return_value))
1972 return;
1973 else
1974 {
1975 prologue_start = gdbarch_addr_bits_remove
1976 (gdbarch, return_value) - 8;
1977 prologue_end = prologue_start + 64; /* See above. */
1978 }
1979 }
1980
1981 if (prev_pc < prologue_end)
1982 prologue_end = prev_pc;
1983
1984 arm_analyze_prologue (gdbarch, prologue_start, prologue_end, cache);
1985 }
1986
1987 static struct arm_prologue_cache *
1988 arm_make_prologue_cache (struct frame_info *this_frame)
1989 {
1990 int reg;
1991 struct arm_prologue_cache *cache;
1992 CORE_ADDR unwound_fp;
1993
1994 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
1995 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
1996
1997 arm_scan_prologue (this_frame, cache);
1998
1999 unwound_fp = get_frame_register_unsigned (this_frame, cache->framereg);
2000 if (unwound_fp == 0)
2001 return cache;
2002
2003 cache->prev_sp = unwound_fp + cache->framesize;
2004
2005 /* Calculate actual addresses of saved registers using offsets
2006 determined by arm_scan_prologue. */
2007 for (reg = 0; reg < gdbarch_num_regs (get_frame_arch (this_frame)); reg++)
2008 if (trad_frame_addr_p (cache->saved_regs, reg))
2009 cache->saved_regs[reg].addr += cache->prev_sp;
2010
2011 return cache;
2012 }
2013
2014 /* Implementation of the stop_reason hook for arm_prologue frames. */
2015
2016 static enum unwind_stop_reason
2017 arm_prologue_unwind_stop_reason (struct frame_info *this_frame,
2018 void **this_cache)
2019 {
2020 struct arm_prologue_cache *cache;
2021 CORE_ADDR pc;
2022
2023 if (*this_cache == NULL)
2024 *this_cache = arm_make_prologue_cache (this_frame);
2025 cache = (struct arm_prologue_cache *) *this_cache;
2026
2027 /* This is meant to halt the backtrace at "_start". */
2028 pc = get_frame_pc (this_frame);
2029 if (pc <= gdbarch_tdep (get_frame_arch (this_frame))->lowest_pc)
2030 return UNWIND_OUTERMOST;
2031
2032 /* If we've hit a wall, stop. */
2033 if (cache->prev_sp == 0)
2034 return UNWIND_OUTERMOST;
2035
2036 return UNWIND_NO_REASON;
2037 }
2038
2039 /* Our frame ID for a normal frame is the current function's starting PC
2040 and the caller's SP when we were called. */
2041
2042 static void
2043 arm_prologue_this_id (struct frame_info *this_frame,
2044 void **this_cache,
2045 struct frame_id *this_id)
2046 {
2047 struct arm_prologue_cache *cache;
2048 struct frame_id id;
2049 CORE_ADDR pc, func;
2050
2051 if (*this_cache == NULL)
2052 *this_cache = arm_make_prologue_cache (this_frame);
2053 cache = (struct arm_prologue_cache *) *this_cache;
2054
2055 /* Use function start address as part of the frame ID. If we cannot
2056 identify the start address (due to missing symbol information),
2057 fall back to just using the current PC. */
2058 pc = get_frame_pc (this_frame);
2059 func = get_frame_func (this_frame);
2060 if (!func)
2061 func = pc;
2062
2063 id = frame_id_build (cache->prev_sp, func);
2064 *this_id = id;
2065 }
2066
2067 static struct value *
2068 arm_prologue_prev_register (struct frame_info *this_frame,
2069 void **this_cache,
2070 int prev_regnum)
2071 {
2072 struct gdbarch *gdbarch = get_frame_arch (this_frame);
2073 struct arm_prologue_cache *cache;
2074
2075 if (*this_cache == NULL)
2076 *this_cache = arm_make_prologue_cache (this_frame);
2077 cache = (struct arm_prologue_cache *) *this_cache;
2078
2079 /* If we are asked to unwind the PC, then we need to return the LR
2080 instead. The prologue may save PC, but it will point into this
2081 frame's prologue, not the next frame's resume location. Also
2082 strip the saved T bit. A valid LR may have the low bit set, but
2083 a valid PC never does. */
2084 if (prev_regnum == ARM_PC_REGNUM)
2085 {
2086 CORE_ADDR lr;
2087
2088 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2089 return frame_unwind_got_constant (this_frame, prev_regnum,
2090 arm_addr_bits_remove (gdbarch, lr));
2091 }
2092
2093 /* SP is generally not saved to the stack, but this frame is
2094 identified by the next frame's stack pointer at the time of the call.
2095 The value was already reconstructed into PREV_SP. */
2096 if (prev_regnum == ARM_SP_REGNUM)
2097 return frame_unwind_got_constant (this_frame, prev_regnum, cache->prev_sp);
2098
2099 /* The CPSR may have been changed by the call instruction and by the
2100 called function. The only bit we can reconstruct is the T bit,
2101 by checking the low bit of LR as of the call. This is a reliable
2102 indicator of Thumb-ness except for some ARM v4T pre-interworking
2103 Thumb code, which could get away with a clear low bit as long as
2104 the called function did not use bx. Guess that all other
2105 bits are unchanged; the condition flags are presumably lost,
2106 but the processor status is likely valid. */
2107 if (prev_regnum == ARM_PS_REGNUM)
2108 {
2109 CORE_ADDR lr, cpsr;
2110 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
2111
2112 cpsr = get_frame_register_unsigned (this_frame, prev_regnum);
2113 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
2114 if (IS_THUMB_ADDR (lr))
2115 cpsr |= t_bit;
2116 else
2117 cpsr &= ~t_bit;
2118 return frame_unwind_got_constant (this_frame, prev_regnum, cpsr);
2119 }
2120
2121 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
2122 prev_regnum);
2123 }
2124
/* Unwinder for normal ARM frames, driven by the prologue analyzer
   (see arm_make_prologue_cache / arm_scan_prologue).  */
struct frame_unwind arm_prologue_unwind = {
  NORMAL_FRAME,
  arm_prologue_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  default_frame_sniffer
};
2133
2134 /* Maintain a list of ARM exception table entries per objfile, similar to the
2135 list of mapping symbols. We only cache entries for standard ARM-defined
2136 personality routines; the cache will contain only the frame unwinding
2137 instructions associated with the entry (not the descriptors). */
2138
/* Per-objfile key under which the parsed exception table cache is
   stored (filled in by arm_exidx_new_objfile).  */
static const struct objfile_data *arm_exidx_data_key;

/* One cached exception table entry: the section-relative start address
   of the code region it covers, and the normalized unwind instruction
   bytes for that region (NULL for EXIDX_CANTUNWIND entries).  */
struct arm_exidx_entry
{
  bfd_vma addr;
  gdb_byte *entry;
};
typedef struct arm_exidx_entry arm_exidx_entry_s;
DEF_VEC_O(arm_exidx_entry_s);

/* Per-objfile cache: one vector of entries per BFD section, indexed by
   the section's index.  */
struct arm_exidx_data
{
  VEC(arm_exidx_entry_s) **section_maps;
};
2153
2154 static void
2155 arm_exidx_data_free (struct objfile *objfile, void *arg)
2156 {
2157 struct arm_exidx_data *data = (struct arm_exidx_data *) arg;
2158 unsigned int i;
2159
2160 for (i = 0; i < objfile->obfd->section_count; i++)
2161 VEC_free (arm_exidx_entry_s, data->section_maps[i]);
2162 }
2163
2164 static inline int
2165 arm_compare_exidx_entries (const struct arm_exidx_entry *lhs,
2166 const struct arm_exidx_entry *rhs)
2167 {
2168 return lhs->addr < rhs->addr;
2169 }
2170
/* Return the obj_section of OBJFILE whose unrelocated VMA range
   contains VMA, considering only allocated (SEC_ALLOC) sections.
   Return NULL if no such section exists.  */

static struct obj_section *
arm_obj_section_from_vma (struct objfile *objfile, bfd_vma vma)
{
  struct obj_section *osect;

  ALL_OBJFILE_OSECTIONS (objfile, osect)
    if (bfd_get_section_flags (objfile->obfd,
			       osect->the_bfd_section) & SEC_ALLOC)
      {
	bfd_vma start, size;
	start = bfd_get_section_vma (objfile->obfd, osect->the_bfd_section);
	size = bfd_get_section_size (osect->the_bfd_section);

	/* Half-open interval test: [start, start + size).  */
	if (start <= vma && vma < start + size)
	  return osect;
      }

  return NULL;
}
2190
2191 /* Parse contents of exception table and exception index sections
2192 of OBJFILE, and fill in the exception table entry cache.
2193
2194 For each entry that refers to a standard ARM-defined personality
2195 routine, extract the frame unwinding instructions (from either
2196 the index or the table section). The unwinding instructions
2197 are normalized by:
2198 - extracting them from the rest of the table data
2199 - converting to host endianness
2200 - appending the implicit 0xb0 ("Finish") code
2201
2202 The extracted and normalized instructions are stored for later
2203 retrieval by the arm_find_exidx_entry routine. */
2204
static void
arm_exidx_new_objfile (struct objfile *objfile)
{
  struct cleanup *cleanups;
  struct arm_exidx_data *data;
  asection *exidx, *extab;
  bfd_vma exidx_vma = 0, extab_vma = 0;
  bfd_size_type exidx_size = 0, extab_size = 0;
  gdb_byte *exidx_data = NULL, *extab_data = NULL;
  LONGEST i;

  /* If we've already touched this file, do nothing.  */
  if (!objfile || objfile_data (objfile, arm_exidx_data_key) != NULL)
    return;
  cleanups = make_cleanup (null_cleanup, NULL);

  /* Read contents of exception table and index.  */
  exidx = bfd_get_section_by_name (objfile->obfd, ELF_STRING_ARM_unwind);
  if (exidx)
    {
      exidx_vma = bfd_section_vma (objfile->obfd, exidx);
      exidx_size = bfd_get_section_size (exidx);
      exidx_data = (gdb_byte *) xmalloc (exidx_size);
      make_cleanup (xfree, exidx_data);

      if (!bfd_get_section_contents (objfile->obfd, exidx,
				     exidx_data, 0, exidx_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  extab = bfd_get_section_by_name (objfile->obfd, ".ARM.extab");
  if (extab)
    {
      extab_vma = bfd_section_vma (objfile->obfd, extab);
      extab_size = bfd_get_section_size (extab);
      extab_data = (gdb_byte *) xmalloc (extab_size);
      make_cleanup (xfree, extab_data);

      if (!bfd_get_section_contents (objfile->obfd, extab,
				     extab_data, 0, extab_size))
	{
	  do_cleanups (cleanups);
	  return;
	}
    }

  /* Allocate exception table data structure.  Obstack allocation means
     only the VEC contents need an explicit destructor (see
     arm_exidx_data_free).  */
  data = OBSTACK_ZALLOC (&objfile->objfile_obstack, struct arm_exidx_data);
  set_objfile_data (objfile, arm_exidx_data_key, data);
  data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
				       objfile->obfd->section_count,
				       VEC(arm_exidx_entry_s) *);

  /* Fill in exception table.  Each index entry is a pair of 32-bit
     words: a prel31 offset to the function start, and either an inline
     unwind description or a prel31 offset into .ARM.extab.  */
  for (i = 0; i < exidx_size / 8; i++)
    {
      struct arm_exidx_entry new_exidx_entry;
      bfd_vma idx = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8);
      bfd_vma val = bfd_h_get_32 (objfile->obfd, exidx_data + i * 8 + 4);
      bfd_vma addr = 0, word = 0;
      int n_bytes = 0, n_words = 0;
      struct obj_section *sec;
      gdb_byte *entry = NULL;

      /* Extract address of start of function.  The XOR/subtract pair
	 sign-extends the 31-bit prel31 field; the offset is relative
	 to the index entry's own address.  */
      idx = ((idx & 0x7fffffff) ^ 0x40000000) - 0x40000000;
      idx += exidx_vma + i * 8;

      /* Find section containing function and compute section offset.  */
      sec = arm_obj_section_from_vma (objfile, idx);
      if (sec == NULL)
	continue;
      idx -= bfd_get_section_vma (objfile->obfd, sec->the_bfd_section);

      /* Determine address of exception table entry.  */
      if (val == 1)
	{
	  /* EXIDX_CANTUNWIND -- no exception table entry present.  */
	}
      else if ((val & 0xff000000) == 0x80000000)
	{
	  /* Exception table entry embedded in .ARM.exidx
	     -- must be short form.  */
	  word = val;
	  n_bytes = 3;
	}
      else if (!(val & 0x80000000))
	{
	  /* Exception table entry in .ARM.extab.  Again a sign-extended
	     prel31 offset, relative to the second word of the pair.  */
	  addr = ((val & 0x7fffffff) ^ 0x40000000) - 0x40000000;
	  addr += exidx_vma + i * 8 + 4;

	  if (addr >= extab_vma && addr + 4 <= extab_vma + extab_size)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      if ((word & 0xff000000) == 0x80000000)
		{
		  /* Short form.  */
		  n_bytes = 3;
		}
	      else if ((word & 0xff000000) == 0x81000000
		       || (word & 0xff000000) == 0x82000000)
		{
		  /* Long form.  Bits 16..23 hold the count of
		     additional instruction words.  */
		  n_bytes = 2;
		  n_words = ((word >> 16) & 0xff);
		}
	      else if (!(word & 0x80000000))
		{
		  bfd_vma pers;
		  struct obj_section *pers_sec;
		  int gnu_personality = 0;

		  /* Custom personality routine, addressed by another
		     sign-extended prel31 offset.  The address may have
		     the Thumb bit set; strip it.  */
		  pers = ((word & 0x7fffffff) ^ 0x40000000) - 0x40000000;
		  pers = UNMAKE_THUMB_ADDR (pers + addr - 4);

		  /* Check whether we've got one of the variants of the
		     GNU personality routines.  */
		  pers_sec = arm_obj_section_from_vma (objfile, pers);
		  if (pers_sec)
		    {
		      static const char *personality[] =
			{
			  "__gcc_personality_v0",
			  "__gxx_personality_v0",
			  "__gcj_personality_v0",
			  "__gnu_objc_personality_v0",
			  NULL
			};

		      CORE_ADDR pc = pers + obj_section_offset (pers_sec);
		      int k;

		      for (k = 0; personality[k]; k++)
			if (lookup_minimal_symbol_by_pc_name
			      (pc, personality[k], objfile))
			  {
			    gnu_personality = 1;
			    break;
			  }
		    }

		  /* If so, the next word contains a word count in the high
		     byte, followed by the same unwind instructions as the
		     pre-defined forms.  */
		  if (gnu_personality
		      && addr + 4 <= extab_vma + extab_size)
		    {
		      word = bfd_h_get_32 (objfile->obfd,
					   extab_data + addr - extab_vma);
		      addr += 4;
		      n_bytes = 3;
		      n_words = ((word >> 24) & 0xff);
		    }
		}
	    }
	}

      /* Sanity check address: the continuation words must lie entirely
	 within the extab section, else discard the entry.  */
      if (n_words)
	if (addr < extab_vma || addr + 4 * n_words > extab_vma + extab_size)
	  n_words = n_bytes = 0;

      /* The unwind instructions reside in WORD (only the N_BYTES least
	 significant bytes are valid), followed by N_WORDS words in the
	 extab section starting at ADDR.  */
      if (n_bytes || n_words)
	{
	  gdb_byte *p = entry
	    = (gdb_byte *) obstack_alloc (&objfile->objfile_obstack,
					  n_bytes + n_words * 4 + 1);

	  /* Copy the valid bytes of WORD, most significant first.
	     Note n_bytes has already been decremented when the body
	     runs, so the shifts use 16, 8, 0 for n_bytes == 3.  */
	  while (n_bytes--)
	    *p++ = (gdb_byte) ((word >> (8 * n_bytes)) & 0xff);

	  /* Append the continuation words, big-endian byte order.  */
	  while (n_words--)
	    {
	      word = bfd_h_get_32 (objfile->obfd,
				   extab_data + addr - extab_vma);
	      addr += 4;

	      *p++ = (gdb_byte) ((word >> 24) & 0xff);
	      *p++ = (gdb_byte) ((word >> 16) & 0xff);
	      *p++ = (gdb_byte) ((word >> 8) & 0xff);
	      *p++ = (gdb_byte) (word & 0xff);
	    }

	  /* Implied "Finish" to terminate the list.  */
	  *p++ = 0xb0;
	}

      /* Push entry onto vector.  They are guaranteed to always
	 appear in order of increasing addresses.  */
      new_exidx_entry.addr = idx;
      new_exidx_entry.entry = entry;
      VEC_safe_push (arm_exidx_entry_s,
		     data->section_maps[sec->the_bfd_section->index],
		     &new_exidx_entry);
    }

  do_cleanups (cleanups);
}
2414
2415 /* Search for the exception table entry covering MEMADDR. If one is found,
2416 return a pointer to its data. Otherwise, return 0. If START is non-NULL,
2417 set *START to the start of the region covered by this entry. */
2418
static gdb_byte *
arm_find_exidx_entry (CORE_ADDR memaddr, CORE_ADDR *start)
{
  struct obj_section *sec;

  sec = find_pc_section (memaddr);
  if (sec != NULL)
    {
      struct arm_exidx_data *data;
      VEC(arm_exidx_entry_s) *map;
      /* Entries are stored section-relative, so convert MEMADDR before
	 searching.  */
      struct arm_exidx_entry map_key = { memaddr - obj_section_addr (sec), 0 };
      unsigned int idx;

      data = ((struct arm_exidx_data *)
	      objfile_data (sec->objfile, arm_exidx_data_key));
      if (data != NULL)
	{
	  map = data->section_maps[sec->the_bfd_section->index];
	  if (!VEC_empty (arm_exidx_entry_s, map))
	    {
	      struct arm_exidx_entry *map_sym;

	      idx = VEC_lower_bound (arm_exidx_entry_s, map, &map_key,
				     arm_compare_exidx_entries);

	      /* VEC_lower_bound finds the earliest ordered insertion
		 point.  If the following symbol starts at this exact
		 address, we use that; otherwise, the preceding
		 exception table entry covers this address.  */
	      if (idx < VEC_length (arm_exidx_entry_s, map))
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx);
		  if (map_sym->addr == map_key.addr)
		    {
		      if (start)
			*start = map_sym->addr + obj_section_addr (sec);
		      return map_sym->entry;
		    }
		}

	      if (idx > 0)
		{
		  map_sym = VEC_index (arm_exidx_entry_s, map, idx - 1);
		  if (start)
		    *start = map_sym->addr + obj_section_addr (sec);
		  return map_sym->entry;
		}
	    }
	}
    }

  return NULL;
}
2472
2473 /* Given the current frame THIS_FRAME, and its associated frame unwinding
2474 instruction list from the ARM exception table entry ENTRY, allocate and
2475 return a prologue cache structure describing how to unwind this frame.
2476
2477 Return NULL if the unwinding instruction list contains a "spare",
2478 "reserved" or "refuse to unwind" instruction as defined in section
2479 "9.3 Frame unwinding instructions" of the "Exception Handling ABI
2480 for the ARM Architecture" document. */
2481
static struct arm_prologue_cache *
arm_exidx_fill_cache (struct frame_info *this_frame, gdb_byte *entry)
{
  /* VSP is the "virtual stack pointer" of the EHABI unwinding model;
     it starts at the current SP and is adjusted as instructions are
     interpreted.  */
  CORE_ADDR vsp = 0;
  int vsp_valid = 0;

  struct arm_prologue_cache *cache;
  cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
  cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);

  for (;;)
    {
      gdb_byte insn;

      /* Whenever we reload SP, we actually have to retrieve its
	 actual value in the current frame.  */
      if (!vsp_valid)
	{
	  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
	    {
	      int reg = cache->saved_regs[ARM_SP_REGNUM].realreg;
	      vsp = get_frame_register_unsigned (this_frame, reg);
	    }
	  else
	    {
	      CORE_ADDR addr = cache->saved_regs[ARM_SP_REGNUM].addr;
	      vsp = get_frame_memory_unsigned (this_frame, addr, 4);
	    }

	  vsp_valid = 1;
	}

      /* Decode next unwind instruction.  */
      insn = *entry++;

      if ((insn & 0xc0) == 0)
	{
	  /* 00xxxxxx: vsp += (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp += (offset << 2) + 4;
	}
      else if ((insn & 0xc0) == 0x40)
	{
	  /* 01xxxxxx: vsp -= (xxxxxx << 2) + 4.  */
	  int offset = insn & 0x3f;
	  vsp -= (offset << 2) + 4;
	}
      else if ((insn & 0xf0) == 0x80)
	{
	  /* 1000iiii iiiiiiii: pop r4..r15 under 12-bit mask
	     (bit 0 = r4).  */
	  int mask = ((insn & 0xf) << 8) | *entry++;
	  int i;

	  /* The special case of an all-zero mask identifies
	     "Refuse to unwind".  We return NULL to fall back
	     to the prologue analyzer.  */
	  if (mask == 0)
	    return NULL;

	  /* Pop registers r4..r15 under mask.  */
	  for (i = 0; i < 12; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[4 + i].addr = vsp;
		vsp += 4;
	      }

	  /* Special-case popping SP -- we need to reload vsp.  */
	  if (mask & (1 << (ARM_SP_REGNUM - 4)))
	    vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0x90)
	{
	  /* 1001nnnn: set vsp = r[nnnn].  */
	  int reg = insn & 0xf;

	  /* Reserved cases.  */
	  if (reg == ARM_SP_REGNUM || reg == ARM_PC_REGNUM)
	    return NULL;

	  /* Set SP from another register and mark VSP for reload.  */
	  cache->saved_regs[ARM_SP_REGNUM] = cache->saved_regs[reg];
	  vsp_valid = 0;
	}
      else if ((insn & 0xf0) == 0xa0)
	{
	  /* 1010Lnnn: pop r4..r[4+nnn], plus r14 if L is set.  */
	  int count = insn & 0x7;
	  int pop_lr = (insn & 0x8) != 0;
	  int i;

	  /* Pop r4..r[4+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[4 + i].addr = vsp;
	      vsp += 4;
	    }

	  /* If indicated by flag, pop LR as well.  */
	  if (pop_lr)
	    {
	      cache->saved_regs[ARM_LR_REGNUM].addr = vsp;
	      vsp += 4;
	    }
	}
      else if (insn == 0xb0)
	{
	  /* 10110000: Finish.
	     We could only have updated PC by popping into it; if so, it
	     will show up as address.  Otherwise, copy LR into PC.  */
	  if (!trad_frame_addr_p (cache->saved_regs, ARM_PC_REGNUM))
	    cache->saved_regs[ARM_PC_REGNUM]
	      = cache->saved_regs[ARM_LR_REGNUM];

	  /* We're done.  */
	  break;
	}
      else if (insn == 0xb1)
	{
	  /* 10110001 0000iiii: pop r0..r3 under mask.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop r0..r3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[i].addr = vsp;
		vsp += 4;
	      }
	}
      else if (insn == 0xb2)
	{
	  /* 10110010 uleb128: vsp += 0x204 + (uleb128 << 2).  */
	  ULONGEST offset = 0;
	  unsigned shift = 0;

	  /* Decode the ULEB128 operand.  */
	  do
	    {
	      offset |= (*entry & 0x7f) << shift;
	      shift += 7;
	    }
	  while (*entry++ & 0x80);

	  vsp += 0x204 + (offset << 2);
	}
      else if (insn == 0xb3)
	{
	  /* 10110011 sssscccc: pop VFP registers saved by FSTMFDX.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D15 are valid here.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if ((insn & 0xf8) == 0xb8)
	{
	  /* 10111nnn: pop VFP D[8]..D[8+nnn] saved by FSTMFDX.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }

	  /* Add an extra 4 bytes for FSTMFDX-style stack.  */
	  vsp += 4;
	}
      else if (insn == 0xc6)
	{
	  /* 11000110 sssscccc: pop iWMMXt registers.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers WR0..WR15 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop iwmmx registers WR[start]..WR[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc7)
	{
	  /* 11000111 0000iiii: pop iWMMXt control registers.  */
	  int mask = *entry++;
	  int i;

	  /* All-zero mask and mask >= 16 is "spare".  */
	  if (mask == 0 || mask >= 16)
	    return NULL;

	  /* Pop iwmmx general-purpose registers WCGR0..WCGR3 under mask.  */
	  for (i = 0; i < 4; i++)
	    if (mask & (1 << i))
	      {
		cache->saved_regs[ARM_WCGR0_REGNUM + i].addr = vsp;
		vsp += 4;
	      }
	}
      else if ((insn & 0xf8) == 0xc0)
	{
	  /* 11000nnn: pop iWMMXt WR[10]..WR[10+nnn].  This test must
	     come after the 0xc6/0xc7 cases above, which this mask
	     would also match.  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop iwmmx registers WR[10]..WR[10+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_WR0_REGNUM + 10 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc8)
	{
	  /* 11001000 sssscccc: pop VFP D[16+ssss]..D[16+ssss+cccc].  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Only registers D0..D31 are valid.  */
	  if (start + count >= 16)
	    return NULL;

	  /* Pop VFP double-precision registers
	     D[16+start]..D[16+start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 16 + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if (insn == 0xc9)
	{
	  /* 11001001 sssscccc: pop VFP registers saved by VPUSH.  */
	  int start = *entry >> 4;
	  int count = (*entry++) & 0xf;
	  int i;

	  /* Pop VFP double-precision registers D[start]..D[start+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + start + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else if ((insn & 0xf8) == 0xd0)
	{
	  /* 11010nnn: pop VFP D[8]..D[8+nnn] saved by VPUSH (no
	     FSTMFDX padding word).  */
	  int count = insn & 0x7;
	  int i;

	  /* Pop VFP double-precision registers D[8]..D[8+count].  */
	  for (i = 0; i <= count; i++)
	    {
	      cache->saved_regs[ARM_D0_REGNUM + 8 + i].addr = vsp;
	      vsp += 8;
	    }
	}
      else
	{
	  /* Everything else is "spare".  */
	  return NULL;
	}
    }

  /* If we restore SP from a register, assume this was the frame register.
     Otherwise just fall back to SP as frame register.  */
  if (trad_frame_realreg_p (cache->saved_regs, ARM_SP_REGNUM))
    cache->framereg = cache->saved_regs[ARM_SP_REGNUM].realreg;
  else
    cache->framereg = ARM_SP_REGNUM;

  /* Determine offset to previous frame.  */
  cache->framesize
    = vsp - get_frame_register_unsigned (this_frame, cache->framereg);

  /* We already got the previous SP.  */
  cache->prev_sp = vsp;

  return cache;
}
2771
2772 /* Unwinding via ARM exception table entries. Note that the sniffer
2773 already computes a filled-in prologue cache, which is then used
2774 with the same arm_prologue_this_id and arm_prologue_prev_register
2775 routines also used for prologue-parsing based unwinding. */
2776
static int
arm_exidx_unwind_sniffer (const struct frame_unwind *self,
			  struct frame_info *this_frame,
			  void **this_prologue_cache)
{
  struct gdbarch *gdbarch = get_frame_arch (this_frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR addr_in_block, exidx_region, func_start;
  struct arm_prologue_cache *cache;
  gdb_byte *entry;

  /* See if we have an ARM exception table entry covering this address.  */
  addr_in_block = get_frame_address_in_block (this_frame);
  entry = arm_find_exidx_entry (addr_in_block, &exidx_region);
  if (!entry)
    return 0;

  /* The ARM exception table does not describe unwind information
     for arbitrary PC values, but is guaranteed to be correct only
     at call sites.  We have to decide here whether we want to use
     ARM exception table information for this frame, or fall back
     to using prologue parsing.  (Note that if we have DWARF CFI,
     this sniffer isn't even called -- CFI is always preferred.)

     Before we make this decision, however, we check whether we
     actually have *symbol* information for the current frame.
     If not, prologue parsing would not work anyway, so we might
     as well use the exception table and hope for the best.  */
  if (find_pc_partial_function (addr_in_block, NULL, &func_start, NULL))
    {
      int exc_valid = 0;

      /* If the next frame is "normal", we are at a call site in this
	 frame, so exception information is guaranteed to be valid.  */
      if (get_next_frame (this_frame)
	  && get_frame_type (get_next_frame (this_frame)) == NORMAL_FRAME)
	exc_valid = 1;

      /* We also assume exception information is valid if we're currently
	 blocked in a system call.  The system library is supposed to
	 ensure this, so that e.g. pthread cancellation works.
	 Detect this by checking whether the preceding instruction is a
	 Thumb or ARM "svc".  */
      if (arm_frame_is_thumb (this_frame))
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 2, 2,
					byte_order_for_code, &insn)
	      && (insn & 0xff00) == 0xdf00 /* svc */)
	    exc_valid = 1;
	}
      else
	{
	  LONGEST insn;

	  if (safe_read_memory_integer (get_frame_pc (this_frame) - 4, 4,
					byte_order_for_code, &insn)
	      && (insn & 0x0f000000) == 0x0f000000 /* svc */)
	    exc_valid = 1;
	}

      /* Bail out if we don't know that exception information is valid.  */
      if (!exc_valid)
	return 0;

      /* The ARM exception index does not mark the *end* of the region
	 covered by the entry, and some functions will not have any entry.
	 To correctly recognize the end of the covered region, the linker
	 should have inserted dummy records with a CANTUNWIND marker.

	 Unfortunately, current versions of GNU ld do not reliably do
	 this, and thus we may have found an incorrect entry above.
	 As a (temporary) sanity check, we only use the entry if it
	 lies *within* the bounds of the function.  Note that this check
	 might reject perfectly valid entries that just happen to cover
	 multiple functions; therefore this check ought to be removed
	 once the linker is fixed.  */
      if (func_start > exidx_region)
	return 0;
    }

  /* Decode the list of unwinding instructions into a prologue cache.
     Note that this may fail due to e.g. a "refuse to unwind" code.  */
  cache = arm_exidx_fill_cache (this_frame, entry);
  if (!cache)
    return 0;

  *this_prologue_cache = cache;
  return 1;
}
2866
/* Unwinder using ARM exception table entries.  The sniffer computes a
   filled-in prologue cache, which is then consumed by the same this_id
   and prev_register routines as the prologue-based unwinder.  */
struct frame_unwind arm_exidx_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_prologue_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_exidx_unwind_sniffer
};
2875
2876 /* Recognize GCC's trampoline for thumb call-indirect. If we are in a
2877 trampoline, return the target PC. Otherwise return 0.
2878
2879 void call0a (char c, short s, int i, long l) {}
2880
2881 int main (void)
2882 {
2883 (*pointer_to_call0a) (c, s, i, l);
2884 }
2885
2886 Instead of calling a stub library function _call_via_xx (xx is
2887 the register name), GCC may inline the trampoline in the object
2888 file as below (register r2 has the address of call0a).
2889
2890 .global main
2891 .type main, %function
2892 ...
2893 bl .L1
2894 ...
2895 .size main, .-main
2896
2897 .L1:
2898 bx r2
2899
2900 The trampoline 'bx r2' doesn't belong to main. */
2901
2902 static CORE_ADDR
2903 arm_skip_bx_reg (struct frame_info *frame, CORE_ADDR pc)
2904 {
2905 /* The heuristics of recognizing such trampoline is that FRAME is
2906 executing in Thumb mode and the instruction on PC is 'bx Rm'. */
2907 if (arm_frame_is_thumb (frame))
2908 {
2909 gdb_byte buf[2];
2910
2911 if (target_read_memory (pc, buf, 2) == 0)
2912 {
2913 struct gdbarch *gdbarch = get_frame_arch (frame);
2914 enum bfd_endian byte_order_for_code
2915 = gdbarch_byte_order_for_code (gdbarch);
2916 uint16_t insn
2917 = extract_unsigned_integer (buf, 2, byte_order_for_code);
2918
2919 if ((insn & 0xff80) == 0x4700) /* bx <Rm> */
2920 {
2921 CORE_ADDR dest
2922 = get_frame_register_unsigned (frame, bits (insn, 3, 6));
2923
2924 /* Clear the LSB so that gdb core sets step-resume
2925 breakpoint at the right address. */
2926 return UNMAKE_THUMB_ADDR (dest);
2927 }
2928 }
2929 }
2930
2931 return 0;
2932 }
2933
2934 static struct arm_prologue_cache *
2935 arm_make_stub_cache (struct frame_info *this_frame)
2936 {
2937 struct arm_prologue_cache *cache;
2938
2939 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
2940 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
2941
2942 cache->prev_sp = get_frame_register_unsigned (this_frame, ARM_SP_REGNUM);
2943
2944 return cache;
2945 }
2946
2947 /* Our frame ID for a stub frame is the current SP and LR. */
2948
2949 static void
2950 arm_stub_this_id (struct frame_info *this_frame,
2951 void **this_cache,
2952 struct frame_id *this_id)
2953 {
2954 struct arm_prologue_cache *cache;
2955
2956 if (*this_cache == NULL)
2957 *this_cache = arm_make_stub_cache (this_frame);
2958 cache = (struct arm_prologue_cache *) *this_cache;
2959
2960 *this_id = frame_id_build (cache->prev_sp, get_frame_pc (this_frame));
2961 }
2962
2963 static int
2964 arm_stub_unwind_sniffer (const struct frame_unwind *self,
2965 struct frame_info *this_frame,
2966 void **this_prologue_cache)
2967 {
2968 CORE_ADDR addr_in_block;
2969 gdb_byte dummy[4];
2970 CORE_ADDR pc, start_addr;
2971 const char *name;
2972
2973 addr_in_block = get_frame_address_in_block (this_frame);
2974 pc = get_frame_pc (this_frame);
2975 if (in_plt_section (addr_in_block)
2976 /* We also use the stub winder if the target memory is unreadable
2977 to avoid having the prologue unwinder trying to read it. */
2978 || target_read_memory (pc, dummy, 4) != 0)
2979 return 1;
2980
2981 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0
2982 && arm_skip_bx_reg (this_frame, pc) != 0)
2983 return 1;
2984
2985 return 0;
2986 }
2987
/* Unwinder for stub frames -- PLT entries, unreadable code, and 'bx Rm'
   trampolines (see arm_stub_unwind_sniffer).  */
struct frame_unwind arm_stub_unwind = {
  NORMAL_FRAME,
  default_frame_unwind_stop_reason,
  arm_stub_this_id,
  arm_prologue_prev_register,
  NULL,
  arm_stub_unwind_sniffer
};
2996
2997 /* Put here the code to store, into CACHE->saved_regs, the addresses
2998 of the saved registers of frame described by THIS_FRAME. CACHE is
2999 returned. */
3000
3001 static struct arm_prologue_cache *
3002 arm_m_exception_cache (struct frame_info *this_frame)
3003 {
3004 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3005 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3006 struct arm_prologue_cache *cache;
3007 CORE_ADDR unwound_sp;
3008 LONGEST xpsr;
3009
3010 cache = FRAME_OBSTACK_ZALLOC (struct arm_prologue_cache);
3011 cache->saved_regs = trad_frame_alloc_saved_regs (this_frame);
3012
3013 unwound_sp = get_frame_register_unsigned (this_frame,
3014 ARM_SP_REGNUM);
3015
3016 /* The hardware saves eight 32-bit words, comprising xPSR,
3017 ReturnAddress, LR (R14), R12, R3, R2, R1, R0. See details in
3018 "B1.5.6 Exception entry behavior" in
3019 "ARMv7-M Architecture Reference Manual". */
3020 cache->saved_regs[0].addr = unwound_sp;
3021 cache->saved_regs[1].addr = unwound_sp + 4;
3022 cache->saved_regs[2].addr = unwound_sp + 8;
3023 cache->saved_regs[3].addr = unwound_sp + 12;
3024 cache->saved_regs[12].addr = unwound_sp + 16;
3025 cache->saved_regs[14].addr = unwound_sp + 20;
3026 cache->saved_regs[15].addr = unwound_sp + 24;
3027 cache->saved_regs[ARM_PS_REGNUM].addr = unwound_sp + 28;
3028
3029 /* If bit 9 of the saved xPSR is set, then there is a four-byte
3030 aligner between the top of the 32-byte stack frame and the
3031 previous context's stack pointer. */
3032 cache->prev_sp = unwound_sp + 32;
3033 if (safe_read_memory_integer (unwound_sp + 28, 4, byte_order, &xpsr)
3034 && (xpsr & (1 << 9)) != 0)
3035 cache->prev_sp += 4;
3036
3037 return cache;
3038 }
3039
3040 /* Implementation of function hook 'this_id' in
3041 'struct frame_uwnind'. */
3042
3043 static void
3044 arm_m_exception_this_id (struct frame_info *this_frame,
3045 void **this_cache,
3046 struct frame_id *this_id)
3047 {
3048 struct arm_prologue_cache *cache;
3049
3050 if (*this_cache == NULL)
3051 *this_cache = arm_m_exception_cache (this_frame);
3052 cache = (struct arm_prologue_cache *) *this_cache;
3053
3054 /* Our frame ID for a stub frame is the current SP and LR. */
3055 *this_id = frame_id_build (cache->prev_sp,
3056 get_frame_pc (this_frame));
3057 }
3058
3059 /* Implementation of function hook 'prev_register' in
3060 'struct frame_uwnind'. */
3061
3062 static struct value *
3063 arm_m_exception_prev_register (struct frame_info *this_frame,
3064 void **this_cache,
3065 int prev_regnum)
3066 {
3067 struct gdbarch *gdbarch = get_frame_arch (this_frame);
3068 struct arm_prologue_cache *cache;
3069
3070 if (*this_cache == NULL)
3071 *this_cache = arm_m_exception_cache (this_frame);
3072 cache = (struct arm_prologue_cache *) *this_cache;
3073
3074 /* The value was already reconstructed into PREV_SP. */
3075 if (prev_regnum == ARM_SP_REGNUM)
3076 return frame_unwind_got_constant (this_frame, prev_regnum,
3077 cache->prev_sp);
3078
3079 return trad_frame_get_prev_register (this_frame, cache->saved_regs,
3080 prev_regnum);
3081 }
3082
/* Implementation of the 'sniffer' hook of 'struct frame_unwind' for
   M-profile exception frames.  */

static int
arm_m_exception_unwind_sniffer (const struct frame_unwind *self,
				struct frame_info *this_frame,
				void **this_prologue_cache)
{
  /* No need to check is_m; this sniffer is only registered for
     M-profile architectures.

     Exception frames return to one of these magic PCs.  Other values
     are not defined as of v7-M.  See details in "B1.5.8 Exception
     return behavior" in "ARMv7-M Architecture Reference Manual".  */
  switch (get_frame_pc (this_frame))
    {
    case 0xfffffff1:
    case 0xfffffff9:
    case 0xfffffffd:
      return 1;

    default:
      return 0;
    }
}
3105
/* Frame unwinder for M-profile exceptions.  Installed as a
   SIGTRAMP_FRAME unwinder; arm_m_exception_unwind_sniffer selects it
   when the frame's PC is one of the magic exception-return values.  */

struct frame_unwind arm_m_exception_unwind =
{
  SIGTRAMP_FRAME,
  default_frame_unwind_stop_reason,
  arm_m_exception_this_id,
  arm_m_exception_prev_register,
  NULL,
  arm_m_exception_unwind_sniffer
};
3117
3118 static CORE_ADDR
3119 arm_normal_frame_base (struct frame_info *this_frame, void **this_cache)
3120 {
3121 struct arm_prologue_cache *cache;
3122
3123 if (*this_cache == NULL)
3124 *this_cache = arm_make_prologue_cache (this_frame);
3125 cache = (struct arm_prologue_cache *) *this_cache;
3126
3127 return cache->prev_sp - cache->framesize;
3128 }
3129
/* Frame base for frames handled by the ARM prologue unwinder; all
   queries resolve to arm_normal_frame_base.  */

struct frame_base arm_normal_base = {
  &arm_prologue_unwind,
  arm_normal_frame_base,
  arm_normal_frame_base,
  arm_normal_frame_base
};
3136
3137 /* Assuming THIS_FRAME is a dummy, return the frame ID of that
3138 dummy frame. The frame ID's base needs to match the TOS value
3139 saved by save_dummy_frame_tos() and returned from
3140 arm_push_dummy_call, and the PC needs to match the dummy frame's
3141 breakpoint. */
3142
3143 static struct frame_id
3144 arm_dummy_id (struct gdbarch *gdbarch, struct frame_info *this_frame)
3145 {
3146 return frame_id_build (get_frame_register_unsigned (this_frame,
3147 ARM_SP_REGNUM),
3148 get_frame_pc (this_frame));
3149 }
3150
3151 /* Given THIS_FRAME, find the previous frame's resume PC (which will
3152 be used to construct the previous frame's ID, after looking up the
3153 containing function). */
3154
3155 static CORE_ADDR
3156 arm_unwind_pc (struct gdbarch *gdbarch, struct frame_info *this_frame)
3157 {
3158 CORE_ADDR pc;
3159 pc = frame_unwind_register_unsigned (this_frame, ARM_PC_REGNUM);
3160 return arm_addr_bits_remove (gdbarch, pc);
3161 }
3162
/* Return the unwound value of the stack pointer register in
   THIS_FRAME, i.e. the previous frame's SP.  */

static CORE_ADDR
arm_unwind_sp (struct gdbarch *gdbarch, struct frame_info *this_frame)
{
  return frame_unwind_register_unsigned (this_frame, ARM_SP_REGNUM);
}
3168
3169 static struct value *
3170 arm_dwarf2_prev_register (struct frame_info *this_frame, void **this_cache,
3171 int regnum)
3172 {
3173 struct gdbarch * gdbarch = get_frame_arch (this_frame);
3174 CORE_ADDR lr, cpsr;
3175 ULONGEST t_bit = arm_psr_thumb_bit (gdbarch);
3176
3177 switch (regnum)
3178 {
3179 case ARM_PC_REGNUM:
3180 /* The PC is normally copied from the return column, which
3181 describes saves of LR. However, that version may have an
3182 extra bit set to indicate Thumb state. The bit is not
3183 part of the PC. */
3184 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3185 return frame_unwind_got_constant (this_frame, regnum,
3186 arm_addr_bits_remove (gdbarch, lr));
3187
3188 case ARM_PS_REGNUM:
3189 /* Reconstruct the T bit; see arm_prologue_prev_register for details. */
3190 cpsr = get_frame_register_unsigned (this_frame, regnum);
3191 lr = frame_unwind_register_unsigned (this_frame, ARM_LR_REGNUM);
3192 if (IS_THUMB_ADDR (lr))
3193 cpsr |= t_bit;
3194 else
3195 cpsr &= ~t_bit;
3196 return frame_unwind_got_constant (this_frame, regnum, cpsr);
3197
3198 default:
3199 internal_error (__FILE__, __LINE__,
3200 _("Unexpected register %d"), regnum);
3201 }
3202 }
3203
3204 static void
3205 arm_dwarf2_frame_init_reg (struct gdbarch *gdbarch, int regnum,
3206 struct dwarf2_frame_state_reg *reg,
3207 struct frame_info *this_frame)
3208 {
3209 switch (regnum)
3210 {
3211 case ARM_PC_REGNUM:
3212 case ARM_PS_REGNUM:
3213 reg->how = DWARF2_FRAME_REG_FN;
3214 reg->loc.fn = arm_dwarf2_prev_register;
3215 break;
3216 case ARM_SP_REGNUM:
3217 reg->how = DWARF2_FRAME_REG_CFA;
3218 break;
3219 }
3220 }
3221
/* Implement the stack_frame_destroyed_p gdbarch method for Thumb
   code: return non-zero if PC appears to be inside the epilogue of
   its containing function.  */

static int
thumb_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn, insn2;
  int found_return = 0, found_stack_adjust = 0;
  CORE_ADDR func_start, func_end;
  CORE_ADDR scan_pc;
  gdb_byte buf[4];

  /* Without function bounds we cannot scan; assume not in an
     epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* The epilogue is a sequence of instructions along the following lines:

    - add stack frame size to SP or FP
    - [if frame pointer used] restore SP from FP
    - restore registers from SP [may include PC]
    - a return-type instruction [if PC wasn't already restored]

    In a first pass, we scan forward from the current PC and verify the
    instructions we find as compatible with this sequence, ending in a
    return instruction.

    However, this is not sufficient to distinguish indirect function calls
    within a function from indirect tail calls in the epilogue in some cases.
    Therefore, if we didn't already find any SP-changing instruction during
    forward scan, we add a backward scanning heuristic to ensure we actually
    are in the epilogue.  */

  /* Forward scan: every instruction from PC to the return must be a
     plausible epilogue instruction.  */
  scan_pc = pc;
  while (scan_pc < func_end && !found_return)
    {
      if (target_read_memory (scan_pc, buf, 2))
	break;

      scan_pc += 2;
      insn = extract_unsigned_integer (buf, 2, byte_order_for_code);

      if ((insn & 0xff80) == 0x4700)  /* bx <Rm> */
	found_return = 1;
      else if (insn == 0x46f7)  /* mov pc, lr */
	found_return = 1;
      else if (thumb_instruction_restores_sp (insn))
	{
	  if ((insn & 0xff00) == 0xbd00)  /* pop <registers, PC> */
	    found_return = 1;
	}
      else if (thumb_insn_size (insn) == 4)  /* 32-bit Thumb-2 instruction */
	{
	  /* Fetch the second halfword of the 32-bit encoding.  */
	  if (target_read_memory (scan_pc, buf, 2))
	    break;

	  scan_pc += 2;
	  insn2 = extract_unsigned_integer (buf, 2, byte_order_for_code);

	  if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
	    {
	      if (insn2 & 0x8000)  /* <registers> include PC.  */
		found_return = 1;
	    }
	  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
		   && (insn2 & 0x0fff) == 0x0b04)
	    {
	      if ((insn2 & 0xf000) == 0xf000)  /* <Rt> is PC.  */
		found_return = 1;
	    }
	  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
		   && (insn2 & 0x0e00) == 0x0a00)
	    ;
	  else
	    break;
	}
      else
	break;
    }

  if (!found_return)
    return 0;

  /* Since any instruction in the epilogue sequence, with the possible
     exception of return itself, updates the stack pointer, we need to
     scan backwards for at most one instruction.  Try either a 16-bit or
     a 32-bit instruction.  This is just a heuristic, so we do not worry
     too much about false positives.  */

  if (pc - 4 < func_start)
    return 0;
  if (target_read_memory (pc - 4, buf, 4))
    return 0;

  /* INSN2 alone is the candidate 16-bit previous instruction;
     INSN:INSN2 together form the candidate 32-bit one.  */
  insn = extract_unsigned_integer (buf, 2, byte_order_for_code);
  insn2 = extract_unsigned_integer (buf + 2, 2, byte_order_for_code);

  if (thumb_instruction_restores_sp (insn2))
    found_stack_adjust = 1;
  else if (insn == 0xe8bd)  /* ldm.w sp!, <registers> */
    found_stack_adjust = 1;
  else if (insn == 0xf85d  /* ldr.w <Rt>, [sp], #4 */
	   && (insn2 & 0x0fff) == 0x0b04)
    found_stack_adjust = 1;
  else if ((insn & 0xffbf) == 0xecbd  /* vldm sp!, <list> */
	   && (insn2 & 0x0e00) == 0x0a00)
    found_stack_adjust = 1;

  return found_stack_adjust;
}
3331
/* Implement the stack_frame_destroyed_p gdbarch method: return
   non-zero if PC appears to be inside a function epilogue.
   Delegates to thumb_stack_frame_destroyed_p for Thumb addresses.  */

static int
arm_stack_frame_destroyed_p (struct gdbarch *gdbarch, CORE_ADDR pc)
{
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned int insn;
  int found_return;
  CORE_ADDR func_start, func_end;

  if (arm_pc_is_thumb (gdbarch, pc))
    return thumb_stack_frame_destroyed_p (gdbarch, pc);

  /* Without function bounds we cannot scan; assume not in an
     epilogue.  */
  if (!find_pc_partial_function (pc, NULL, &func_start, &func_end))
    return 0;

  /* We are in the epilogue if the previous instruction was a stack
     adjustment and the next instruction is a possible return (bx, mov
     pc, or pop).  We could have to scan backwards to find the stack
     adjustment, or forwards to find the return, but this is a decent
     approximation.  First scan forwards.  */

  found_return = 0;
  insn = read_memory_unsigned_integer (pc, 4, byte_order_for_code);
  /* Only look at encodings whose condition field is not NV (0xF).  */
  if (bits (insn, 28, 31) != INST_NV)
    {
      if ((insn & 0x0ffffff0) == 0x012fff10)
	/* BX.  */
	found_return = 1;
      else if ((insn & 0x0ffffff0) == 0x01a0f000)
	/* MOV PC.  */
	found_return = 1;
      else if ((insn & 0x0fff0000) == 0x08bd0000
	       && (insn & 0x0000c000) != 0)
	/* POP (LDMIA), including PC or LR.  */
	found_return = 1;
    }

  if (!found_return)
    return 0;

  /* Scan backwards.  This is just a heuristic, so do not worry about
     false positives from mode changes.  */

  if (pc < func_start + 4)
    return 0;

  insn = read_memory_unsigned_integer (pc - 4, 4, byte_order_for_code);
  if (arm_instruction_restores_sp (insn))
    return 1;

  return 0;
}
3385
3386
/* When arguments must be pushed onto the stack, they go on in reverse
   order.  The code below implements a FILO (stack) to do this.  */

struct stack_item
{
  int len;			/* Number of bytes in DATA.  */
  struct stack_item *prev;	/* Item below this one, or NULL.  */
  gdb_byte *data;		/* xmalloc'd copy of the contents;
				   freed by pop_stack_item.  */
};
3396
3397 static struct stack_item *
3398 push_stack_item (struct stack_item *prev, const gdb_byte *contents, int len)
3399 {
3400 struct stack_item *si;
3401 si = XNEW (struct stack_item);
3402 si->data = (gdb_byte *) xmalloc (len);
3403 si->len = len;
3404 si->prev = prev;
3405 memcpy (si->data, contents, len);
3406 return si;
3407 }
3408
3409 static struct stack_item *
3410 pop_stack_item (struct stack_item *si)
3411 {
3412 struct stack_item *dead = si;
3413 si = si->prev;
3414 xfree (dead->data);
3415 xfree (dead);
3416 return si;
3417 }
3418
3419
/* Return the alignment (in bytes) of the given type.  */

static int
arm_type_align (struct type *t)
{
  int n;
  int align;
  int falign;

  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    default:
      /* Should never happen.  */
      internal_error (__FILE__, __LINE__, _("unknown type alignment"));
      return 4;		/* Keeps the compiler happy; internal_error
			   should not return.  */

    case TYPE_CODE_PTR:
    case TYPE_CODE_ENUM:
    case TYPE_CODE_INT:
    case TYPE_CODE_FLT:
    case TYPE_CODE_SET:
    case TYPE_CODE_RANGE:
    case TYPE_CODE_REF:
    case TYPE_CODE_CHAR:
    case TYPE_CODE_BOOL:
      /* Scalar types align to their own size.  */
      return TYPE_LENGTH (t);

    case TYPE_CODE_ARRAY:
      if (TYPE_VECTOR (t))
	{
	  /* Use the natural alignment for vector types (the same for
	     scalar type), but the maximum alignment is 64-bit.  */
	  if (TYPE_LENGTH (t) > 8)
	    return 8;
	  else
	    return TYPE_LENGTH (t);
	}
      else
	/* Ordinary arrays align like their element type.  */
	return arm_type_align (TYPE_TARGET_TYPE (t));
    case TYPE_CODE_COMPLEX:
      /* A complex aligns like its component type.  */
      return arm_type_align (TYPE_TARGET_TYPE (t));

    case TYPE_CODE_STRUCT:
    case TYPE_CODE_UNION:
      /* Aggregates align to their most strictly aligned field.  */
      align = 1;
      for (n = 0; n < TYPE_NFIELDS (t); n++)
	{
	  falign = arm_type_align (TYPE_FIELD_TYPE (t, n));
	  if (falign > align)
	    align = falign;
	}
      return align;
    }
}
3475
/* Possible base types for a candidate for passing and returning in
   VFP registers.  */

enum arm_vfp_cprc_base_type
{
  VFP_CPRC_UNKNOWN,	/* No base type classified yet.  */
  VFP_CPRC_SINGLE,	/* 4-byte single-precision float ('s' regs).  */
  VFP_CPRC_DOUBLE,	/* 8-byte double-precision float ('d' regs).  */
  VFP_CPRC_VEC64,	/* 8-byte containerized vector ('d' regs).  */
  VFP_CPRC_VEC128	/* 16-byte containerized vector ('q' regs).  */
};
3487
3488 /* The length of one element of base type B. */
3489
3490 static unsigned
3491 arm_vfp_cprc_unit_length (enum arm_vfp_cprc_base_type b)
3492 {
3493 switch (b)
3494 {
3495 case VFP_CPRC_SINGLE:
3496 return 4;
3497 case VFP_CPRC_DOUBLE:
3498 return 8;
3499 case VFP_CPRC_VEC64:
3500 return 8;
3501 case VFP_CPRC_VEC128:
3502 return 16;
3503 default:
3504 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3505 (int) b);
3506 }
3507 }
3508
3509 /* The character ('s', 'd' or 'q') for the type of VFP register used
3510 for passing base type B. */
3511
3512 static int
3513 arm_vfp_cprc_reg_char (enum arm_vfp_cprc_base_type b)
3514 {
3515 switch (b)
3516 {
3517 case VFP_CPRC_SINGLE:
3518 return 's';
3519 case VFP_CPRC_DOUBLE:
3520 return 'd';
3521 case VFP_CPRC_VEC64:
3522 return 'd';
3523 case VFP_CPRC_VEC128:
3524 return 'q';
3525 default:
3526 internal_error (__FILE__, __LINE__, _("Invalid VFP CPRC type: %d."),
3527 (int) b);
3528 }
3529 }
3530
/* Determine whether T may be part of a candidate for passing and
   returning in VFP registers, ignoring the limit on the total number
   of components.  If *BASE_TYPE is VFP_CPRC_UNKNOWN, set it to the
   classification of the first valid component found; if it is not
   VFP_CPRC_UNKNOWN, all components must have the same classification
   as *BASE_TYPE.  If it is found that T contains a type not permitted
   for passing and returning in VFP registers, a type differently
   classified from *BASE_TYPE, or two types differently classified
   from each other, return -1, otherwise return the total number of
   base-type elements found (possibly 0 in an empty structure or
   array).  Vector types are not currently supported, matching the
   generic AAPCS support.  */

static int
arm_vfp_cprc_sub_candidate (struct type *t,
			    enum arm_vfp_cprc_base_type *base_type)
{
  t = check_typedef (t);
  switch (TYPE_CODE (t))
    {
    case TYPE_CODE_FLT:
      /* A float is one SINGLE element, a double one DOUBLE element;
	 any other float width disqualifies the candidate.  */
      switch (TYPE_LENGTH (t))
	{
	case 4:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 1;

	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 1;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_COMPLEX:
      /* Arguments of complex T where T is one of the types float or
	 double get treated as if they are implemented as:

	 struct complexT
	 {
	   T real;
	   T imag;
	 };

      */
      switch (TYPE_LENGTH (t))
	{
	case 8:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_SINGLE;
	  else if (*base_type != VFP_CPRC_SINGLE)
	    return -1;
	  return 2;

	case 16:
	  if (*base_type == VFP_CPRC_UNKNOWN)
	    *base_type = VFP_CPRC_DOUBLE;
	  else if (*base_type != VFP_CPRC_DOUBLE)
	    return -1;
	  return 2;

	default:
	  return -1;
	}
      break;

    case TYPE_CODE_ARRAY:
      {
	if (TYPE_VECTOR (t))
	  {
	    /* A 64-bit or 128-bit containerized vector type are VFP
	       CPRCs.  */
	    switch (TYPE_LENGTH (t))
	      {
	      case 8:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC64;
		return 1;
	      case 16:
		if (*base_type == VFP_CPRC_UNKNOWN)
		  *base_type = VFP_CPRC_VEC128;
		return 1;
	      default:
		return -1;
	      }
	  }
	else
	  {
	    /* An ordinary array counts as its element count times the
	       element's classification.  */
	    int count;
	    unsigned unitlen;

	    count = arm_vfp_cprc_sub_candidate (TYPE_TARGET_TYPE (t),
						base_type);
	    if (count == -1)
	      return -1;
	    if (TYPE_LENGTH (t) == 0)
	      {
		gdb_assert (count == 0);
		return 0;
	      }
	    else if (count == 0)
	      return -1;
	    unitlen = arm_vfp_cprc_unit_length (*base_type);
	    gdb_assert ((TYPE_LENGTH (t) % unitlen) == 0);
	    return TYPE_LENGTH (t) / unitlen;
	  }
      }
      break;

    case TYPE_CODE_STRUCT:
      {
	/* A struct's element count is the sum over its fields; its
	   total length must be exactly the sum of the unit lengths
	   (no padding allowed).  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count += sub_count;
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    case TYPE_CODE_UNION:
      {
	/* A union's element count is the maximum over its members.  */
	int count = 0;
	unsigned unitlen;
	int i;
	for (i = 0; i < TYPE_NFIELDS (t); i++)
	  {
	    int sub_count = arm_vfp_cprc_sub_candidate (TYPE_FIELD_TYPE (t, i),
							base_type);
	    if (sub_count == -1)
	      return -1;
	    count = (count > sub_count ? count : sub_count);
	  }
	if (TYPE_LENGTH (t) == 0)
	  {
	    gdb_assert (count == 0);
	    return 0;
	  }
	else if (count == 0)
	  return -1;
	unitlen = arm_vfp_cprc_unit_length (*base_type);
	if (TYPE_LENGTH (t) != unitlen * count)
	  return -1;
	return count;
      }

    default:
      break;
    }

  return -1;
}
3706
3707 /* Determine whether T is a VFP co-processor register candidate (CPRC)
3708 if passed to or returned from a non-variadic function with the VFP
3709 ABI in effect. Return 1 if it is, 0 otherwise. If it is, set
3710 *BASE_TYPE to the base type for T and *COUNT to the number of
3711 elements of that base type before returning. */
3712
3713 static int
3714 arm_vfp_call_candidate (struct type *t, enum arm_vfp_cprc_base_type *base_type,
3715 int *count)
3716 {
3717 enum arm_vfp_cprc_base_type b = VFP_CPRC_UNKNOWN;
3718 int c = arm_vfp_cprc_sub_candidate (t, &b);
3719 if (c <= 0 || c > 4)
3720 return 0;
3721 *base_type = b;
3722 *count = c;
3723 return 1;
3724 }
3725
3726 /* Return 1 if the VFP ABI should be used for passing arguments to and
3727 returning values from a function of type FUNC_TYPE, 0
3728 otherwise. */
3729
3730 static int
3731 arm_vfp_abi_for_function (struct gdbarch *gdbarch, struct type *func_type)
3732 {
3733 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
3734 /* Variadic functions always use the base ABI. Assume that functions
3735 without debug info are not variadic. */
3736 if (func_type && TYPE_VARARGS (check_typedef (func_type)))
3737 return 0;
3738 /* The VFP ABI is only supported as a variant of AAPCS. */
3739 if (tdep->arm_abi != ARM_ABI_AAPCS)
3740 return 0;
3741 return gdbarch_tdep (gdbarch)->fp_model == ARM_FLOAT_VFP;
3742 }
3743
3744 /* We currently only support passing parameters in integer registers, which
3745 conforms with GCC's default model, and VFP argument passing following
3746 the VFP variant of AAPCS. Several other variants exist and
3747 we should probably support some of them based on the selected ABI. */
3748
3749 static CORE_ADDR
3750 arm_push_dummy_call (struct gdbarch *gdbarch, struct value *function,
3751 struct regcache *regcache, CORE_ADDR bp_addr, int nargs,
3752 struct value **args, CORE_ADDR sp, int struct_return,
3753 CORE_ADDR struct_addr)
3754 {
3755 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
3756 int argnum;
3757 int argreg;
3758 int nstack;
3759 struct stack_item *si = NULL;
3760 int use_vfp_abi;
3761 struct type *ftype;
3762 unsigned vfp_regs_free = (1 << 16) - 1;
3763
3764 /* Determine the type of this function and whether the VFP ABI
3765 applies. */
3766 ftype = check_typedef (value_type (function));
3767 if (TYPE_CODE (ftype) == TYPE_CODE_PTR)
3768 ftype = check_typedef (TYPE_TARGET_TYPE (ftype));
3769 use_vfp_abi = arm_vfp_abi_for_function (gdbarch, ftype);
3770
3771 /* Set the return address. For the ARM, the return breakpoint is
3772 always at BP_ADDR. */
3773 if (arm_pc_is_thumb (gdbarch, bp_addr))
3774 bp_addr |= 1;
3775 regcache_cooked_write_unsigned (regcache, ARM_LR_REGNUM, bp_addr);
3776
3777 /* Walk through the list of args and determine how large a temporary
3778 stack is required. Need to take care here as structs may be
3779 passed on the stack, and we have to push them. */
3780 nstack = 0;
3781
3782 argreg = ARM_A1_REGNUM;
3783 nstack = 0;
3784
3785 /* The struct_return pointer occupies the first parameter
3786 passing register. */
3787 if (struct_return)
3788 {
3789 if (arm_debug)
3790 fprintf_unfiltered (gdb_stdlog, "struct return in %s = %s\n",
3791 gdbarch_register_name (gdbarch, argreg),
3792 paddress (gdbarch, struct_addr));
3793 regcache_cooked_write_unsigned (regcache, argreg, struct_addr);
3794 argreg++;
3795 }
3796
3797 for (argnum = 0; argnum < nargs; argnum++)
3798 {
3799 int len;
3800 struct type *arg_type;
3801 struct type *target_type;
3802 enum type_code typecode;
3803 const bfd_byte *val;
3804 int align;
3805 enum arm_vfp_cprc_base_type vfp_base_type;
3806 int vfp_base_count;
3807 int may_use_core_reg = 1;
3808
3809 arg_type = check_typedef (value_type (args[argnum]));
3810 len = TYPE_LENGTH (arg_type);
3811 target_type = TYPE_TARGET_TYPE (arg_type);
3812 typecode = TYPE_CODE (arg_type);
3813 val = value_contents (args[argnum]);
3814
3815 align = arm_type_align (arg_type);
3816 /* Round alignment up to a whole number of words. */
3817 align = (align + INT_REGISTER_SIZE - 1) & ~(INT_REGISTER_SIZE - 1);
3818 /* Different ABIs have different maximum alignments. */
3819 if (gdbarch_tdep (gdbarch)->arm_abi == ARM_ABI_APCS)
3820 {
3821 /* The APCS ABI only requires word alignment. */
3822 align = INT_REGISTER_SIZE;
3823 }
3824 else
3825 {
3826 /* The AAPCS requires at most doubleword alignment. */
3827 if (align > INT_REGISTER_SIZE * 2)
3828 align = INT_REGISTER_SIZE * 2;
3829 }
3830
3831 if (use_vfp_abi
3832 && arm_vfp_call_candidate (arg_type, &vfp_base_type,
3833 &vfp_base_count))
3834 {
3835 int regno;
3836 int unit_length;
3837 int shift;
3838 unsigned mask;
3839
3840 /* Because this is a CPRC it cannot go in a core register or
3841 cause a core register to be skipped for alignment.
3842 Either it goes in VFP registers and the rest of this loop
3843 iteration is skipped for this argument, or it goes on the
3844 stack (and the stack alignment code is correct for this
3845 case). */
3846 may_use_core_reg = 0;
3847
3848 unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
3849 shift = unit_length / 4;
3850 mask = (1 << (shift * vfp_base_count)) - 1;
3851 for (regno = 0; regno < 16; regno += shift)
3852 if (((vfp_regs_free >> regno) & mask) == mask)
3853 break;
3854
3855 if (regno < 16)
3856 {
3857 int reg_char;
3858 int reg_scaled;
3859 int i;
3860
3861 vfp_regs_free &= ~(mask << regno);
3862 reg_scaled = regno / shift;
3863 reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
3864 for (i = 0; i < vfp_base_count; i++)
3865 {
3866 char name_buf[4];
3867 int regnum;
3868 if (reg_char == 'q')
3869 arm_neon_quad_write (gdbarch, regcache, reg_scaled + i,
3870 val + i * unit_length);
3871 else
3872 {
3873 xsnprintf (name_buf, sizeof (name_buf), "%c%d",
3874 reg_char, reg_scaled + i);
3875 regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
3876 strlen (name_buf));
3877 regcache_cooked_write (regcache, regnum,
3878 val + i * unit_length);
3879 }
3880 }
3881 continue;
3882 }
3883 else
3884 {
3885 /* This CPRC could not go in VFP registers, so all VFP
3886 registers are now marked as used. */
3887 vfp_regs_free = 0;
3888 }
3889 }
3890
3891 /* Push stack padding for dowubleword alignment. */
3892 if (nstack & (align - 1))
3893 {
3894 si = push_stack_item (si, val, INT_REGISTER_SIZE);
3895 nstack += INT_REGISTER_SIZE;
3896 }
3897
3898 /* Doubleword aligned quantities must go in even register pairs. */
3899 if (may_use_core_reg
3900 && argreg <= ARM_LAST_ARG_REGNUM
3901 && align > INT_REGISTER_SIZE
3902 && argreg & 1)
3903 argreg++;
3904
3905 /* If the argument is a pointer to a function, and it is a
3906 Thumb function, create a LOCAL copy of the value and set
3907 the THUMB bit in it. */
3908 if (TYPE_CODE_PTR == typecode
3909 && target_type != NULL
3910 && TYPE_CODE_FUNC == TYPE_CODE (check_typedef (target_type)))
3911 {
3912 CORE_ADDR regval = extract_unsigned_integer (val, len, byte_order);
3913 if (arm_pc_is_thumb (gdbarch, regval))
3914 {
3915 bfd_byte *copy = (bfd_byte *) alloca (len);
3916 store_unsigned_integer (copy, len, byte_order,
3917 MAKE_THUMB_ADDR (regval));
3918 val = copy;
3919 }
3920 }
3921
3922 /* Copy the argument to general registers or the stack in
3923 register-sized pieces. Large arguments are split between
3924 registers and stack. */
3925 while (len > 0)
3926 {
3927 int partial_len = len < INT_REGISTER_SIZE ? len : INT_REGISTER_SIZE;
3928 CORE_ADDR regval
3929 = extract_unsigned_integer (val, partial_len, byte_order);
3930
3931 if (may_use_core_reg && argreg <= ARM_LAST_ARG_REGNUM)
3932 {
3933 /* The argument is being passed in a general purpose
3934 register. */
3935 if (byte_order == BFD_ENDIAN_BIG)
3936 regval <<= (INT_REGISTER_SIZE - partial_len) * 8;
3937 if (arm_debug)
3938 fprintf_unfiltered (gdb_stdlog, "arg %d in %s = 0x%s\n",
3939 argnum,
3940 gdbarch_register_name
3941 (gdbarch, argreg),
3942 phex (regval, INT_REGISTER_SIZE));
3943 regcache_cooked_write_unsigned (regcache, argreg, regval);
3944 argreg++;
3945 }
3946 else
3947 {
3948 gdb_byte buf[INT_REGISTER_SIZE];
3949
3950 memset (buf, 0, sizeof (buf));
3951 store_unsigned_integer (buf, partial_len, byte_order, regval);
3952
3953 /* Push the arguments onto the stack. */
3954 if (arm_debug)
3955 fprintf_unfiltered (gdb_stdlog, "arg %d @ sp + %d\n",
3956 argnum, nstack);
3957 si = push_stack_item (si, buf, INT_REGISTER_SIZE);
3958 nstack += INT_REGISTER_SIZE;
3959 }
3960
3961 len -= partial_len;
3962 val += partial_len;
3963 }
3964 }
3965 /* If we have an odd number of words to push, then decrement the stack
3966 by one word now, so first stack argument will be dword aligned. */
3967 if (nstack & 4)
3968 sp -= 4;
3969
3970 while (si)
3971 {
3972 sp -= si->len;
3973 write_memory (sp, si->data, si->len);
3974 si = pop_stack_item (si);
3975 }
3976
3977 /* Finally, update teh SP register. */
3978 regcache_cooked_write_unsigned (regcache, ARM_SP_REGNUM, sp);
3979
3980 return sp;
3981 }
3982
3983
3984 /* Always align the frame to an 8-byte boundary. This is required on
3985 some platforms and harmless on the rest. */
3986
3987 static CORE_ADDR
3988 arm_frame_align (struct gdbarch *gdbarch, CORE_ADDR sp)
3989 {
3990 /* Align the stack to eight bytes. */
3991 return sp & ~ (CORE_ADDR) 7;
3992 }
3993
/* Print the FPU status flags set in the low five bits of FLAGS
   (IVO, DVZ, OFL, UFL, INX), followed by a newline.  */

static void
print_fpu_flags (struct ui_file *file, int flags)
{
  static const char *const flag_names[]
    = { "IVO ", "DVZ ", "OFL ", "UFL ", "INX " };
  int bit;

  for (bit = 0; bit < 5; bit++)
    if (flags & (1 << bit))
      fputs_filtered (flag_names[bit], file);
  fputc_filtered ('\n', file);
}
4009
4010 /* Print interesting information about the floating point processor
4011 (if present) or emulator. */
4012 static void
4013 arm_print_float_info (struct gdbarch *gdbarch, struct ui_file *file,
4014 struct frame_info *frame, const char *args)
4015 {
4016 unsigned long status = get_frame_register_unsigned (frame, ARM_FPS_REGNUM);
4017 int type;
4018
4019 type = (status >> 24) & 127;
4020 if (status & (1 << 31))
4021 fprintf_filtered (file, _("Hardware FPU type %d\n"), type);
4022 else
4023 fprintf_filtered (file, _("Software FPU type %d\n"), type);
4024 /* i18n: [floating point unit] mask */
4025 fputs_filtered (_("mask: "), file);
4026 print_fpu_flags (file, status >> 16);
4027 /* i18n: [floating point unit] flags */
4028 fputs_filtered (_("flags: "), file);
4029 print_fpu_flags (file, status);
4030 }
4031
4032 /* Construct the ARM extended floating point type. */
4033 static struct type *
4034 arm_ext_type (struct gdbarch *gdbarch)
4035 {
4036 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
4037
4038 if (!tdep->arm_ext_type)
4039 tdep->arm_ext_type
4040 = arch_float_type (gdbarch, -1, "builtin_type_arm_ext",
4041 floatformats_arm_ext);
4042
4043 return tdep->arm_ext_type;
4044 }
4045
/* Return the union type used to present 64-bit NEON D registers,
   built lazily and cached in TDEP: overlapping u8/u16/u32/u64/f32/f64
   views of the same eight bytes.  */

static struct type *
arm_neon_double_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_double_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_d",
			       TYPE_CODE_UNION);
      /* Unsigned integer views, one per element width.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", elem);
      /* Floating point views.  */
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 2));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", elem);

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_d";
      tdep->neon_double_type = t;
    }

  return tdep->neon_double_type;
}
4077
/* FIXME: The vector types are not correctly ordered on big-endian
   targets.  Just as s0 is the low bits of d0, d0[0] is also the low
   bits of d0 - regardless of what unit size is being held in d0.  So
   the offset of the first uint8 in d0 is 7, but the offset of the
   first float is 4.  This code works as-is for little-endian
   targets.  */

/* Return the union type used to present 128-bit NEON Q registers,
   built lazily and cached in TDEP: overlapping u8/u16/u32/u64/f32/f64
   views of the same sixteen bytes.  */

static struct type *
arm_neon_quad_type (struct gdbarch *gdbarch)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);

  if (tdep->neon_quad_type == NULL)
    {
      struct type *t, *elem;

      t = arch_composite_type (gdbarch, "__gdb_builtin_type_neon_q",
			       TYPE_CODE_UNION);
      /* Unsigned integer views, one per element width.  */
      elem = builtin_type (gdbarch)->builtin_uint8;
      append_composite_type_field (t, "u8", init_vector_type (elem, 16));
      elem = builtin_type (gdbarch)->builtin_uint16;
      append_composite_type_field (t, "u16", init_vector_type (elem, 8));
      elem = builtin_type (gdbarch)->builtin_uint32;
      append_composite_type_field (t, "u32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_uint64;
      append_composite_type_field (t, "u64", init_vector_type (elem, 2));
      /* Floating point views.  */
      elem = builtin_type (gdbarch)->builtin_float;
      append_composite_type_field (t, "f32", init_vector_type (elem, 4));
      elem = builtin_type (gdbarch)->builtin_double;
      append_composite_type_field (t, "f64", init_vector_type (elem, 2));

      TYPE_VECTOR (t) = 1;
      TYPE_NAME (t) = "neon_q";
      tdep->neon_quad_type = t;
    }

  return tdep->neon_quad_type;
}
4116
/* Return the GDB type object for the "standard" data type of data in
   register N.  */

static struct type *
arm_register_type (struct gdbarch *gdbarch, int regnum)
{
  int num_regs = gdbarch_num_regs (gdbarch);

  /* The first 32 pseudo registers after the raw set are presented as
     single-precision floats (VFP pseudos).  */
  if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
      && regnum >= num_regs && regnum < num_regs + 32)
    return builtin_type (gdbarch)->builtin_float;

  /* The next 16 pseudo registers are the NEON quad views.  */
  if (gdbarch_tdep (gdbarch)->have_neon_pseudos
      && regnum >= num_regs + 32 && regnum < num_regs + 32 + 16)
    return arm_neon_quad_type (gdbarch);

  /* If the target description has register information, we are only
     in this function so that we can override the types of
     double-precision registers for NEON.  */
  if (tdesc_has_registers (gdbarch_target_desc (gdbarch)))
    {
      struct type *t = tdesc_register_type (gdbarch, regnum);

      if (regnum >= ARM_D0_REGNUM && regnum < ARM_D0_REGNUM + 32
	  && TYPE_CODE (t) == TYPE_CODE_FLT
	  && gdbarch_tdep (gdbarch)->have_neon)
	return arm_neon_double_type (gdbarch);
      else
	return t;
    }

  if (regnum >= ARM_F0_REGNUM && regnum < ARM_F0_REGNUM + NUM_FREGS)
    {
      /* Only give the FPA registers a real type when the target
	 actually has them.  */
      if (!gdbarch_tdep (gdbarch)->have_fpa_registers)
	return builtin_type (gdbarch)->builtin_void;

      return arm_ext_type (gdbarch);
    }
  else if (regnum == ARM_SP_REGNUM)
    return builtin_type (gdbarch)->builtin_data_ptr;
  else if (regnum == ARM_PC_REGNUM)
    return builtin_type (gdbarch)->builtin_func_ptr;
  else if (regnum >= ARRAY_SIZE (arm_register_names))
    /* These registers are only supported on targets which supply
       an XML description.  */
    return builtin_type (gdbarch)->builtin_int0;
  else
    return builtin_type (gdbarch)->builtin_uint32;
}
4166
4167 /* Map a DWARF register REGNUM onto the appropriate GDB register
4168 number. */
4169
4170 static int
4171 arm_dwarf_reg_to_regnum (struct gdbarch *gdbarch, int reg)
4172 {
4173 /* Core integer regs. */
4174 if (reg >= 0 && reg <= 15)
4175 return reg;
4176
4177 /* Legacy FPA encoding. These were once used in a way which
4178 overlapped with VFP register numbering, so their use is
4179 discouraged, but GDB doesn't support the ARM toolchain
4180 which used them for VFP. */
4181 if (reg >= 16 && reg <= 23)
4182 return ARM_F0_REGNUM + reg - 16;
4183
4184 /* New assignments for the FPA registers. */
4185 if (reg >= 96 && reg <= 103)
4186 return ARM_F0_REGNUM + reg - 96;
4187
4188 /* WMMX register assignments. */
4189 if (reg >= 104 && reg <= 111)
4190 return ARM_WCGR0_REGNUM + reg - 104;
4191
4192 if (reg >= 112 && reg <= 127)
4193 return ARM_WR0_REGNUM + reg - 112;
4194
4195 if (reg >= 192 && reg <= 199)
4196 return ARM_WC0_REGNUM + reg - 192;
4197
4198 /* VFP v2 registers. A double precision value is actually
4199 in d1 rather than s2, but the ABI only defines numbering
4200 for the single precision registers. This will "just work"
4201 in GDB for little endian targets (we'll read eight bytes,
4202 starting in s0 and then progressing to s1), but will be
4203 reversed on big endian targets with VFP. This won't
4204 be a problem for the new Neon quad registers; you're supposed
4205 to use DW_OP_piece for those. */
4206 if (reg >= 64 && reg <= 95)
4207 {
4208 char name_buf[4];
4209
4210 xsnprintf (name_buf, sizeof (name_buf), "s%d", reg - 64);
4211 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4212 strlen (name_buf));
4213 }
4214
4215 /* VFP v3 / Neon registers. This range is also used for VFP v2
4216 registers, except that it now describes d0 instead of s0. */
4217 if (reg >= 256 && reg <= 287)
4218 {
4219 char name_buf[4];
4220
4221 xsnprintf (name_buf, sizeof (name_buf), "d%d", reg - 256);
4222 return user_reg_map_name_to_regnum (gdbarch, name_buf,
4223 strlen (name_buf));
4224 }
4225
4226 return -1;
4227 }
4228
4229 /* Map GDB internal REGNUM onto the Arm simulator register numbers. */
4230 static int
4231 arm_register_sim_regno (struct gdbarch *gdbarch, int regnum)
4232 {
4233 int reg = regnum;
4234 gdb_assert (reg >= 0 && reg < gdbarch_num_regs (gdbarch));
4235
4236 if (regnum >= ARM_WR0_REGNUM && regnum <= ARM_WR15_REGNUM)
4237 return regnum - ARM_WR0_REGNUM + SIM_ARM_IWMMXT_COP0R0_REGNUM;
4238
4239 if (regnum >= ARM_WC0_REGNUM && regnum <= ARM_WC7_REGNUM)
4240 return regnum - ARM_WC0_REGNUM + SIM_ARM_IWMMXT_COP1R0_REGNUM;
4241
4242 if (regnum >= ARM_WCGR0_REGNUM && regnum <= ARM_WCGR7_REGNUM)
4243 return regnum - ARM_WCGR0_REGNUM + SIM_ARM_IWMMXT_COP1R8_REGNUM;
4244
4245 if (reg < NUM_GREGS)
4246 return SIM_ARM_R0_REGNUM + reg;
4247 reg -= NUM_GREGS;
4248
4249 if (reg < NUM_FREGS)
4250 return SIM_ARM_FP0_REGNUM + reg;
4251 reg -= NUM_FREGS;
4252
4253 if (reg < NUM_SREGS)
4254 return SIM_ARM_FPS_REGNUM + reg;
4255 reg -= NUM_SREGS;
4256
4257 internal_error (__FILE__, __LINE__, _("Bad REGNUM %d"), regnum);
4258 }
4259
4260 /* NOTE: cagney/2001-08-20: Both convert_from_extended() and
4261 convert_to_extended() use floatformat_arm_ext_littlebyte_bigword.
4262 It is thought that this is is the floating-point register format on
4263 little-endian systems. */
4264
4265 static void
4266 convert_from_extended (const struct floatformat *fmt, const void *ptr,
4267 void *dbl, int endianess)
4268 {
4269 DOUBLEST d;
4270
4271 if (endianess == BFD_ENDIAN_BIG)
4272 floatformat_to_doublest (&floatformat_arm_ext_big, ptr, &d);
4273 else
4274 floatformat_to_doublest (&floatformat_arm_ext_littlebyte_bigword,
4275 ptr, &d);
4276 floatformat_from_doublest (fmt, &d, dbl);
4277 }
4278
4279 static void
4280 convert_to_extended (const struct floatformat *fmt, void *dbl, const void *ptr,
4281 int endianess)
4282 {
4283 DOUBLEST d;
4284
4285 floatformat_to_doublest (fmt, ptr, &d);
4286 if (endianess == BFD_ENDIAN_BIG)
4287 floatformat_from_doublest (&floatformat_arm_ext_big, &d, dbl);
4288 else
4289 floatformat_from_doublest (&floatformat_arm_ext_littlebyte_bigword,
4290 &d, dbl);
4291 }
4292
4293 static int
4294 condition_true (unsigned long cond, unsigned long status_reg)
4295 {
4296 if (cond == INST_AL || cond == INST_NV)
4297 return 1;
4298
4299 switch (cond)
4300 {
4301 case INST_EQ:
4302 return ((status_reg & FLAG_Z) != 0);
4303 case INST_NE:
4304 return ((status_reg & FLAG_Z) == 0);
4305 case INST_CS:
4306 return ((status_reg & FLAG_C) != 0);
4307 case INST_CC:
4308 return ((status_reg & FLAG_C) == 0);
4309 case INST_MI:
4310 return ((status_reg & FLAG_N) != 0);
4311 case INST_PL:
4312 return ((status_reg & FLAG_N) == 0);
4313 case INST_VS:
4314 return ((status_reg & FLAG_V) != 0);
4315 case INST_VC:
4316 return ((status_reg & FLAG_V) == 0);
4317 case INST_HI:
4318 return ((status_reg & (FLAG_C | FLAG_Z)) == FLAG_C);
4319 case INST_LS:
4320 return ((status_reg & (FLAG_C | FLAG_Z)) != FLAG_C);
4321 case INST_GE:
4322 return (((status_reg & FLAG_N) == 0) == ((status_reg & FLAG_V) == 0));
4323 case INST_LT:
4324 return (((status_reg & FLAG_N) == 0) != ((status_reg & FLAG_V) == 0));
4325 case INST_GT:
4326 return (((status_reg & FLAG_Z) == 0)
4327 && (((status_reg & FLAG_N) == 0)
4328 == ((status_reg & FLAG_V) == 0)));
4329 case INST_LE:
4330 return (((status_reg & FLAG_Z) != 0)
4331 || (((status_reg & FLAG_N) == 0)
4332 != ((status_reg & FLAG_V) == 0)));
4333 }
4334 return 1;
4335 }
4336
/* Evaluate the shifter operand of the data-processing instruction
   INST, reading registers from FRAME.  CARRY is the current C flag
   (used for RRX), PC_VAL the value the PC reads as, and STATUS_REG
   the CPSR.  Returns the 32-bit operand value.  */

static unsigned long
shifted_reg_val (struct frame_info *frame, unsigned long inst, int carry,
		 unsigned long pc_val, unsigned long status_reg)
{
  unsigned long res, shift;
  int rm = bits (inst, 0, 3);
  unsigned long shifttype = bits (inst, 5, 6);

  if (bit (inst, 4))
    {
      /* Register-specified shift: amount is the low byte of Rs.
	 When Rs is the PC it reads as PC + 8.  */
      int rs = bits (inst, 8, 11);
      shift = (rs == 15 ? pc_val + 8
	       : get_frame_register_unsigned (frame, rs)) & 0xFF;
    }
  else
    /* Immediate shift amount, bits [11:7].  */
    shift = bits (inst, 7, 11);

  /* Rm reads as PC + 12 for register-specified shifts, PC + 8 for
     immediate shifts.  */
  res = (rm == ARM_PC_REGNUM
	 ? (pc_val + (bit (inst, 4) ? 12 : 8))
	 : get_frame_register_unsigned (frame, rm));

  switch (shifttype)
    {
    case 0:			/* LSL */
      res = shift >= 32 ? 0 : res << shift;
      break;

    case 1:			/* LSR */
      res = shift >= 32 ? 0 : res >> shift;
      break;

    case 2:			/* ASR */
      /* Shifts of 32 or more fill the result with the sign bit;
	 clamping to 31 achieves that below.  */
      if (shift >= 32)
	shift = 31;
      res = ((res & 0x80000000L)
	     ? ~((~res) >> shift) : res >> shift);
      break;

    case 3:			/* ROR/RRX */
      shift &= 31;
      if (shift == 0)
	/* Encoded rotate of zero means RRX: shift right one,
	   inserting the carry at the top.  */
	res = (res >> 1) | (carry ? 0x80000000L : 0);
      else
	res = (res >> shift) | (res << (32 - shift));
      break;
    }

  /* Mask to 32 bits in case unsigned long is wider on the host.  */
  return res & 0xffffffff;
}
4386
4387 /* Return number of 1-bits in VAL. */
4388
static int
bitcount (unsigned long val)
{
  int count = 0;

  /* Kernighan's trick: each iteration clears the lowest set bit.  */
  while (val != 0)
    {
      val &= val - 1;
      count++;
    }

  return count;
}
4397
/* Advance the Thumb-2 ITSTATE value past one instruction, returning
   the new state (zero once the IT block has finished).  */

static int
thumb_advance_itstate (unsigned int itstate)
{
  unsigned int next;

  /* Keep the base condition in IT[7:5]; shift the per-instruction
     condition/count bits in IT[4:0] left by one.  */
  next = (itstate & 0xe0) | ((itstate << 1) & 0x1f);

  /* A zero low nibble means the IT block is over, in which case the
     whole state resets to zero.  */
  return (next & 0x0f) == 0 ? 0 : next;
}
4411
4412 /* Find the next PC after the current instruction executes. In some
4413 cases we can not statically determine the answer (see the IT state
4414 handling in this function); in that case, a breakpoint may be
4415 inserted in addition to the returned PC, which will be used to set
4416 another breakpoint by our caller. */
4417
static CORE_ADDR
thumb_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  unsigned long offset;
  ULONGEST status, itstate;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = read_memory_unsigned_integer (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (gdbarch_tdep (gdbarch)->thumb2_breakpoint != NULL)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions in the block whose condition fails.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = read_memory_unsigned_integer (pc, 2,
						    byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  return MAKE_THUMB_ADDR (pc);
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      return MAKE_THUMB_ADDR (pc);
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      arm_insert_single_step_breakpoint (gdbarch, aspace,
						 MAKE_THUMB_ADDR (pc));
	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = read_memory_unsigned_integer (pc, 2,
							byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      return MAKE_THUMB_ADDR (pc);
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	/* Advance to the next instruction.  All the 32-bit
	   instructions share a common prefix.  */
	return MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1));

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
      sp = get_frame_register_unsigned (frame, ARM_SP_REGNUM);
      nextpc = read_memory_unsigned_integer (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  struct gdbarch_tdep *tdep;
	  tdep = gdbarch_tdep (gdbarch);

	  /* Let the OS ABI hook predict the PC after the syscall,
	     if one is installed.  */
	  if (tdep->syscall_next_pc != NULL)
	    nextpc = tdep->syscall_next_pc (frame);

	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = read_memory_unsigned_integer (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      /* Reassemble the 24-bit offset; J1/J2 are XORed with
		 the sign bit per the Thumb-2 encoding.  */
	      offset = ((imm1 << 12) + (imm2 << 1));
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = get_frame_register_unsigned (frame, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  offset = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  /* Bits 7 and 8 distinguish LDMIA/LDMDB/RFEIA/RFEDB and
	     determine where the loaded PC sits relative to Rn.  */
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = bitcount (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = get_frame_register_unsigned (frame, rn);
	      nextpc = get_frame_memory_unsigned (frame, addr + offset, 4);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = get_frame_register_unsigned (frame, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: base is the aligned PC plus or
		 minus a 12-bit immediate.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      /* Pre-indexed addressing with an 8-bit offset.  */
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset, optionally shifted.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += get_frame_register_unsigned (frame, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc = get_frame_memory_unsigned (frame, base, 4);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  /* Table entries are byte-sized forward-branch half-offsets.  */
	  offset = get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 1);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = get_frame_register_unsigned (frame, tbl_reg);

	  /* Table entries are halfword-sized forward-branch
	     half-offsets, so the index is scaled by two as well.  */
	  offset = 2 * get_frame_register_unsigned (frame, bits (inst2, 0, 3));
	  length = 2 * get_frame_memory_unsigned (frame, table + offset, 2);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = get_frame_register_unsigned (frame, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = get_frame_register_unsigned (frame, bits (inst1, 0, 2));

      /* Bit 11 selects CBNZ (branch if non-zero) vs CBZ.  */
      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }
  return nextpc;
}
4783
4784 /* Get the raw next address. PC is the current program counter, in
4785 FRAME, which is assumed to be executing in ARM mode.
4786
4787 The value returned has the execution state of the next instruction
4788 encoded in it. Use IS_THUMB_ADDR () to see whether the instruction is
4789 in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
4790 address. */
4791
static CORE_ADDR
arm_get_next_pc_raw (struct frame_info *frame, CORE_ADDR pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  unsigned long pc_val;
  unsigned long this_instr;
  unsigned long status;
  CORE_ADDR nextpc;

  pc_val = (unsigned long) pc;
  this_instr = read_memory_unsigned_integer (pc, 4, byte_order_for_code);

  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* The NV condition space holds unconditional instructions
     (BLX immediate, coprocessor transfers) on ARMv5 and later.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  /* The H bit (bit 24) supplies bit 1 of the target.  */
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination is the PC can
	       change control flow.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		return nextpc;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    /* The PC reads as the current instruction plus 8.  */
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: get_frame_register_unsigned (frame, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (frame, this_instr, c,
					  pc_val, status);

	    /* Emulate the ALU operation to find the value written
	       to the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison instructions don't write the PC; keep
		   the default next instruction.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }

	    /* In 26-bit APCS the bottom two bits of the result are
	       ignored, and we always end up in ARM state.  */
	    if (!arm_apcs_32)
	      nextpc = arm_addr_bits_remove (gdbarch, result);
	    else
	      nextpc = result;

	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : get_frame_register_unsigned (frame, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (frame, this_instr, c, pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR) base,
							      4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  unsigned long rn_val
		    = get_frame_register_unsigned (frame,
						   bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* The PC is loaded last, above all the other
			 registers in the list.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  nextpc =
		    (CORE_ADDR) read_memory_unsigned_integer ((CORE_ADDR)
							      (rn_val + offset),
							      4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    struct gdbarch_tdep *tdep;
	    tdep = gdbarch_tdep (gdbarch);

	    /* Let the OS ABI hook predict the PC after the syscall,
	       if one is installed.  */
	    if (tdep->syscall_next_pc != NULL)
	      nextpc = tdep->syscall_next_pc (frame);

	  }
	  break;

	default:
	  fprintf_filtered (gdb_stderr, _("Bad bit-field extraction\n"));
	  return (pc);
	}
    }

  return nextpc;
}
5060
5061 /* Determine next PC after current instruction executes. Will call either
5062 arm_get_next_pc_raw or thumb_get_next_pc_raw. Error out if infinite
5063 loop is detected. */
5064
5065 CORE_ADDR
5066 arm_get_next_pc (struct frame_info *frame, CORE_ADDR pc)
5067 {
5068 CORE_ADDR nextpc;
5069
5070 if (arm_frame_is_thumb (frame))
5071 nextpc = thumb_get_next_pc_raw (frame, pc);
5072 else
5073 nextpc = arm_get_next_pc_raw (frame, pc);
5074
5075 return nextpc;
5076 }
5077
5078 /* Like insert_single_step_breakpoint, but make sure we use a breakpoint
5079 of the appropriate mode (as encoded in the PC value), even if this
5080 differs from what would be expected according to the symbol tables. */
5081
5082 void
5083 arm_insert_single_step_breakpoint (struct gdbarch *gdbarch,
5084 struct address_space *aspace,
5085 CORE_ADDR pc)
5086 {
5087 struct cleanup *old_chain
5088 = make_cleanup_restore_integer (&arm_override_mode);
5089
5090 arm_override_mode = IS_THUMB_ADDR (pc);
5091 pc = gdbarch_addr_bits_remove (gdbarch, pc);
5092
5093 insert_single_step_breakpoint (gdbarch, aspace, pc);
5094
5095 do_cleanups (old_chain);
5096 }
5097
5098 /* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
5099 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
5100 is found, attempt to step through it. A breakpoint is placed at the end of
5101 the sequence. */
5102
static int
thumb_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.  */
  status = get_frame_register_unsigned (frame, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return 0;

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.  */
  insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  /* LDREX variants are 32-bit Thumb-2 encodings; anything else is
     not the start of an atomic sequence.  */
  if (thumb_insn_size (insn1) != 4)
    return 0;

  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
	|| ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return 0;
	}
      else
	{
	  /* 32-bit Thumb-2 instruction: read the second halfword.  */
	  insn2 = read_memory_unsigned_integer (loc, 2, byte_order_for_code);
	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Reassemble the 21-bit conditional-branch offset from
		 its scattered fields.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return 0; /* More than one conditional branch found,
			     fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return 0;

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace,
				       MAKE_THUMB_ADDR (breaks[index]));

  return 1;
}
5233
/* ARM-state counterpart of thumb_deal_with_atomic_sequence_raw: scan
   forward from FRAME's PC for a ldrex..strex atomic sequence, and
   insert single-step breakpoints after the sequence (and at the
   destination of at most one conditional branch inside it) so that
   execution is never stopped in the middle of the sequence.  Return 1
   if breakpoints were placed, 0 to fall back to ordinary
   single-stepping.  */

static int
arm_deal_with_atomic_sequence_raw (struct frame_info *frame)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct address_space *aspace = get_frame_address_space (frame);
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  CORE_ADDR pc = get_frame_pc (frame);
  /* breaks[0]: after the sequence; breaks[1]: conditional branch dest.  */
  CORE_ADDR breaks[2] = {-1, -1};
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return 0;

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = read_memory_unsigned_integer (loc, 4, byte_order_for_code);
      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
         sequence.  If a conditional branch is found, put a breakpoint in
         its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
        {
          if (last_breakpoint > 0)
            return 0; /* More than one conditional branch found, fallback
                         to the standard single-step code.  */

          breaks[1] = BranchDest (loc - 4, insn);
          last_breakpoint++;
        }

      /* We do not support atomic sequences that use any *other* instructions
         but conditional branches to change the PC.  Fall back to standard
         code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
        return 0;

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
        break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return 0;

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
          || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  /* Effectively inserts the breakpoints.  */
  for (index = 0; index <= last_breakpoint; index++)
    arm_insert_single_step_breakpoint (gdbarch, aspace, breaks[index]);

  return 1;
}
5308
int
arm_deal_with_atomic_sequence (struct frame_info *frame)
{
  /* Dispatch to the decoder matching the execution state of FRAME.  */
  return (arm_frame_is_thumb (frame)
          ? thumb_deal_with_atomic_sequence_raw (frame)
          : arm_deal_with_atomic_sequence_raw (frame));
}
5317
5318 /* single_step() is called just before we want to resume the inferior,
5319 if we want to single-step it but there is no hardware or kernel
5320 single-step support. We find the target of the coming instruction
5321 and breakpoint it. */
5322
5323 int
5324 arm_software_single_step (struct frame_info *frame)
5325 {
5326 struct gdbarch *gdbarch = get_frame_arch (frame);
5327 struct address_space *aspace = get_frame_address_space (frame);
5328 CORE_ADDR next_pc;
5329
5330 if (arm_deal_with_atomic_sequence (frame))
5331 return 1;
5332
5333 next_pc = arm_get_next_pc (frame, get_frame_pc (frame));
5334 arm_insert_single_step_breakpoint (gdbarch, aspace, next_pc);
5335
5336 return 1;
5337 }
5338
5339 /* Given BUF, which is OLD_LEN bytes ending at ENDADDR, expand
5340 the buffer to be NEW_LEN bytes ending at ENDADDR. Return
5341 NULL if an error occurs. BUF is freed. */
5342
5343 static gdb_byte *
5344 extend_buffer_earlier (gdb_byte *buf, CORE_ADDR endaddr,
5345 int old_len, int new_len)
5346 {
5347 gdb_byte *new_buf;
5348 int bytes_to_read = new_len - old_len;
5349
5350 new_buf = (gdb_byte *) xmalloc (new_len);
5351 memcpy (new_buf + bytes_to_read, buf, old_len);
5352 xfree (buf);
5353 if (target_read_memory (endaddr - new_len, new_buf, bytes_to_read) != 0)
5354 {
5355 xfree (new_buf);
5356 return NULL;
5357 }
5358 return new_buf;
5359 }
5360
5361 /* An IT block is at most the 2-byte IT instruction followed by
5362 four 4-byte instructions. The furthest back we must search to
5363 find an IT block that affects the current instruction is thus
5364 2 + 3 * 4 == 14 bytes. */
5365 #define MAX_IT_BLOCK_PREFIX 14
5366
5367 /* Use a quick scan if there are more than this many bytes of
5368 code. */
5369 #define IT_SCAN_THRESHOLD 32
5370
5371 /* Adjust a breakpoint's address to move breakpoints out of IT blocks.
5372 A breakpoint in an IT block may not be hit, depending on the
5373 condition flags. */
5374 static CORE_ADDR
5375 arm_adjust_breakpoint_address (struct gdbarch *gdbarch, CORE_ADDR bpaddr)
5376 {
5377 gdb_byte *buf;
5378 char map_type;
5379 CORE_ADDR boundary, func_start;
5380 int buf_len;
5381 enum bfd_endian order = gdbarch_byte_order_for_code (gdbarch);
5382 int i, any, last_it, last_it_count;
5383
5384 /* If we are using BKPT breakpoints, none of this is necessary. */
5385 if (gdbarch_tdep (gdbarch)->thumb2_breakpoint == NULL)
5386 return bpaddr;
5387
5388 /* ARM mode does not have this problem. */
5389 if (!arm_pc_is_thumb (gdbarch, bpaddr))
5390 return bpaddr;
5391
5392 /* We are setting a breakpoint in Thumb code that could potentially
5393 contain an IT block. The first step is to find how much Thumb
5394 code there is; we do not need to read outside of known Thumb
5395 sequences. */
5396 map_type = arm_find_mapping_symbol (bpaddr, &boundary);
5397 if (map_type == 0)
5398 /* Thumb-2 code must have mapping symbols to have a chance. */
5399 return bpaddr;
5400
5401 bpaddr = gdbarch_addr_bits_remove (gdbarch, bpaddr);
5402
5403 if (find_pc_partial_function (bpaddr, NULL, &func_start, NULL)
5404 && func_start > boundary)
5405 boundary = func_start;
5406
5407 /* Search for a candidate IT instruction. We have to do some fancy
5408 footwork to distinguish a real IT instruction from the second
5409 half of a 32-bit instruction, but there is no need for that if
5410 there's no candidate. */
5411 buf_len = min (bpaddr - boundary, MAX_IT_BLOCK_PREFIX);
5412 if (buf_len == 0)
5413 /* No room for an IT instruction. */
5414 return bpaddr;
5415
5416 buf = (gdb_byte *) xmalloc (buf_len);
5417 if (target_read_memory (bpaddr - buf_len, buf, buf_len) != 0)
5418 return bpaddr;
5419 any = 0;
5420 for (i = 0; i < buf_len; i += 2)
5421 {
5422 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5423 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5424 {
5425 any = 1;
5426 break;
5427 }
5428 }
5429 if (any == 0)
5430 {
5431 xfree (buf);
5432 return bpaddr;
5433 }
5434
5435 /* OK, the code bytes before this instruction contain at least one
5436 halfword which resembles an IT instruction. We know that it's
5437 Thumb code, but there are still two possibilities. Either the
5438 halfword really is an IT instruction, or it is the second half of
5439 a 32-bit Thumb instruction. The only way we can tell is to
5440 scan forwards from a known instruction boundary. */
5441 if (bpaddr - boundary > IT_SCAN_THRESHOLD)
5442 {
5443 int definite;
5444
5445 /* There's a lot of code before this instruction. Start with an
5446 optimistic search; it's easy to recognize halfwords that can
5447 not be the start of a 32-bit instruction, and use that to
5448 lock on to the instruction boundaries. */
5449 buf = extend_buffer_earlier (buf, bpaddr, buf_len, IT_SCAN_THRESHOLD);
5450 if (buf == NULL)
5451 return bpaddr;
5452 buf_len = IT_SCAN_THRESHOLD;
5453
5454 definite = 0;
5455 for (i = 0; i < buf_len - sizeof (buf) && ! definite; i += 2)
5456 {
5457 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5458 if (thumb_insn_size (inst1) == 2)
5459 {
5460 definite = 1;
5461 break;
5462 }
5463 }
5464
5465 /* At this point, if DEFINITE, BUF[I] is the first place we
5466 are sure that we know the instruction boundaries, and it is far
5467 enough from BPADDR that we could not miss an IT instruction
5468 affecting BPADDR. If ! DEFINITE, give up - start from a
5469 known boundary. */
5470 if (! definite)
5471 {
5472 buf = extend_buffer_earlier (buf, bpaddr, buf_len,
5473 bpaddr - boundary);
5474 if (buf == NULL)
5475 return bpaddr;
5476 buf_len = bpaddr - boundary;
5477 i = 0;
5478 }
5479 }
5480 else
5481 {
5482 buf = extend_buffer_earlier (buf, bpaddr, buf_len, bpaddr - boundary);
5483 if (buf == NULL)
5484 return bpaddr;
5485 buf_len = bpaddr - boundary;
5486 i = 0;
5487 }
5488
5489 /* Scan forwards. Find the last IT instruction before BPADDR. */
5490 last_it = -1;
5491 last_it_count = 0;
5492 while (i < buf_len)
5493 {
5494 unsigned short inst1 = extract_unsigned_integer (&buf[i], 2, order);
5495 last_it_count--;
5496 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
5497 {
5498 last_it = i;
5499 if (inst1 & 0x0001)
5500 last_it_count = 4;
5501 else if (inst1 & 0x0002)
5502 last_it_count = 3;
5503 else if (inst1 & 0x0004)
5504 last_it_count = 2;
5505 else
5506 last_it_count = 1;
5507 }
5508 i += thumb_insn_size (inst1);
5509 }
5510
5511 xfree (buf);
5512
5513 if (last_it == -1)
5514 /* There wasn't really an IT instruction after all. */
5515 return bpaddr;
5516
5517 if (last_it_count < 1)
5518 /* It was too far away. */
5519 return bpaddr;
5520
5521 /* This really is a trouble spot. Move the breakpoint to the IT
5522 instruction. */
5523 return bpaddr - buf_len + last_it;
5524 }
5525
5526 /* ARM displaced stepping support.
5527
5528 Generally ARM displaced stepping works as follows:
5529
5530 1. When an instruction is to be single-stepped, it is first decoded by
5531 arm_process_displaced_insn (called from arm_displaced_step_copy_insn).
5532 Depending on the type of instruction, it is then copied to a scratch
5533 location, possibly in a modified form. The copy_* set of functions
5534 performs such modification, as necessary. A breakpoint is placed after
5535 the modified instruction in the scratch space to return control to GDB.
5536 Note in particular that instructions which modify the PC will no longer
5537 do so after modification.
5538
5539 2. The instruction is single-stepped, by setting the PC to the scratch
5540 location address, and resuming. Control returns to GDB when the
5541 breakpoint is hit.
5542
5543 3. A cleanup function (cleanup_*) is called corresponding to the copy_*
5544 function used for the current instruction. This function's job is to
5545 put the CPU/memory state back to what it would have been if the
5546 instruction had been executed unmodified in its original location. */
5547
5548 /* NOP instruction (mov r0, r0). */
5549 #define ARM_NOP 0xe1a00000
5550 #define THUMB_NOP 0x4600
5551
5552 /* Helper for register reads for displaced stepping. In particular, this
5553 returns the PC as it would be seen by the instruction at its original
5554 location. */
5555
5556 ULONGEST
5557 displaced_read_reg (struct regcache *regs, struct displaced_step_closure *dsc,
5558 int regno)
5559 {
5560 ULONGEST ret;
5561 CORE_ADDR from = dsc->insn_addr;
5562
5563 if (regno == ARM_PC_REGNUM)
5564 {
5565 /* Compute pipeline offset:
5566 - When executing an ARM instruction, PC reads as the address of the
5567 current instruction plus 8.
5568 - When executing a Thumb instruction, PC reads as the address of the
5569 current instruction plus 4. */
5570
5571 if (!dsc->is_thumb)
5572 from += 8;
5573 else
5574 from += 4;
5575
5576 if (debug_displaced)
5577 fprintf_unfiltered (gdb_stdlog, "displaced: read pc value %.8lx\n",
5578 (unsigned long) from);
5579 return (ULONGEST) from;
5580 }
5581 else
5582 {
5583 regcache_cooked_read_unsigned (regs, regno, &ret);
5584 if (debug_displaced)
5585 fprintf_unfiltered (gdb_stdlog, "displaced: read r%d value %.8lx\n",
5586 regno, (unsigned long) ret);
5587 return ret;
5588 }
5589 }
5590
5591 static int
5592 displaced_in_arm_mode (struct regcache *regs)
5593 {
5594 ULONGEST ps;
5595 ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));
5596
5597 regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);
5598
5599 return (ps & t_bit) == 0;
5600 }
5601
5602 /* Write to the PC as from a branch instruction. */
5603
5604 static void
5605 branch_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5606 ULONGEST val)
5607 {
5608 if (!dsc->is_thumb)
5609 /* Note: If bits 0/1 are set, this branch would be unpredictable for
5610 architecture versions < 6. */
5611 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5612 val & ~(ULONGEST) 0x3);
5613 else
5614 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
5615 val & ~(ULONGEST) 0x1);
5616 }
5617
5618 /* Write to the PC as from a branch-exchange instruction. */
5619
static void
bx_write_pc (struct regcache *regs, ULONGEST val)
{
  ULONGEST ps;
  ULONGEST t_bit = arm_psr_thumb_bit (get_regcache_arch (regs));

  regcache_cooked_read_unsigned (regs, ARM_PS_REGNUM, &ps);

  if ((val & 1) == 1)
    {
      /* Bit 0 set: switch to Thumb state, clearing the interworking
         bit from the destination address.  */
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps | t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffe);
    }
  else if ((val & 2) == 0)
    {
      /* Word-aligned destination: switch to ARM state.  */
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val);
    }
  else
    {
      /* Unpredictable behaviour.  Try to do something sensible (switch to ARM
         mode, align dest to 4 bytes).  */
      warning (_("Single-stepping BX to non-word-aligned ARM instruction."));
      regcache_cooked_write_unsigned (regs, ARM_PS_REGNUM, ps & ~t_bit);
      regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM, val & 0xfffffffc);
    }
}
5647
5648 /* Write to the PC as if from a load instruction. */
5649
5650 static void
5651 load_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5652 ULONGEST val)
5653 {
5654 if (DISPLACED_STEPPING_ARCH_VERSION >= 5)
5655 bx_write_pc (regs, val);
5656 else
5657 branch_write_pc (regs, dsc, val);
5658 }
5659
5660 /* Write to the PC as if from an ALU instruction. */
5661
5662 static void
5663 alu_write_pc (struct regcache *regs, struct displaced_step_closure *dsc,
5664 ULONGEST val)
5665 {
5666 if (DISPLACED_STEPPING_ARCH_VERSION >= 7 && !dsc->is_thumb)
5667 bx_write_pc (regs, val);
5668 else
5669 branch_write_pc (regs, dsc, val);
5670 }
5671
5672 /* Helper for writing to registers for displaced stepping. Writing to the PC
   has varying effects depending on the instruction which does the write:
5674 this is controlled by the WRITE_PC argument. */
5675
void
displaced_write_reg (struct regcache *regs, struct displaced_step_closure *dsc,
                     int regno, ULONGEST val, enum pc_write_style write_pc)
{
  if (regno == ARM_PC_REGNUM)
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing pc %.8lx\n",
                            (unsigned long) val);
      /* A PC write is really a branch; WRITE_PC selects the semantics
         of the instruction class being emulated.  */
      switch (write_pc)
        {
        case BRANCH_WRITE_PC:
          branch_write_pc (regs, dsc, val);
          break;

        case BX_WRITE_PC:
          bx_write_pc (regs, val);
          break;

        case LOAD_WRITE_PC:
          load_write_pc (regs, dsc, val);
          break;

        case ALU_WRITE_PC:
          alu_write_pc (regs, dsc, val);
          break;

        case CANNOT_WRITE_PC:
          warning (_("Instruction wrote to PC in an unexpected way when "
                     "single-stepping"));
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("Invalid argument to displaced_write_reg"));
        }

      /* Note in the closure that this instruction wrote to the PC.  */
      dsc->wrote_to_pc = 1;
    }
  else
    {
      if (debug_displaced)
        fprintf_unfiltered (gdb_stdlog, "displaced: writing r%d value %.8lx\n",
                            regno, (unsigned long) val);
      regcache_cooked_write_unsigned (regs, regno, val);
    }
}
5723
5724 /* This function is used to concisely determine if an instruction INSN
5725 references PC. Register fields of interest in INSN should have the
5726 corresponding fields of BITMASK set to 0b1111. The function
   returns 1 if any of these fields in INSN reference the PC
5728 (also 0b1111, r15), else it returns 0. */
5729
static int
insn_references_pc (uint32_t insn, uint32_t bitmask)
{
  while (bitmask != 0)
    {
      /* Isolate the lowest remaining register field: its position is
         given by the lowest set bit of BITMASK, and the field itself
         is the four bits starting there.  */
      uint32_t lowbit = bitmask & ~(bitmask - 1);
      uint32_t field = lowbit * 0xf;

      /* An all-ones field denotes r15, the PC.  */
      if ((insn & field) == field)
        return 1;

      bitmask &= ~field;
    }

  return 0;
}
5755
5756 /* The simplest copy function. Many instructions have the same effect no
5757 matter what address they are executed at: in those cases, use this. */
5758
5759 static int
5760 arm_copy_unmodified (struct gdbarch *gdbarch, uint32_t insn,
5761 const char *iname, struct displaced_step_closure *dsc)
5762 {
5763 if (debug_displaced)
5764 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx, "
5765 "opcode/class '%s' unmodified\n", (unsigned long) insn,
5766 iname);
5767
5768 dsc->modinsn[0] = insn;
5769
5770 return 0;
5771 }
5772
5773 static int
5774 thumb_copy_unmodified_32bit (struct gdbarch *gdbarch, uint16_t insn1,
5775 uint16_t insn2, const char *iname,
5776 struct displaced_step_closure *dsc)
5777 {
5778 if (debug_displaced)
5779 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x %.4x, "
5780 "opcode/class '%s' unmodified\n", insn1, insn2,
5781 iname);
5782
5783 dsc->modinsn[0] = insn1;
5784 dsc->modinsn[1] = insn2;
5785 dsc->numinsns = 2;
5786
5787 return 0;
5788 }
5789
5790 /* Copy 16-bit Thumb(Thumb and 16-bit Thumb-2) instruction without any
5791 modification. */
5792 static int
5793 thumb_copy_unmodified_16bit (struct gdbarch *gdbarch, unsigned int insn,
5794 const char *iname,
5795 struct displaced_step_closure *dsc)
5796 {
5797 if (debug_displaced)
5798 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x, "
5799 "opcode/class '%s' unmodified\n", insn,
5800 iname);
5801
5802 dsc->modinsn[0] = insn;
5803
5804 return 0;
5805 }
5806
5807 /* Preload instructions with immediate offset. */
5808
5809 static void
5810 cleanup_preload (struct gdbarch *gdbarch,
5811 struct regcache *regs, struct displaced_step_closure *dsc)
5812 {
5813 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5814 if (!dsc->u.preload.immed)
5815 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
5816 }
5817
5818 static void
5819 install_preload (struct gdbarch *gdbarch, struct regcache *regs,
5820 struct displaced_step_closure *dsc, unsigned int rn)
5821 {
5822 ULONGEST rn_val;
5823 /* Preload instructions:
5824
5825 {pli/pld} [rn, #+/-imm]
5826 ->
5827 {pli/pld} [r0, #+/-imm]. */
5828
5829 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5830 rn_val = displaced_read_reg (regs, dsc, rn);
5831 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5832 dsc->u.preload.immed = 1;
5833
5834 dsc->cleanup = &cleanup_preload;
5835 }
5836
5837 static int
5838 arm_copy_preload (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
5839 struct displaced_step_closure *dsc)
5840 {
5841 unsigned int rn = bits (insn, 16, 19);
5842
5843 if (!insn_references_pc (insn, 0x000f0000ul))
5844 return arm_copy_unmodified (gdbarch, insn, "preload", dsc);
5845
5846 if (debug_displaced)
5847 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5848 (unsigned long) insn);
5849
5850 dsc->modinsn[0] = insn & 0xfff0ffff;
5851
5852 install_preload (gdbarch, regs, dsc, rn);
5853
5854 return 0;
5855 }
5856
static int
thumb2_copy_preload (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
                     struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int u_bit = bit (insn1, 7);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  /* Preloads using any base register other than the PC are
     position-independent and can run unmodified.  */
  if (rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "preload", dsc);

  /* PC is only allowed to use in PLI (immediate,literal) Encoding T3, and
     PLD (literal) Encoding T1.  */
  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying pld/pli pc (0x%x) %c imm12 %.4x\n",
                        (unsigned int) dsc->insn_addr, u_bit ? '+' : '-',
                        imm12);

  /* The U bit gives the sign of the immediate offset.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction {pli/pld} PC imm12 into:
     Prepare: tmp[0] <- r0, tmp[1] <- r1, r0 <- pc, r1 <- imm12

     {pli/pld} [r0, r1]

     Cleanup: r0 <- tmp[0], r1 <- tmp[1].  */

  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);

  /* displaced_read_reg yields the pipeline-adjusted PC value.  */
  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  displaced_write_reg (regs, dsc, 0, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, imm12, CANNOT_WRITE_PC);
  dsc->u.preload.immed = 0;

  /* {pli/pld} [r0, r1] */
  dsc->modinsn[0] = insn1 & 0xfff0;
  dsc->modinsn[1] = 0xf001;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_preload;
  return 0;
}
5904
5905 /* Preload instructions with register offset. */
5906
5907 static void
5908 install_preload_reg(struct gdbarch *gdbarch, struct regcache *regs,
5909 struct displaced_step_closure *dsc, unsigned int rn,
5910 unsigned int rm)
5911 {
5912 ULONGEST rn_val, rm_val;
5913
5914 /* Preload register-offset instructions:
5915
5916 {pli/pld} [rn, rm {, shift}]
5917 ->
5918 {pli/pld} [r0, r1 {, shift}]. */
5919
5920 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5921 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
5922 rn_val = displaced_read_reg (regs, dsc, rn);
5923 rm_val = displaced_read_reg (regs, dsc, rm);
5924 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5925 displaced_write_reg (regs, dsc, 1, rm_val, CANNOT_WRITE_PC);
5926 dsc->u.preload.immed = 0;
5927
5928 dsc->cleanup = &cleanup_preload;
5929 }
5930
5931 static int
5932 arm_copy_preload_reg (struct gdbarch *gdbarch, uint32_t insn,
5933 struct regcache *regs,
5934 struct displaced_step_closure *dsc)
5935 {
5936 unsigned int rn = bits (insn, 16, 19);
5937 unsigned int rm = bits (insn, 0, 3);
5938
5939
5940 if (!insn_references_pc (insn, 0x000f000ful))
5941 return arm_copy_unmodified (gdbarch, insn, "preload reg", dsc);
5942
5943 if (debug_displaced)
5944 fprintf_unfiltered (gdb_stdlog, "displaced: copying preload insn %.8lx\n",
5945 (unsigned long) insn);
5946
5947 dsc->modinsn[0] = (insn & 0xfff0fff0) | 0x1;
5948
5949 install_preload_reg (gdbarch, regs, dsc, rn, rm);
5950 return 0;
5951 }
5952
5953 /* Copy/cleanup coprocessor load and store instructions. */
5954
5955 static void
5956 cleanup_copro_load_store (struct gdbarch *gdbarch,
5957 struct regcache *regs,
5958 struct displaced_step_closure *dsc)
5959 {
5960 ULONGEST rn_val = displaced_read_reg (regs, dsc, 0);
5961
5962 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
5963
5964 if (dsc->u.ldst.writeback)
5965 displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, LOAD_WRITE_PC);
5966 }
5967
5968 static void
5969 install_copro_load_store (struct gdbarch *gdbarch, struct regcache *regs,
5970 struct displaced_step_closure *dsc,
5971 int writeback, unsigned int rn)
5972 {
5973 ULONGEST rn_val;
5974
5975 /* Coprocessor load/store instructions:
5976
5977 {stc/stc2} [<Rn>, #+/-imm] (and other immediate addressing modes)
5978 ->
5979 {stc/stc2} [r0, #+/-imm].
5980
5981 ldc/ldc2 are handled identically. */
5982
5983 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
5984 rn_val = displaced_read_reg (regs, dsc, rn);
5985 /* PC should be 4-byte aligned. */
5986 rn_val = rn_val & 0xfffffffc;
5987 displaced_write_reg (regs, dsc, 0, rn_val, CANNOT_WRITE_PC);
5988
5989 dsc->u.ldst.writeback = writeback;
5990 dsc->u.ldst.rn = rn;
5991
5992 dsc->cleanup = &cleanup_copro_load_store;
5993 }
5994
5995 static int
5996 arm_copy_copro_load_store (struct gdbarch *gdbarch, uint32_t insn,
5997 struct regcache *regs,
5998 struct displaced_step_closure *dsc)
5999 {
6000 unsigned int rn = bits (insn, 16, 19);
6001
6002 if (!insn_references_pc (insn, 0x000f0000ul))
6003 return arm_copy_unmodified (gdbarch, insn, "copro load/store", dsc);
6004
6005 if (debug_displaced)
6006 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6007 "load/store insn %.8lx\n", (unsigned long) insn);
6008
6009 dsc->modinsn[0] = insn & 0xfff0ffff;
6010
6011 install_copro_load_store (gdbarch, regs, dsc, bit (insn, 25), rn);
6012
6013 return 0;
6014 }
6015
6016 static int
6017 thumb2_copy_copro_load_store (struct gdbarch *gdbarch, uint16_t insn1,
6018 uint16_t insn2, struct regcache *regs,
6019 struct displaced_step_closure *dsc)
6020 {
6021 unsigned int rn = bits (insn1, 0, 3);
6022
6023 if (rn != ARM_PC_REGNUM)
6024 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
6025 "copro load/store", dsc);
6026
6027 if (debug_displaced)
6028 fprintf_unfiltered (gdb_stdlog, "displaced: copying coprocessor "
6029 "load/store insn %.4x%.4x\n", insn1, insn2);
6030
6031 dsc->modinsn[0] = insn1 & 0xfff0;
6032 dsc->modinsn[1] = insn2;
6033 dsc->numinsns = 2;
6034
6035 /* This function is called for copying instruction LDC/LDC2/VLDR, which
6036 doesn't support writeback, so pass 0. */
6037 install_copro_load_store (gdbarch, regs, dsc, 0, rn);
6038
6039 return 0;
6040 }
6041
6042 /* Clean up branch instructions (actually perform the branch, by setting
6043 PC). */
6044
static void
cleanup_branch (struct gdbarch *gdbarch, struct regcache *regs,
                struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int branch_taken = condition_true (dsc->u.branch.cond, status);
  /* Exchanging branches (BX/BLX) may switch state; plain branches
     never do.  */
  enum pc_write_style write_pc = dsc->u.branch.exchange
                                 ? BX_WRITE_PC : BRANCH_WRITE_PC;

  /* A conditional branch whose condition failed is a no-op.  */
  if (!branch_taken)
    return;

  if (dsc->u.branch.link)
    {
      /* The value of LR should be the next insn of current one.  In order
         not to confuse logic handling later insn `bx lr', if current insn
         mode is Thumb, the bit 0 of LR value should be set to 1.  */
      ULONGEST next_insn_addr = dsc->insn_addr + dsc->insn_size;

      if (dsc->is_thumb)
        next_insn_addr |= 0x1;

      displaced_write_reg (regs, dsc, ARM_LR_REGNUM, next_insn_addr,
                           CANNOT_WRITE_PC);
    }

  displaced_write_reg (regs, dsc, ARM_PC_REGNUM, dsc->u.branch.dest, write_pc);
}
6073
6074 /* Copy B/BL/BLX instructions with immediate destinations. */
6075
6076 static void
6077 install_b_bl_blx (struct gdbarch *gdbarch, struct regcache *regs,
6078 struct displaced_step_closure *dsc,
6079 unsigned int cond, int exchange, int link, long offset)
6080 {
6081 /* Implement "BL<cond> <label>" as:
6082
6083 Preparation: cond <- instruction condition
6084 Insn: mov r0, r0 (nop)
6085 Cleanup: if (condition true) { r14 <- pc; pc <- label }.
6086
6087 B<cond> similar, but don't set r14 in cleanup. */
6088
6089 dsc->u.branch.cond = cond;
6090 dsc->u.branch.link = link;
6091 dsc->u.branch.exchange = exchange;
6092
6093 dsc->u.branch.dest = dsc->insn_addr;
6094 if (link && exchange)
6095 /* For BLX, offset is computed from the Align (PC, 4). */
6096 dsc->u.branch.dest = dsc->u.branch.dest & 0xfffffffc;
6097
6098 if (dsc->is_thumb)
6099 dsc->u.branch.dest += 4 + offset;
6100 else
6101 dsc->u.branch.dest += 8 + offset;
6102
6103 dsc->cleanup = &cleanup_branch;
6104 }
/* Copy an ARM-state B/BL/BLX (immediate) instruction.  */
static int
arm_copy_b_bl_blx (struct gdbarch *gdbarch, uint32_t insn,
                   struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int cond = bits (insn, 28, 31);
  /* A 0xf condition field denotes BLX (immediate).  */
  int exchange = (cond == 0xf);
  int link = exchange || bit (insn, 24);
  long offset;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s immediate insn "
                        "%.8lx\n", (exchange) ? "blx" : (link) ? "bl" : "b",
                        (unsigned long) insn);
  if (exchange)
    /* For BLX, set bit 0 of the destination.  The cleanup_branch function will
       then arrange the switch into Thumb mode.  */
    offset = (bits (insn, 0, 23) << 2) | (bit (insn, 24) << 1) | 1;
  else
    offset = bits (insn, 0, 23) << 2;

  /* Bit 25 is the sign bit of the 26-bit offset (imm24 << 2);
     sign-extend it.  */
  if (bit (offset, 25))
    offset = offset | ~0x3ffffff;

  /* Execute a nop in the scratch area; cleanup_branch performs the
     actual (possibly conditional) branch.  */
  dsc->modinsn[0] = ARM_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6133
/* Copy a 32-bit Thumb B/BL/BLX (immediate) instruction.  */
static int
thumb2_copy_b_bl_blx (struct gdbarch *gdbarch, uint16_t insn1,
                      uint16_t insn2, struct regcache *regs,
                      struct displaced_step_closure *dsc)
{
  int link = bit (insn2, 14);
  int exchange = link && !bit (insn2, 12);
  int cond = INST_AL;
  long offset = 0;
  /* J1/J2 combine with the S (sign) bit to form I1/I2, the high bits
     of the branch offset in the Thumb-2 encodings.  */
  int j1 = bit (insn2, 13);
  int j2 = bit (insn2, 11);
  int s = sbits (insn1, 10, 10);
  int i1 = !(j1 ^ bit (insn1, 10));
  int i2 = !(j2 ^ bit (insn1, 10));

  if (!link && !exchange) /* B */
    {
      offset = (bits (insn2, 0, 10) << 1);
      if (bit (insn2, 12)) /* Encoding T4 */
        {
          offset |= (bits (insn1, 0, 9) << 12)
            | (i2 << 22)
            | (i1 << 23)
            | (s << 24);
          cond = INST_AL;
        }
      else /* Encoding T3 */
        {
          /* Encoding T3 carries a condition in bits 6-9 of the first
             halfword.  */
          offset |= (bits (insn1, 0, 5) << 12)
            | (j1 << 18)
            | (j2 << 19)
            | (s << 20);
          cond = bits (insn1, 6, 9);
        }
    }
  else
    {
      /* BL/BLX: BLX drops the lowest offset bit and uses a 4-byte
         aligned target.  */
      offset = (bits (insn1, 0, 9) << 12);
      offset |= ((i2 << 22) | (i1 << 23) | (s << 24));
      offset |= exchange ?
        (bits (insn2, 1, 10) << 2) : (bits (insn2, 0, 10) << 1);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %s insn "
                        "%.4x %.4x with offset %.8lx\n",
                        link ? (exchange) ? "blx" : "bl" : "b",
                        insn1, insn2, offset);

  dsc->modinsn[0] = THUMB_NOP;

  install_b_bl_blx (gdbarch, regs, dsc, cond, exchange, link, offset);
  return 0;
}
6188
/* Copy 16-bit Thumb B instructions (conditional and unconditional
   immediate branches).  */
static int
thumb_copy_b (struct gdbarch *gdbarch, unsigned short insn,
              struct displaced_step_closure *dsc)
{
  unsigned int cond = 0;
  int offset = 0;
  unsigned short bit_12_15 = bits (insn, 12, 15);
  CORE_ADDR from = dsc->insn_addr;

  if (bit_12_15 == 0xd) /* Encoding T1: conditional branch.  */
    {
      /* offset = SignExtend (imm8:0, 32) */
      offset = sbits ((insn << 1), 0, 8);
      cond = bits (insn, 8, 11);
    }
  else if (bit_12_15 == 0xe) /* Encoding T2 */
    {
      /* Unconditional: offset = SignExtend (imm11:0, 32).  */
      offset = sbits ((insn << 1), 0, 11);
      cond = INST_AL;
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
                        "displaced: copying b immediate insn %.4x "
                        "with offset %d\n", insn, offset);

  dsc->u.branch.cond = cond;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  /* The Thumb PC reads as the instruction address plus 4.  */
  dsc->u.branch.dest = from + 4 + offset;

  /* Execute a nop; cleanup_branch performs the actual branch.  */
  dsc->modinsn[0] = THUMB_NOP;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
6227
6228 /* Copy BX/BLX with register-specified destinations. */
6229
6230 static void
6231 install_bx_blx_reg (struct gdbarch *gdbarch, struct regcache *regs,
6232 struct displaced_step_closure *dsc, int link,
6233 unsigned int cond, unsigned int rm)
6234 {
6235 /* Implement {BX,BLX}<cond> <reg>" as:
6236
6237 Preparation: cond <- instruction condition
6238 Insn: mov r0, r0 (nop)
6239 Cleanup: if (condition true) { r14 <- pc; pc <- dest; }.
6240
6241 Don't set r14 in cleanup for BX. */
6242
6243 dsc->u.branch.dest = displaced_read_reg (regs, dsc, rm);
6244
6245 dsc->u.branch.cond = cond;
6246 dsc->u.branch.link = link;
6247
6248 dsc->u.branch.exchange = 1;
6249
6250 dsc->cleanup = &cleanup_branch;
6251 }
6252
6253 static int
6254 arm_copy_bx_blx_reg (struct gdbarch *gdbarch, uint32_t insn,
6255 struct regcache *regs, struct displaced_step_closure *dsc)
6256 {
6257 unsigned int cond = bits (insn, 28, 31);
6258 /* BX: x12xxx1x
6259 BLX: x12xxx3x. */
6260 int link = bit (insn, 5);
6261 unsigned int rm = bits (insn, 0, 3);
6262
6263 if (debug_displaced)
6264 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.8lx",
6265 (unsigned long) insn);
6266
6267 dsc->modinsn[0] = ARM_NOP;
6268
6269 install_bx_blx_reg (gdbarch, regs, dsc, link, cond, rm);
6270 return 0;
6271 }
6272
6273 static int
6274 thumb_copy_bx_blx_reg (struct gdbarch *gdbarch, uint16_t insn,
6275 struct regcache *regs,
6276 struct displaced_step_closure *dsc)
6277 {
6278 int link = bit (insn, 7);
6279 unsigned int rm = bits (insn, 3, 6);
6280
6281 if (debug_displaced)
6282 fprintf_unfiltered (gdb_stdlog, "displaced: copying insn %.4x",
6283 (unsigned short) insn);
6284
6285 dsc->modinsn[0] = THUMB_NOP;
6286
6287 install_bx_blx_reg (gdbarch, regs, dsc, link, INST_AL, rm);
6288
6289 return 0;
6290 }
6291
6292
6293 /* Copy/cleanup arithmetic/logic instruction with immediate RHS. */
6294
6295 static void
6296 cleanup_alu_imm (struct gdbarch *gdbarch,
6297 struct regcache *regs, struct displaced_step_closure *dsc)
6298 {
6299 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6300 displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
6301 displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
6302 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6303 }
6304
/* Copy an ARM data-processing instruction with an immediate operand,
   e.g. "add<cond> rd, rn, #imm".  If neither Rd nor Rn is the PC the
   insn runs unmodified; otherwise the operands are remapped onto r0/r1
   and cleanup_alu_imm moves the result back afterwards.  Returns 0.  */
static int
arm_copy_alu_imm (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rd = bits (insn, 12, 15);
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which takes no Rn operand.  */
  int is_mov = (op == 0xd);
  ULONGEST rd_val, rn_val;

  /* 0x000ff000 covers the Rd and Rn fields.  */
  if (!insn_references_pc (insn, 0x000ff000ul))
    return arm_copy_unmodified (gdbarch, insn, "ALU immediate", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying immediate %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* All reads happen before any write so a PC operand observes the
     original context.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* Clear the Rd and Rn fields (Rd becomes r0); for non-MOV insns set
     Rn to r1.  */
  if (is_mov)
    dsc->modinsn[0] = insn & 0xfff00fff;
  else
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x10000;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6352
/* Copy a 32-bit Thumb-2 MOV-immediate instruction whose operands touch
   the PC, remapping them onto r0/r1 as in the ARM variant above;
   cleanup_alu_imm performs the fix-up.  Returns 0.  */
static int
thumb2_copy_alu_imm (struct gdbarch *gdbarch, uint16_t insn1,
		     uint16_t insn2, struct regcache *regs,
		     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 5, 8);
  unsigned int rn, rm, rd;
  ULONGEST rd_val, rn_val;

  /* NOTE(review): for MOV (immediate) encodings the bits 0-3 of insn2
     decoded here as "Rm" overlap the low nibble of the immediate --
     verify that callers only reach this path for encodings where that
     field is a genuine register.  */
  rn = bits (insn1, 0, 3);	/* Rn */
  rm = bits (insn2, 0, 3);	/* Rm */
  rd = bits (insn2, 8, 11);	/* Rd */

  /* This routine is only called for instruction MOV.  */
  gdb_assert (op == 0x2 && rn == 0xf);

  if (rm != ARM_PC_REGNUM && rd != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ALU imm", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.4x%.4x\n",
			"ALU", insn1, insn2);

  /* Instruction is of form:

     <op><cond> rd, [rn,] #imm

     Rewrite as:

     Preparation: tmp1, tmp2 <- r0, r1;
		  r0, r1 <- rd, rn
     Insn: <op><cond> r0, r1, #imm
     Cleanup: rd <- r0; r0 <- tmp1; r1 <- tmp2
  */

  /* Reads precede writes so a PC operand observes the original
     context.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  rd_val = displaced_read_reg (regs, dsc, rd);
  displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
  dsc->rd = rd;

  /* First halfword unchanged; in the second, clear the Rd and Rm
     fields (Rd becomes r0) and set Rm to r1.  */
  dsc->modinsn[0] = insn1;
  dsc->modinsn[1] = ((insn2 & 0xf0f0) | 0x1);
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_alu_imm;

  return 0;
}
6404
6405 /* Copy/cleanup arithmetic/logic insns with register RHS. */
6406
6407 static void
6408 cleanup_alu_reg (struct gdbarch *gdbarch,
6409 struct regcache *regs, struct displaced_step_closure *dsc)
6410 {
6411 ULONGEST rd_val;
6412 int i;
6413
6414 rd_val = displaced_read_reg (regs, dsc, 0);
6415
6416 for (i = 0; i < 3; i++)
6417 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6418
6419 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6420 }
6421
6422 static void
6423 install_alu_reg (struct gdbarch *gdbarch, struct regcache *regs,
6424 struct displaced_step_closure *dsc,
6425 unsigned int rd, unsigned int rn, unsigned int rm)
6426 {
6427 ULONGEST rd_val, rn_val, rm_val;
6428
6429 /* Instruction is of form:
6430
6431 <op><cond> rd, [rn,] rm [, <shift>]
6432
6433 Rewrite as:
6434
6435 Preparation: tmp1, tmp2, tmp3 <- r0, r1, r2;
6436 r0, r1, r2 <- rd, rn, rm
6437 Insn: <op><cond> r0, [r1,] r2 [, <shift>]
6438 Cleanup: rd <- r0; r0, r1, r2 <- tmp1, tmp2, tmp3
6439 */
6440
6441 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
6442 dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
6443 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
6444 rd_val = displaced_read_reg (regs, dsc, rd);
6445 rn_val = displaced_read_reg (regs, dsc, rn);
6446 rm_val = displaced_read_reg (regs, dsc, rm);
6447 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6448 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6449 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6450 dsc->rd = rd;
6451
6452 dsc->cleanup = &cleanup_alu_reg;
6453 }
6454
/* Copy an ARM data-processing instruction whose second operand is a
   register (possibly shifted by a constant), e.g.
   "add<cond> rd, rn, rm".  PC operands are remapped onto r0-r2 by
   install_alu_reg.  Returns 0.  */
static int
arm_copy_alu_reg (struct gdbarch *gdbarch, uint32_t insn, struct regcache *regs,
		  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which takes no Rn operand.  */
  int is_mov = (op == 0xd);

  /* 0x000ff00f covers the Rn, Rd and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying reg %s insn %.8lx\n",
			is_mov ? "move" : "ALU", (unsigned long) insn);

  /* Rewrite Rd to r0 and Rm to r2; for non-MOV insns also set Rn to
     r1.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x2;
  else
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x10002;

  install_alu_reg (gdbarch, regs, dsc, bits (insn, 12, 15), bits (insn, 16, 19),
		   bits (insn, 0, 3));
  return 0;
}
6478
/* Copy a 16-bit Thumb hi-register ALU instruction (ADD/CMP/MOV between
   high and low registers) when Rd or Rm is the PC.  Returns 0.  */
static int
thumb_copy_alu_reg (struct gdbarch *gdbarch, uint16_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  unsigned rm, rd;

  rm = bits (insn, 3, 6);
  /* Rd is split in this encoding: bit 7 supplies the high bit, bits
     0-2 the low bits.  */
  rd = (bit (insn, 7) << 3) | bits (insn, 0, 2);

  if (rd != ARM_PC_REGNUM && rm != ARM_PC_REGNUM)
    return thumb_copy_unmodified_16bit (gdbarch, insn, "ALU reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying ALU reg insn %.4x\n",
			(unsigned short) insn);

  /* Keep the opcode (bits 8-15); rewrite Rd to r0 and Rm to r2.  */
  dsc->modinsn[0] = ((insn & 0xff00) | 0x10);

  /* Rd doubles as the first source operand in these encodings, so pass
     it as both rd and rn.  */
  install_alu_reg (gdbarch, regs, dsc, rd, rd, rm);

  return 0;
}
6502
6503 /* Cleanup/copy arithmetic/logic insns with shifted register RHS. */
6504
6505 static void
6506 cleanup_alu_shifted_reg (struct gdbarch *gdbarch,
6507 struct regcache *regs,
6508 struct displaced_step_closure *dsc)
6509 {
6510 ULONGEST rd_val = displaced_read_reg (regs, dsc, 0);
6511 int i;
6512
6513 for (i = 0; i < 4; i++)
6514 displaced_write_reg (regs, dsc, i, dsc->tmp[i], CANNOT_WRITE_PC);
6515
6516 displaced_write_reg (regs, dsc, dsc->rd, rd_val, ALU_WRITE_PC);
6517 }
6518
6519 static void
6520 install_alu_shifted_reg (struct gdbarch *gdbarch, struct regcache *regs,
6521 struct displaced_step_closure *dsc,
6522 unsigned int rd, unsigned int rn, unsigned int rm,
6523 unsigned rs)
6524 {
6525 int i;
6526 ULONGEST rd_val, rn_val, rm_val, rs_val;
6527
6528 /* Instruction is of form:
6529
6530 <op><cond> rd, [rn,] rm, <shift> rs
6531
6532 Rewrite as:
6533
6534 Preparation: tmp1, tmp2, tmp3, tmp4 <- r0, r1, r2, r3
6535 r0, r1, r2, r3 <- rd, rn, rm, rs
6536 Insn: <op><cond> r0, r1, r2, <shift> r3
6537 Cleanup: tmp5 <- r0
6538 r0, r1, r2, r3 <- tmp1, tmp2, tmp3, tmp4
6539 rd <- tmp5
6540 */
6541
6542 for (i = 0; i < 4; i++)
6543 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
6544
6545 rd_val = displaced_read_reg (regs, dsc, rd);
6546 rn_val = displaced_read_reg (regs, dsc, rn);
6547 rm_val = displaced_read_reg (regs, dsc, rm);
6548 rs_val = displaced_read_reg (regs, dsc, rs);
6549 displaced_write_reg (regs, dsc, 0, rd_val, CANNOT_WRITE_PC);
6550 displaced_write_reg (regs, dsc, 1, rn_val, CANNOT_WRITE_PC);
6551 displaced_write_reg (regs, dsc, 2, rm_val, CANNOT_WRITE_PC);
6552 displaced_write_reg (regs, dsc, 3, rs_val, CANNOT_WRITE_PC);
6553 dsc->rd = rd;
6554 dsc->cleanup = &cleanup_alu_shifted_reg;
6555 }
6556
/* Copy an ARM data-processing instruction whose second operand is a
   register shifted by a register, e.g. "add<cond> rd, rn, rm, lsl rs".
   PC operands are remapped onto r0-r3 by install_alu_shifted_reg.
   Returns 0.  */
static int
arm_copy_alu_shifted_reg (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 21, 24);
  /* Opcode 0xd is MOV, which takes no Rn operand.  */
  int is_mov = (op == 0xd);
  unsigned int rd, rn, rm, rs;

  /* 0x000fff0f covers the Rn, Rd, Rs and Rm fields.  */
  if (!insn_references_pc (insn, 0x000fff0ful))
    return arm_copy_unmodified (gdbarch, insn, "ALU shifted reg", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying shifted reg %s insn "
			"%.8lx\n", is_mov ? "move" : "ALU",
			(unsigned long) insn);

  rn = bits (insn, 16, 19);
  rm = bits (insn, 0, 3);
  rs = bits (insn, 8, 11);
  rd = bits (insn, 12, 15);

  /* Rewrite Rd->r0, Rs->r3, Rm->r2; for non-MOV insns also Rn->r1.  */
  if (is_mov)
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x302;
  else
    dsc->modinsn[0] = (insn & 0xfff000f0) | 0x10302;

  install_alu_shifted_reg (gdbarch, regs, dsc, rd, rn, rm, rs);

  return 0;
}
6588
/* Clean up load instructions.  */

static void
cleanup_load (struct gdbarch *gdbarch, struct regcache *regs,
	      struct displaced_step_closure *dsc)
{
  ULONGEST rt_val, rt_val2 = 0, rn_val;

  /* The rewritten insn loaded into r0 (and r1 for 8-byte transfers)
     and left the possibly written-back base address in r2.  */
  rt_val = displaced_read_reg (regs, dsc, 0);
  if (dsc->u.ldst.xfersize == 8)
    rt_val2 = displaced_read_reg (regs, dsc, 1);
  rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r3 was used (as the index) only by
     register-offset forms.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);

  /* Handle register writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
  /* Put result in right place.  */
  displaced_write_reg (regs, dsc, dsc->rd, rt_val, LOAD_WRITE_PC);
  if (dsc->u.ldst.xfersize == 8)
    displaced_write_reg (regs, dsc, dsc->rd + 1, rt_val2, LOAD_WRITE_PC);
}
6617
/* Clean up store instructions.  */

static void
cleanup_store (struct gdbarch *gdbarch, struct regcache *regs,
	       struct displaced_step_closure *dsc)
{
  /* r2 holds the (possibly written-back) base address.  */
  ULONGEST rn_val = displaced_read_reg (regs, dsc, 2);

  /* Restore the scratch registers.  r3 is used only by register-offset
     forms; r4 only by the str-pc rewrite sequence.  */
  displaced_write_reg (regs, dsc, 0, dsc->tmp[0], CANNOT_WRITE_PC);
  if (dsc->u.ldst.xfersize > 4)
    displaced_write_reg (regs, dsc, 1, dsc->tmp[1], CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, dsc->tmp[2], CANNOT_WRITE_PC);
  if (!dsc->u.ldst.immed)
    displaced_write_reg (regs, dsc, 3, dsc->tmp[3], CANNOT_WRITE_PC);
  /* NOTE(review): r4 is restored when restore_r4 is CLEAR, yet the
     str-pc path in arm_copy_ldr_str_ldrb_strb (which clobbers r4) sets
     restore_r4 = 1 -- this condition looks inverted; verify against
     upstream history before changing.  */
  if (!dsc->u.ldst.restore_r4)
    displaced_write_reg (regs, dsc, 4, dsc->tmp[4], CANNOT_WRITE_PC);

  /* Writeback.  */
  if (dsc->u.ldst.writeback)
    displaced_write_reg (regs, dsc, dsc->u.ldst.rn, rn_val, CANNOT_WRITE_PC);
}
6639
/* Copy "extra" load/store instructions.  These are halfword/doubleword
   transfers, which have a different encoding to byte/word transfers.  */

static int
arm_copy_extra_ld_st (struct gdbarch *gdbarch, uint32_t insn, int unpriveleged,
		      struct regcache *regs, struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 24);
  unsigned int op2 = bits (insn, 5, 6);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);
  /* Tables indexed by the OPCODE computed below: whether each form is
     a load, and its transfer size in bytes (2 = halfword, 8 =
     doubleword).  */
  char load[12] = {0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1};
  char bytesize[12] = {2, 2, 2, 2, 8, 1, 8, 1, 8, 2, 8, 2};
  int immed = (op1 & 0x4) != 0;
  int opcode;
  ULONGEST rt_val, rt_val2 = 0, rn_val, rm_val = 0;

  /* 0x000ff00f covers the Rn, Rt and Rm fields; run unmodified if none
     names the PC.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "extra load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copying %sextra load/store "
			"insn %.8lx\n", unpriveleged ? "unpriveleged " : "",
			(unsigned long) insn);

  /* Fold op2 and the load/immediate bits of op1 into a table index.  */
  opcode = ((op2 << 2) | (op1 & 0x1) | ((op1 & 0x4) >> 1)) - 4;

  if (opcode < 0)
    internal_error (__FILE__, __LINE__,
		    _("copy_extra_ld_st: instruction decode error"));

  /* Save the scratch registers (r3 only for register-offset forms).  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[1] = displaced_read_reg (regs, dsc, 1);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  /* Read operands before writing, so PC reads see original context.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  if (bytesize[opcode] == 8)
    rt_val2 = displaced_read_reg (regs, dsc, rt + 1);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Map Rt->r0 (and Rt2->r1 for doublewords), Rn->r2, Rm->r3.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  if (bytesize[opcode] == 8)
    displaced_write_reg (regs, dsc, 1, rt_val2, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);

  dsc->rd = rt;
  dsc->u.ldst.xfersize = bytesize[opcode];
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  /* Post-indexed (P clear) or W set implies base writeback.  */
  dsc->u.ldst.writeback = bit (insn, 24) == 0 || bit (insn, 21) != 0;
  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, #imm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, #imm].  */
    dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
  else
    /* {ldr,str}<width><cond> rt, [rt2,] [rn, +/-rm]
       ->
       {ldr,str}<width><cond> r0, [r1,] [r2, +/-r3].  */
    dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;

  dsc->cleanup = load[opcode] ? &cleanup_load : &cleanup_store;

  return 0;
}
6714
/* Copy byte/half word/word loads and stores.  */

static void
install_load_store (struct gdbarch *gdbarch, struct regcache *regs,
		    struct displaced_step_closure *dsc, int load,
		    int immed, int writeback, int size, int usermode,
		    int rt, int rm, int rn)
{
  ULONGEST rt_val, rn_val, rm_val = 0;

  /* Save the scratch registers the rewritten insn (and, for stores of
     PC, the r4-based fix-up sequence below) will clobber.  r3 is only
     needed for register-offset forms.  */
  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  if (!immed)
    dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
  if (!load)
    dsc->tmp[4] = displaced_read_reg (regs, dsc, 4);

  /* Read all operands before any write so PC operands observe the
     original context.  */
  rt_val = displaced_read_reg (regs, dsc, rt);
  rn_val = displaced_read_reg (regs, dsc, rn);
  if (!immed)
    rm_val = displaced_read_reg (regs, dsc, rm);

  /* Map Rt->r0, Rn->r2 and (if used) Rm->r3.  */
  displaced_write_reg (regs, dsc, 0, rt_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 2, rn_val, CANNOT_WRITE_PC);
  if (!immed)
    displaced_write_reg (regs, dsc, 3, rm_val, CANNOT_WRITE_PC);
  dsc->rd = rt;
  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.rn = rn;
  dsc->u.ldst.immed = immed;
  dsc->u.ldst.writeback = writeback;

  /* To write PC we can do:

     Before this sequence of instructions:
     r0 is the PC value got from displaced_read_reg, so r0 = from + 8;
     r2 is the Rn value got from dispalced_read_reg.

     Insn1: push {pc}		Write address of STR instruction + offset on stack
     Insn2: pop  {r4}		Read it back from stack, r4 = addr(Insn1) + offset
     Insn3: sub r4, r4, pc	r4 = addr(Insn1) + offset - pc
				= addr(Insn1) + offset - addr(Insn3) - 8
				= offset - 16
     Insn4: add r4, r4, #8	r4 = offset - 8
     Insn5: add r0, r0, r4	r0 = from + 8 + offset - 8
				= from + offset
     Insn6: str r0, [r2, #imm]	(or str r0, [r2, r3])

     Otherwise we don't know what value to write for PC, since the offset is
     architecture-dependent (sometimes PC+8, sometimes PC+12).  More details
     of this can be found in Section "Saving from r15" in
     http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0204g/Cihbjifh.html */

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;
}
6770
6771
/* Copy a Thumb-2 PC-relative (literal) load of SIZE bytes,
   e.g. "LDR Rt, [PC, #+/-imm12]".  The word-aligned PC and the offset
   are materialized in r2/r3 and the insn rewritten as a
   register-offset LDR; cleanup_load moves the result into Rt.  */
static int
thumb2_copy_load_literal (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc, int size)
{
  unsigned int u_bit = bit (insn1, 7);
  unsigned int rt = bits (insn2, 12, 15);
  int imm12 = bits (insn2, 0, 11);
  ULONGEST pc_val;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr pc (0x%x) R%d %c imm12 %.4x\n",
			(unsigned int) dsc->insn_addr, rt, u_bit ? '+' : '-',
			imm12);

  /* U bit clear means the offset is subtracted from PC.  */
  if (!u_bit)
    imm12 = -1 * imm12;

  /* Rewrite instruction LDR Rt imm12 into:

     Prepare: tmp[0] <- r0, tmp[1] <- r2, tmp[2] <- r3, r2 <- pc, r3 <- imm12

     LDR R0, R2, R3,

     Cleanup: rt <- r0, r0 <- tmp[0], r2 <- tmp[1], r3 <- tmp[2].  */


  dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
  dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
  dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);

  pc_val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);

  /* Literal addressing uses Align(PC, 4) as the base.  */
  pc_val = pc_val & 0xfffffffc;

  displaced_write_reg (regs, dsc, 2, pc_val, CANNOT_WRITE_PC);
  displaced_write_reg (regs, dsc, 3, imm12, CANNOT_WRITE_PC);

  dsc->rd = rt;

  dsc->u.ldst.xfersize = size;
  dsc->u.ldst.immed = 0;
  dsc->u.ldst.writeback = 0;
  dsc->u.ldst.restore_r4 = 0;

  /* LDR R0, R2, R3 */
  dsc->modinsn[0] = 0xf852;
  dsc->modinsn[1] = 0x3;
  dsc->numinsns = 2;

  dsc->cleanup = &cleanup_load;

  return 0;
}
6827
/* Copy a 32-bit Thumb LDR (immediate or register offset, per IMMED)
   in which Rt or Rn is the PC; operands are remapped onto r0/r2/r3 by
   install_load_store.  Returns 0.  */
static int
thumb2_copy_load_reg_imm (struct gdbarch *gdbarch, uint16_t insn1,
			  uint16_t insn2, struct regcache *regs,
			  struct displaced_step_closure *dsc,
			  int writeback, int immed)
{
  unsigned int rt = bits (insn2, 12, 15);
  unsigned int rn = bits (insn1, 0, 3);
  unsigned int rm = bits (insn2, 0, 3);  /* Only valid if !immed.  */
  /* In LDR (register), there is also a register Rm, which is not allowed to
     be PC, so we don't have to check it.  */

  if (rt != ARM_PC_REGNUM && rn != ARM_PC_REGNUM)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "load",
					dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying ldr r%d [r%d] insn %.4x%.4x\n",
			rt, rn, insn1, insn2);

  install_load_store (gdbarch, regs, dsc, 1, immed, writeback, 4,
		      0, rt, rm, rn);

  dsc->u.ldst.restore_r4 = 0;

  if (immed)
    /* ldr[b]<cond> rt, [rn, #imm], etc.
       ->
       ldr[b]<cond> r0, [r2, #imm].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = insn2 & 0x0fff;
    }
  else
    /* ldr[b]<cond> rt, [rn, rm], etc.
       ->
       ldr[b]<cond> r0, [r2, r3].  */
    {
      dsc->modinsn[0] = (insn1 & 0xfff0) | 0x2;
      dsc->modinsn[1] = (insn2 & 0x0ff0) | 0x3;
    }

  dsc->numinsns = 2;

  return 0;
}
6875
6876
/* Copy an ARM single load/store (LDR/STR/LDRB/STRB and the usermode
   "t" variants) that references the PC.  Loads, and stores of
   registers other than PC, are rewritten onto r0/r2/r3; a store of PC
   needs a six-instruction sequence (see install_load_store's comment)
   that reconstructs the architecturally stored PC value in r0 using r4
   as extra scratch.  Returns 0.  */
static int
arm_copy_ldr_str_ldrb_strb (struct gdbarch *gdbarch, uint32_t insn,
			    struct regcache *regs,
			    struct displaced_step_closure *dsc,
			    int load, int size, int usermode)
{
  int immed = !bit (insn, 25);
  /* Post-indexed (P clear) or W set implies base writeback.  */
  int writeback = (bit (insn, 24) == 0 || bit (insn, 21) != 0);
  unsigned int rt = bits (insn, 12, 15);
  unsigned int rn = bits (insn, 16, 19);
  unsigned int rm = bits (insn, 0, 3);  /* Only valid if !immed.  */

  /* 0x000ff00f covers the Rn, Rt and Rm fields.  */
  if (!insn_references_pc (insn, 0x000ff00ful))
    return arm_copy_unmodified (gdbarch, insn, "load/store", dsc);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying %s%s r%d [r%d] insn %.8lx\n",
			load ? (size == 1 ? "ldrb" : "ldr")
			     : (size == 1 ? "strb" : "str"), usermode ? "t" : "",
			rt, rn,
			(unsigned long) insn);

  install_load_store (gdbarch, regs, dsc, load, immed, writeback, size,
		      usermode, rt, rm, rn);

  if (load || rt != ARM_PC_REGNUM)
    {
      dsc->u.ldst.restore_r4 = 0;

      if (immed)
	/* {ldr,str}[b]<cond> rt, [rn, #imm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, #imm].  */
	dsc->modinsn[0] = (insn & 0xfff00fff) | 0x20000;
      else
	/* {ldr,str}[b]<cond> rt, [rn, rm], etc.
	   ->
	   {ldr,str}[b]<cond> r0, [r2, r3].  */
	dsc->modinsn[0] = (insn & 0xfff00ff0) | 0x20003;
    }
  else
    {
      /* We need to use r4 as scratch.  Make sure it's restored afterwards.  */
      dsc->u.ldst.restore_r4 = 1;
      /* This sequence computes "from + offset" in r0, where offset is
	 the architecture's store-time PC bias -- see the comment in
	 install_load_store for the derivation.  */
      dsc->modinsn[0] = 0xe92d8000;  /* push {pc} */
      dsc->modinsn[1] = 0xe8bd0010;  /* pop  {r4} */
      dsc->modinsn[2] = 0xe044400f;  /* sub r4, r4, pc.  */
      dsc->modinsn[3] = 0xe2844008;  /* add r4, r4, #8.  */
      dsc->modinsn[4] = 0xe0800004;  /* add r0, r0, r4.  */

      /* As above.  */
      if (immed)
	dsc->modinsn[5] = (insn & 0xfff00fff) | 0x20000;
      else
	dsc->modinsn[5] = (insn & 0xfff00ff0) | 0x20003;

      dsc->numinsns = 6;
    }

  dsc->cleanup = load ? &cleanup_load : &cleanup_store;

  return 0;
}
6941
6942 /* Cleanup LDM instructions with fully-populated register list. This is an
6943 unfortunate corner case: it's impossible to implement correctly by modifying
6944 the instruction. The issue is as follows: we have an instruction,
6945
6946 ldm rN, {r0-r15}
6947
6948 which we must rewrite to avoid loading PC. A possible solution would be to
6949 do the load in two halves, something like (with suitable cleanup
6950 afterwards):
6951
6952 mov r8, rN
6953 ldm[id][ab] r8!, {r0-r7}
6954 str r7, <temp>
6955 ldm[id][ab] r8, {r7-r14}
6956 <bkpt>
6957
6958 but at present there's no suitable place for <temp>, since the scratch space
6959 is overwritten before the cleanup routine is called. For now, we simply
6960 emulate the instruction. */
6961
static void
cleanup_block_load_all (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  int inc = dsc->u.block.increment;
  /* Increment-before/decrement-before adjust the address before each
     transfer; the -after forms adjust it afterwards.  */
  int bump_before = dsc->u.block.before ? (inc ? 4 : -4) : 0;
  int bump_after = dsc->u.block.before ? 0 : (inc ? 4 : -4);
  uint32_t regmask = dsc->u.block.regmask;
  /* Walk registers upwards for increment forms, downwards for
     decrement forms, so lower-numbered registers pair with lower
     addresses.  */
  int regno = inc ? 0 : 15;
  CORE_ADDR xfer_addr = dsc->u.block.xfer_addr;
  /* "ldm rn, {...pc}^" in user mode is an exception return.  */
  int exception_return = dsc->u.block.load && dsc->u.block.user
			 && (regmask & 0x8000) != 0;
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int do_transfer = condition_true (dsc->u.block.cond, status);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* The original insn was replaced by a NOP, so the condition must be
     evaluated here; a false condition means no emulation at all.  */
  if (!do_transfer)
    return;

  /* If the instruction is ldm rN, {...pc}^, I don't think there's anything
     sensible we can do here.  Complain loudly.  */
  if (exception_return)
    error (_("Cannot single-step exception return"));

  /* We don't handle any stores here for now.  */
  gdb_assert (dsc->u.block.load != 0);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: emulating block transfer: "
			"%s %s %s\n", dsc->u.block.load ? "ldm" : "stm",
			dsc->u.block.increment ? "inc" : "dec",
			dsc->u.block.before ? "before" : "after");

  /* Emulate the load one register at a time, consuming set bits of
     REGMASK in address order.  */
  while (regmask)
    {
      uint32_t memword;

      if (inc)
	while (regno <= ARM_PC_REGNUM && (regmask & (1 << regno)) == 0)
	  regno++;
      else
	while (regno >= 0 && (regmask & (1 << regno)) == 0)
	  regno--;

      xfer_addr += bump_before;

      memword = read_memory_unsigned_integer (xfer_addr, 4, byte_order);
      displaced_write_reg (regs, dsc, regno, memword, LOAD_WRITE_PC);

      xfer_addr += bump_after;

      regmask &= ~(1 << regno);
    }

  /* Emulate base-register writeback manually as well.  */
  if (dsc->u.block.writeback)
    displaced_write_reg (regs, dsc, dsc->u.block.rn, xfer_addr,
			 CANNOT_WRITE_PC);
}
7020
/* Clean up an STM which included the PC in the register list.  */

static void
cleanup_block_store_pc (struct gdbarch *gdbarch, struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int store_executed = condition_true (dsc->u.block.cond, status);
  CORE_ADDR pc_stored_at, transferred_regs = bitcount (dsc->u.block.regmask);
  CORE_ADDR stm_insn_addr;
  uint32_t pc_val;
  long offset;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  /* If condition code fails, there's nothing else to do.  */
  if (!store_executed)
    return;

  /* Locate the memory word where PC was stored: highest address for
     increment forms, lowest for decrement forms, shifted one word for
     the -before variants.  */
  if (dsc->u.block.increment)
    {
      pc_stored_at = dsc->u.block.xfer_addr + 4 * transferred_regs;

      if (dsc->u.block.before)
	pc_stored_at += 4;
    }
  else
    {
      pc_stored_at = dsc->u.block.xfer_addr;

      if (dsc->u.block.before)
	pc_stored_at -= 4;
    }

  /* The out-of-line STM stored the scratch-pad PC plus the
     architecture's store-time bias; recover that bias as OFFSET and
     apply it to the original insn address instead.  */
  pc_val = read_memory_unsigned_integer (pc_stored_at, 4, byte_order);
  stm_insn_addr = dsc->scratch_base;
  offset = pc_val - stm_insn_addr;

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: detected PC offset %.8lx for "
			"STM instruction\n", offset);

  /* Rewrite the stored PC to the proper value for the non-displaced original
     instruction.  */
  write_memory_unsigned_integer (pc_stored_at, 4, byte_order,
				 dsc->insn_addr + offset);
}
7067
/* Clean up an LDM which includes the PC in the register list.  We clumped all
   the registers in the transferred list into a contiguous range r0...rX (to
   avoid loading PC directly and losing control of the debugged program), so we
   must undo that here.  */

static void
cleanup_block_load_pc (struct gdbarch *gdbarch,
		       struct regcache *regs,
		       struct displaced_step_closure *dsc)
{
  uint32_t status = displaced_read_reg (regs, dsc, ARM_PS_REGNUM);
  int load_executed = condition_true (dsc->u.block.cond, status);
  unsigned int mask = dsc->u.block.regmask, write_reg = ARM_PC_REGNUM;
  unsigned int regs_loaded = bitcount (mask);
  unsigned int num_to_shuffle = regs_loaded, clobbered;

  /* The method employed here will fail if the register list is fully populated
     (we need to avoid loading PC directly).  */
  gdb_assert (num_to_shuffle < 16);

  if (!load_executed)
    return;

  /* CLOBBERED tracks which of the scratch registers r0...rX still hold
     values that belong elsewhere and must eventually be restored.  */
  clobbered = (1 << num_to_shuffle) - 1;

  /* Walk WRITE_REG down from the PC: for each register named in the
     original list, copy its value out of the next-highest unconsumed
     scratch register.  Descending order guarantees a scratch register
     is read before it can be overwritten as a destination.  */
  while (num_to_shuffle > 0)
    {
      if ((mask & (1 << write_reg)) != 0)
	{
	  unsigned int read_reg = num_to_shuffle - 1;

	  if (read_reg != write_reg)
	    {
	      ULONGEST rval = displaced_read_reg (regs, dsc, read_reg);
	      displaced_write_reg (regs, dsc, write_reg, rval, LOAD_WRITE_PC);
	      if (debug_displaced)
		fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: move "
				    "loaded register r%d to r%d\n"), read_reg,
				    write_reg);
	    }
	  else if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: register "
				"r%d already in the right place\n"),
				write_reg);

	  clobbered &= ~(1 << write_reg);

	  num_to_shuffle--;
	}

      write_reg--;
    }

  /* Restore any registers we scribbled over.  */
  for (write_reg = 0; clobbered != 0; write_reg++)
    {
      if ((clobbered & (1 << write_reg)) != 0)
	{
	  displaced_write_reg (regs, dsc, write_reg, dsc->tmp[write_reg],
			       CANNOT_WRITE_PC);
	  if (debug_displaced)
	    fprintf_unfiltered (gdb_stdlog, _("displaced: LDM: restored "
				"clobbered register r%d\n"), write_reg);
	  clobbered &= ~(1 << write_reg);
	}
    }

  /* Perform register writeback manually.  */
  if (dsc->u.block.writeback)
    {
      ULONGEST new_rn_val = dsc->u.block.xfer_addr;

      if (dsc->u.block.increment)
	new_rn_val += regs_loaded * 4;
      else
	new_rn_val -= regs_loaded * 4;

      displaced_write_reg (regs, dsc, dsc->u.block.rn, new_rn_val,
			   CANNOT_WRITE_PC);
    }
}
7149
7150 /* Handle ldm/stm, apart from some tricky cases which are unlikely to occur
7151 in user-level code (in particular exception return, ldm rn, {...pc}^). */
7152
7153 static int
7154 arm_copy_block_xfer (struct gdbarch *gdbarch, uint32_t insn,
7155 struct regcache *regs,
7156 struct displaced_step_closure *dsc)
7157 {
7158 int load = bit (insn, 20);
7159 int user = bit (insn, 22);
7160 int increment = bit (insn, 23);
7161 int before = bit (insn, 24);
7162 int writeback = bit (insn, 21);
7163 int rn = bits (insn, 16, 19);
7164
7165 /* Block transfers which don't mention PC can be run directly
7166 out-of-line. */
7167 if (rn != ARM_PC_REGNUM && (insn & 0x8000) == 0)
7168 return arm_copy_unmodified (gdbarch, insn, "ldm/stm", dsc);
7169
7170 if (rn == ARM_PC_REGNUM)
7171 {
7172 warning (_("displaced: Unpredictable LDM or STM with "
7173 "base register r15"));
7174 return arm_copy_unmodified (gdbarch, insn, "unpredictable ldm/stm", dsc);
7175 }
7176
7177 if (debug_displaced)
7178 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7179 "%.8lx\n", (unsigned long) insn);
7180
7181 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7182 dsc->u.block.rn = rn;
7183
7184 dsc->u.block.load = load;
7185 dsc->u.block.user = user;
7186 dsc->u.block.increment = increment;
7187 dsc->u.block.before = before;
7188 dsc->u.block.writeback = writeback;
7189 dsc->u.block.cond = bits (insn, 28, 31);
7190
7191 dsc->u.block.regmask = insn & 0xffff;
7192
7193 if (load)
7194 {
7195 if ((insn & 0xffff) == 0xffff)
7196 {
7197 /* LDM with a fully-populated register list. This case is
7198 particularly tricky. Implement for now by fully emulating the
7199 instruction (which might not behave perfectly in all cases, but
7200 these instructions should be rare enough for that not to matter
7201 too much). */
7202 dsc->modinsn[0] = ARM_NOP;
7203
7204 dsc->cleanup = &cleanup_block_load_all;
7205 }
7206 else
7207 {
7208 /* LDM of a list of registers which includes PC. Implement by
7209 rewriting the list of registers to be transferred into a
7210 contiguous chunk r0...rX before doing the transfer, then shuffling
7211 registers into the correct places in the cleanup routine. */
7212 unsigned int regmask = insn & 0xffff;
7213 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7214 unsigned int to = 0, from = 0, i, new_rn;
7215
7216 for (i = 0; i < num_in_list; i++)
7217 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7218
7219 /* Writeback makes things complicated. We need to avoid clobbering
7220 the base register with one of the registers in our modified
7221 register list, but just using a different register can't work in
7222 all cases, e.g.:
7223
7224 ldm r14!, {r0-r13,pc}
7225
7226 which would need to be rewritten as:
7227
7228 ldm rN!, {r0-r14}
7229
7230 but that can't work, because there's no free register for N.
7231
7232 Solve this by turning off the writeback bit, and emulating
7233 writeback manually in the cleanup routine. */
7234
7235 if (writeback)
7236 insn &= ~(1 << 21);
7237
7238 new_regmask = (1 << num_in_list) - 1;
7239
7240 if (debug_displaced)
7241 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7242 "{..., pc}: original reg list %.4x, modified "
7243 "list %.4x\n"), rn, writeback ? "!" : "",
7244 (int) insn & 0xffff, new_regmask);
7245
7246 dsc->modinsn[0] = (insn & ~0xffff) | (new_regmask & 0xffff);
7247
7248 dsc->cleanup = &cleanup_block_load_pc;
7249 }
7250 }
7251 else
7252 {
7253 /* STM of a list of registers which includes PC. Run the instruction
7254 as-is, but out of line: this will store the wrong value for the PC,
7255 so we must manually fix up the memory in the cleanup routine.
7256 Doing things this way has the advantage that we can auto-detect
7257 the offset of the PC write (which is architecture-dependent) in
7258 the cleanup routine. */
7259 dsc->modinsn[0] = insn;
7260
7261 dsc->cleanup = &cleanup_block_store_pc;
7262 }
7263
7264 return 0;
7265 }
7266
7267 static int
7268 thumb2_copy_block_xfer (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7269 struct regcache *regs,
7270 struct displaced_step_closure *dsc)
7271 {
7272 int rn = bits (insn1, 0, 3);
7273 int load = bit (insn1, 4);
7274 int writeback = bit (insn1, 5);
7275
7276 /* Block transfers which don't mention PC can be run directly
7277 out-of-line. */
7278 if (rn != ARM_PC_REGNUM && (insn2 & 0x8000) == 0)
7279 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "ldm/stm", dsc);
7280
7281 if (rn == ARM_PC_REGNUM)
7282 {
7283 warning (_("displaced: Unpredictable LDM or STM with "
7284 "base register r15"));
7285 return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
7286 "unpredictable ldm/stm", dsc);
7287 }
7288
7289 if (debug_displaced)
7290 fprintf_unfiltered (gdb_stdlog, "displaced: copying block transfer insn "
7291 "%.4x%.4x\n", insn1, insn2);
7292
7293 /* Clear bit 13, since it should be always zero. */
7294 dsc->u.block.regmask = (insn2 & 0xdfff);
7295 dsc->u.block.rn = rn;
7296
7297 dsc->u.block.load = load;
7298 dsc->u.block.user = 0;
7299 dsc->u.block.increment = bit (insn1, 7);
7300 dsc->u.block.before = bit (insn1, 8);
7301 dsc->u.block.writeback = writeback;
7302 dsc->u.block.cond = INST_AL;
7303 dsc->u.block.xfer_addr = displaced_read_reg (regs, dsc, rn);
7304
7305 if (load)
7306 {
7307 if (dsc->u.block.regmask == 0xffff)
7308 {
7309 /* This branch is impossible to happen. */
7310 gdb_assert (0);
7311 }
7312 else
7313 {
7314 unsigned int regmask = dsc->u.block.regmask;
7315 unsigned int num_in_list = bitcount (regmask), new_regmask, bit = 1;
7316 unsigned int to = 0, from = 0, i, new_rn;
7317
7318 for (i = 0; i < num_in_list; i++)
7319 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
7320
7321 if (writeback)
7322 insn1 &= ~(1 << 5);
7323
7324 new_regmask = (1 << num_in_list) - 1;
7325
7326 if (debug_displaced)
7327 fprintf_unfiltered (gdb_stdlog, _("displaced: LDM r%d%s, "
7328 "{..., pc}: original reg list %.4x, modified "
7329 "list %.4x\n"), rn, writeback ? "!" : "",
7330 (int) dsc->u.block.regmask, new_regmask);
7331
7332 dsc->modinsn[0] = insn1;
7333 dsc->modinsn[1] = (new_regmask & 0xffff);
7334 dsc->numinsns = 2;
7335
7336 dsc->cleanup = &cleanup_block_load_pc;
7337 }
7338 }
7339 else
7340 {
7341 dsc->modinsn[0] = insn1;
7342 dsc->modinsn[1] = insn2;
7343 dsc->numinsns = 2;
7344 dsc->cleanup = &cleanup_block_store_pc;
7345 }
7346 return 0;
7347 }
7348
7349 /* Cleanup/copy SVC (SWI) instructions. These two functions are overridden
7350 for Linux, where some SVC instructions must be treated specially. */
7351
7352 static void
7353 cleanup_svc (struct gdbarch *gdbarch, struct regcache *regs,
7354 struct displaced_step_closure *dsc)
7355 {
7356 CORE_ADDR resume_addr = dsc->insn_addr + dsc->insn_size;
7357
7358 if (debug_displaced)
7359 fprintf_unfiltered (gdb_stdlog, "displaced: cleanup for svc, resume at "
7360 "%.8lx\n", (unsigned long) resume_addr);
7361
7362 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, resume_addr, BRANCH_WRITE_PC);
7363 }
7364
7365
7366 /* Common copy routine for svc instruciton. */
7367
7368 static int
7369 install_svc (struct gdbarch *gdbarch, struct regcache *regs,
7370 struct displaced_step_closure *dsc)
7371 {
7372 /* Preparation: none.
7373 Insn: unmodified svc.
7374 Cleanup: pc <- insn_addr + insn_size. */
7375
7376 /* Pretend we wrote to the PC, so cleanup doesn't set PC to the next
7377 instruction. */
7378 dsc->wrote_to_pc = 1;
7379
7380 /* Allow OS-specific code to override SVC handling. */
7381 if (dsc->u.svc.copy_svc_os)
7382 return dsc->u.svc.copy_svc_os (gdbarch, regs, dsc);
7383 else
7384 {
7385 dsc->cleanup = &cleanup_svc;
7386 return 0;
7387 }
7388 }
7389
7390 static int
7391 arm_copy_svc (struct gdbarch *gdbarch, uint32_t insn,
7392 struct regcache *regs, struct displaced_step_closure *dsc)
7393 {
7394
7395 if (debug_displaced)
7396 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.8lx\n",
7397 (unsigned long) insn);
7398
7399 dsc->modinsn[0] = insn;
7400
7401 return install_svc (gdbarch, regs, dsc);
7402 }
7403
7404 static int
7405 thumb_copy_svc (struct gdbarch *gdbarch, uint16_t insn,
7406 struct regcache *regs, struct displaced_step_closure *dsc)
7407 {
7408
7409 if (debug_displaced)
7410 fprintf_unfiltered (gdb_stdlog, "displaced: copying svc insn %.4x\n",
7411 insn);
7412
7413 dsc->modinsn[0] = insn;
7414
7415 return install_svc (gdbarch, regs, dsc);
7416 }
7417
7418 /* Copy undefined instructions. */
7419
7420 static int
7421 arm_copy_undef (struct gdbarch *gdbarch, uint32_t insn,
7422 struct displaced_step_closure *dsc)
7423 {
7424 if (debug_displaced)
7425 fprintf_unfiltered (gdb_stdlog,
7426 "displaced: copying undefined insn %.8lx\n",
7427 (unsigned long) insn);
7428
7429 dsc->modinsn[0] = insn;
7430
7431 return 0;
7432 }
7433
7434 static int
7435 thumb_32bit_copy_undef (struct gdbarch *gdbarch, uint16_t insn1, uint16_t insn2,
7436 struct displaced_step_closure *dsc)
7437 {
7438
7439 if (debug_displaced)
7440 fprintf_unfiltered (gdb_stdlog, "displaced: copying undefined insn "
7441 "%.4x %.4x\n", (unsigned short) insn1,
7442 (unsigned short) insn2);
7443
7444 dsc->modinsn[0] = insn1;
7445 dsc->modinsn[1] = insn2;
7446 dsc->numinsns = 2;
7447
7448 return 0;
7449 }
7450
7451 /* Copy unpredictable instructions. */
7452
7453 static int
7454 arm_copy_unpred (struct gdbarch *gdbarch, uint32_t insn,
7455 struct displaced_step_closure *dsc)
7456 {
7457 if (debug_displaced)
7458 fprintf_unfiltered (gdb_stdlog, "displaced: copying unpredictable insn "
7459 "%.8lx\n", (unsigned long) insn);
7460
7461 dsc->modinsn[0] = insn;
7462
7463 return 0;
7464 }
7465
/* The decode_* functions are instruction decoding helpers.  They mostly follow
   the presentation in the ARM ARM.  */

/* Decode the "miscellaneous, memory hints, and Advanced SIMD" encoding
   space (unconditional instructions with bit 27 clear), dispatching on
   the OP1 (bits 20-26) and OP2 (bits 4-7) fields as laid out in the
   corresponding ARM ARM decode table.  Returns the result of the chosen
   copy routine (0 on success).  */

static int
arm_decode_misc_memhint_neon (struct gdbarch *gdbarch, uint32_t insn,
			      struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int op1 = bits (insn, 20, 26), op2 = bits (insn, 4, 7);
  unsigned int rn = bits (insn, 16, 19);

  if (op1 == 0x10 && (op2 & 0x2) == 0x0 && (rn & 0xe) == 0x0)
    return arm_copy_unmodified (gdbarch, insn, "cps", dsc);
  else if (op1 == 0x10 && op2 == 0x0 && (rn & 0xe) == 0x1)
    return arm_copy_unmodified (gdbarch, insn, "setend", dsc);
  else if ((op1 & 0x60) == 0x20)
    return arm_copy_unmodified (gdbarch, insn, "neon dataproc", dsc);
  else if ((op1 & 0x71) == 0x40)
    return arm_copy_unmodified (gdbarch, insn, "neon elt/struct load/store",
				dsc);
  else if ((op1 & 0x77) == 0x41)
    return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
  else if ((op1 & 0x77) == 0x45)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pli.  */
  else if ((op1 & 0x77) == 0x51)
    {
      /* pld/pldw with rn == pc is unpredictable.  */
      if (rn != 0xf)
	return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
      else
	return arm_copy_unpred (gdbarch, insn, dsc);
    }
  else if ((op1 & 0x77) == 0x55)
    return arm_copy_preload (gdbarch, insn, regs, dsc);  /* pld/pldw.  */
  else if (op1 == 0x57)
    /* Memory barriers and clrex: none touch PC, copy unmodified.  */
    switch (op2)
      {
      case 0x1: return arm_copy_unmodified (gdbarch, insn, "clrex", dsc);
      case 0x4: return arm_copy_unmodified (gdbarch, insn, "dsb", dsc);
      case 0x5: return arm_copy_unmodified (gdbarch, insn, "dmb", dsc);
      case 0x6: return arm_copy_unmodified (gdbarch, insn, "isb", dsc);
      default: return arm_copy_unpred (gdbarch, insn, dsc);
      }
  else if ((op1 & 0x63) == 0x43)
    return arm_copy_unpred (gdbarch, insn, dsc);
  else if ((op2 & 0x1) == 0x0)
    /* Bit 7 of OP1 (bit 26 of the insn) does not affect decoding here,
       so mask it off before switching.  */
    switch (op1 & ~0x80)
      {
      case 0x61:
	return arm_copy_unmodified (gdbarch, insn, "unallocated mem hint", dsc);
      case 0x65:
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);  /* pli reg.  */
      case 0x71: case 0x75:
        /* pld/pldw reg.  */
	return arm_copy_preload_reg (gdbarch, insn, regs, dsc);
      case 0x63: case 0x67: case 0x73: case 0x77:
	return arm_copy_unpred (gdbarch, insn, dsc);
      default:
	return arm_copy_undef (gdbarch, insn, dsc);
      }
  else
    return arm_copy_undef (gdbarch, insn, dsc);  /* Probably unreachable.  */
}
7528
/* Decode the unconditional (condition field 0b1111) ARM instruction space.
   Dispatches on bits 24-26 and bit 20, following the ARM ARM decode table
   for unconditional instructions.  Returns the chosen copy routine's
   result (0 on success).  */

static int
arm_decode_unconditional (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  if (bit (insn, 27) == 0)
    return arm_decode_misc_memhint_neon (gdbarch, insn, regs, dsc);
  /* Switch on bits: 0bxxxxx321xxx0xxxxxxxxxxxxxxxxxxxx.  */
  else switch (((insn & 0x7000000) >> 23) | ((insn & 0x100000) >> 20))
    {
    case 0x0: case 0x2:
      return arm_copy_unmodified (gdbarch, insn, "srs", dsc);

    case 0x1: case 0x3:
      return arm_copy_unmodified (gdbarch, insn, "rfe", dsc);

    case 0x4: case 0x5: case 0x6: case 0x7:
      /* Unconditional blx (immediate), handled like b/bl.  */
      return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);

    case 0x8:
      /* Further dispatch on bits 21-23.  */
      switch ((insn & 0xe00000) >> 21)
	{
	case 0x1: case 0x3: case 0x4: case 0x5: case 0x6: case 0x7:
	  /* stc/stc2.  */
	  return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	case 0x2:
	  return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);

	default:
	  return arm_copy_undef (gdbarch, insn, dsc);
	}

    case 0x9:
      {
        /* The rn == pc case selects the literal form of ldc/ldc2.  */
	int rn_f = (bits (insn, 16, 19) == 0xf);
	switch ((insn & 0xe00000) >> 21)
	  {
	  case 0x1: case 0x3:
	    /* ldc/ldc2 imm (undefined for rn == pc).  */
	    return rn_f ? arm_copy_undef (gdbarch, insn, dsc)
			: arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

	  case 0x2:
	    return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);

	  case 0x4: case 0x5: case 0x6: case 0x7:
	    /* ldc/ldc2 lit (undefined for rn != pc).  */
	    return rn_f ? arm_copy_copro_load_store (gdbarch, insn, regs, dsc)
			: arm_copy_undef (gdbarch, insn, dsc);

	  default:
	    return arm_copy_undef (gdbarch, insn, dsc);
	  }
      }

    case 0xa:
      return arm_copy_unmodified (gdbarch, insn, "stc/stc2", dsc);

    case 0xb:
      if (bits (insn, 16, 19) == 0xf)
	/* ldc/ldc2 lit.  */
	return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0xc:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    case 0xd:
      if (bit (insn, 4))
	return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
      else
	return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7611
/* Decode miscellaneous instructions in dp/misc encoding space.  */

/* Dispatches on OP2 (bits 4-6) then OP (bits 21-22), per the ARM ARM
   "miscellaneous instructions" table.  Returns the chosen copy routine's
   result (0 on success).  OP1 (bits 16-19) is decoded but not consulted
   here -- assumed reserved for a finer-grained decode; confirm against
   the ARM ARM if extending this function.  */

static int
arm_decode_miscellaneous (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op2 = bits (insn, 4, 6);
  unsigned int op = bits (insn, 21, 22);
  unsigned int op1 = bits (insn, 16, 19);

  switch (op2)
    {
    case 0x0:
      return arm_copy_unmodified (gdbarch, insn, "mrs/msr", dsc);

    case 0x1:
      if (op == 0x1)  /* bx.  */
	return arm_copy_bx_blx_reg (gdbarch, insn, regs, dsc);
      else if (op == 0x3)
	return arm_copy_unmodified (gdbarch, insn, "clz", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x2:
      if (op == 0x1)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "bxj", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x3:
      if (op == 0x1)
	return arm_copy_bx_blx_reg (gdbarch, insn,
				regs, dsc);  /* blx register.  */
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x5:
      return arm_copy_unmodified (gdbarch, insn, "saturating add/sub", dsc);

    case 0x7:
      if (op == 0x1)
	return arm_copy_unmodified (gdbarch, insn, "bkpt", dsc);
      else if (op == 0x3)
        /* Not really supported.  */
	return arm_copy_unmodified (gdbarch, insn, "smc", dsc);
      /* Fall through to the undef case for other OP values.  */

    default:
      return arm_copy_undef (gdbarch, insn, dsc);
    }
}
7664
/* Decode the data-processing / miscellaneous encoding space: immediate
   forms when bit 25 is set, register forms (plus the miscellaneous,
   multiply, synchronization, and extra load/store subspaces) otherwise.
   Returns the chosen copy routine's result (0 on success), or 1 if no
   decode arm matched (should not happen).  */

static int
arm_decode_dp_misc (struct gdbarch *gdbarch, uint32_t insn,
		    struct regcache *regs,
		    struct displaced_step_closure *dsc)
{
  if (bit (insn, 25))
    switch (bits (insn, 20, 24))
      {
      case 0x10:
	return arm_copy_unmodified (gdbarch, insn, "movw", dsc);

      case 0x14:
	return arm_copy_unmodified (gdbarch, insn, "movt", dsc);

      case 0x12: case 0x16:
	return arm_copy_unmodified (gdbarch, insn, "msr imm", dsc);

      default:
	return arm_copy_alu_imm (gdbarch, insn, regs, dsc);
      }
  else
    {
      uint32_t op1 = bits (insn, 20, 24), op2 = bits (insn, 4, 7);

      if ((op1 & 0x19) != 0x10 && (op2 & 0x1) == 0x0)
	return arm_copy_alu_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) != 0x10 && (op2 & 0x9) == 0x1)
	return arm_copy_alu_shifted_reg (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x8) == 0x0)
	return arm_decode_miscellaneous (gdbarch, insn, regs, dsc);
      else if ((op1 & 0x19) == 0x10 && (op2 & 0x9) == 0x8)
	return arm_copy_unmodified (gdbarch, insn, "halfword mul/mla", dsc);
      else if ((op1 & 0x10) == 0x00 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "mul/mla", dsc);
      else if ((op1 & 0x10) == 0x10 && op2 == 0x9)
	return arm_copy_unmodified (gdbarch, insn, "synch", dsc);
      else if (op2 == 0xb || (op2 & 0xd) == 0xd)
	/* 2nd arg means "unprivileged".  */
	return arm_copy_extra_ld_st (gdbarch, insn, (op1 & 0x12) == 0x02, regs,
				     dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7710
/* Decode load/store word and unsigned byte instructions.  A (bit 25)
   selects the register-offset form, B (bit 4) disambiguates from media
   instructions, and OP1 (bits 20-24) selects load vs. store, word vs.
   byte, and the unprivileged (T) variants.  The three trailing arguments
   to arm_copy_ldr_str_ldrb_strb are: load flag, transfer size in bytes,
   and user-mode (unprivileged) flag.  Returns the copy routine's result
   (0 on success), or 1 if nothing matched (should not happen).  */

static int
arm_decode_ld_st_word_ubyte (struct gdbarch *gdbarch, uint32_t insn,
			     struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  int a = bit (insn, 25), b = bit (insn, 4);
  uint32_t op1 = bits (insn, 20, 24);
  int rn_f = bits (insn, 16, 19) == 0xf;

  /* str.  */
  if ((!a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02)
      || (a && (op1 & 0x05) == 0x00 && (op1 & 0x17) != 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 0);
  /* strt (unprivileged).  */
  else if ((!a && (op1 & 0x17) == 0x02)
	   || (a && (op1 & 0x17) == 0x02 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 4, 1);
  /* ldr.  */
  else if ((!a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03)
	   || (a && (op1 & 0x05) == 0x01 && (op1 & 0x17) != 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 0);
  /* ldrt (unprivileged).  */
  else if ((!a && (op1 & 0x17) == 0x03)
	   || (a && (op1 & 0x17) == 0x03 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 4, 1);
  /* strb.  */
  else if ((!a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06)
	   || (a && (op1 & 0x05) == 0x04 && (op1 & 0x17) != 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 0);
  /* strbt (unprivileged).  */
  else if ((!a && (op1 & 0x17) == 0x06)
	   || (a && (op1 & 0x17) == 0x06 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 0, 1, 1);
  /* ldrb.  */
  else if ((!a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07)
	   || (a && (op1 & 0x05) == 0x05 && (op1 & 0x17) != 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 0);
  /* ldrbt (unprivileged).  */
  else if ((!a && (op1 & 0x17) == 0x07)
	   || (a && (op1 & 0x17) == 0x07 && !b))
    return arm_copy_ldr_str_ldrb_strb (gdbarch, insn, regs, dsc, 1, 1, 1);

  /* Should be unreachable.  */
  return 1;
}
7748
/* Decode the media-instructions encoding space, dispatching on OP1
   (bits 20-24) with OP2 (bits 5-7) disambiguating within some rows.
   None of these instructions can write the PC, so everything decodes
   to either an unmodified copy or undef.  Returns the copy routine's
   result (0 on success), or 1 if nothing matched (should not happen).  */

static int
arm_decode_media (struct gdbarch *gdbarch, uint32_t insn,
		  struct displaced_step_closure *dsc)
{
  switch (bits (insn, 20, 24))
    {
    case 0x00: case 0x01: case 0x02: case 0x03:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub signed", dsc);

    case 0x04: case 0x05: case 0x06: case 0x07:
      return arm_copy_unmodified (gdbarch, insn, "parallel add/sub unsigned", dsc);

    case 0x08: case 0x09: case 0x0a: case 0x0b:
    case 0x0c: case 0x0d: case 0x0e: case 0x0f:
      return arm_copy_unmodified (gdbarch, insn,
			      "decode/pack/unpack/saturate/reverse", dsc);

    case 0x18:
      if (bits (insn, 5, 7) == 0)  /* op2.  */
	 {
          /* rd == 0xf distinguishes usad8 from usada8.  */
	  if (bits (insn, 12, 15) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "usad8", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "usada8", dsc);
	}
      else
	 return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1a: case 0x1b:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "sbfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1c: case 0x1d:
      if (bits (insn, 5, 6) == 0x0)  /* op2[1:0].  */
	{
          /* rn == 0xf distinguishes bfc from bfi.  */
	  if (bits (insn, 0, 3) == 0xf)
	    return arm_copy_unmodified (gdbarch, insn, "bfc", dsc);
	  else
	    return arm_copy_unmodified (gdbarch, insn, "bfi", dsc);
	}
      else
	return arm_copy_undef (gdbarch, insn, dsc);

    case 0x1e: case 0x1f:
      if (bits (insn, 5, 6) == 0x2)  /* op2[1:0].  */
	return arm_copy_unmodified (gdbarch, insn, "ubfx", dsc);
      else
	return arm_copy_undef (gdbarch, insn, dsc);
    }

  /* Should be unreachable.  */
  return 1;
}
7804
/* Decode the branch / branch-with-link / block-transfer encoding space:
   bit 25 set means b/bl/blx, clear means ldm/stm.  */

static int
arm_decode_b_bl_ldmstm (struct gdbarch *gdbarch, int32_t insn,
			struct regcache *regs,
			struct displaced_step_closure *dsc)
{
  if (bit (insn, 25) == 0)
    return arm_copy_block_xfer (gdbarch, insn, regs, dsc);

  return arm_copy_b_bl_blx (gdbarch, insn, regs, dsc);
}
7815
/* Decode extension-register (VFP/Neon) load/store instructions,
   dispatching on bits 20-24.  Returns the copy routine's result (0 on
   success), or 1 if nothing matched (should not happen).  */

static int
arm_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint32_t insn,
			  struct regcache *regs,
			  struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn, 20, 24);

  switch (op)
    {
    /* VFP/Neon mrrc/mcrr.  */
    case 0x04: case 0x05:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon mrrc/mcrr", dsc);

    /* Multi-register stores and pushes.  */
    case 0x08: case 0x0a: case 0x0c: case 0x0e:
    case 0x12: case 0x16:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vstm/vpush", dsc);

    /* Multi-register loads and pops.  */
    case 0x09: case 0x0b: case 0x0d: case 0x0f:
    case 0x13: case 0x17:
      return arm_copy_unmodified (gdbarch, insn, "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* Note: no writeback for these instructions.  Bit 25 will always be
	 zero though (via caller), so the following works OK.  */
      return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);

    default:
      /* Should be unreachable.  */
      return 1;
    }
}
7846
/* Decode shifted register instructions.  */

static int
thumb2_decode_dp_shift_reg (struct gdbarch *gdbarch, uint16_t insn1,
			    uint16_t insn2, struct regcache *regs,
			    struct displaced_step_closure *dsc)
{
  /* PC is only allowed to be used in instruction MOV.  */

  unsigned int opcode = bits (insn1, 5, 8);
  unsigned int src_reg = bits (insn1, 0, 3);

  /* MOV is opcode 0x2 with rn == pc; everything else never touches PC
     and can be copied unmodified.  */
  if (opcode != 0x2 || src_reg != 0xf)
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					"dp (shift reg)", dsc);

  return thumb2_copy_alu_imm (gdbarch, insn1, insn2, regs, dsc);
}
7865
/* Decode extension register load/store.  Exactly the same as
   arm_decode_ext_reg_ld_st.  */

static int
thumb2_decode_ext_reg_ld_st (struct gdbarch *gdbarch, uint16_t insn1,
			     uint16_t insn2, struct regcache *regs,
			     struct displaced_step_closure *dsc)
{
  unsigned int op = bits (insn1, 4, 8);

  switch (op)
    {
    case 0x04: case 0x05:
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vmov", dsc);

    /* Multi-register stores and pushes.  */
    case 0x08: case 0x0c:  /* 01x00 */
    case 0x0a: case 0x0e:  /* 01x10 */
    case 0x12: case 0x16:  /* 10x10 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vstm/vpush", dsc);

    /* Multi-register loads and pops.  */
    case 0x09: case 0x0d:  /* 01x01 */
    case 0x0b: case 0x0f:  /* 01x11 */
    case 0x13: case 0x17:  /* 10x11 */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vfp/neon vldm/vpop", dsc);

    case 0x10: case 0x14: case 0x18: case 0x1c:  /* vstr.  */
      return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					  "vstr", dsc);

    case 0x11: case 0x15: case 0x19: case 0x1d:  /* vldr.  */
      /* vldr may reference PC as a base (literal form), so needs the
	 full coprocessor load/store treatment.  */
      return thumb2_copy_copro_load_store (gdbarch, insn1, insn2, regs, dsc);

    default:
      /* Should be unreachable.  */
      return 1;
    }
}
7905
7906 static int
7907 arm_decode_svc_copro (struct gdbarch *gdbarch, uint32_t insn, CORE_ADDR to,
7908 struct regcache *regs, struct displaced_step_closure *dsc)
7909 {
7910 unsigned int op1 = bits (insn, 20, 25);
7911 int op = bit (insn, 4);
7912 unsigned int coproc = bits (insn, 8, 11);
7913 unsigned int rn = bits (insn, 16, 19);
7914
7915 if ((op1 & 0x20) == 0x00 && (op1 & 0x3a) != 0x00 && (coproc & 0xe) == 0xa)
7916 return arm_decode_ext_reg_ld_st (gdbarch, insn, regs, dsc);
7917 else if ((op1 & 0x21) == 0x00 && (op1 & 0x3a) != 0x00
7918 && (coproc & 0xe) != 0xa)
7919 /* stc/stc2. */
7920 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7921 else if ((op1 & 0x21) == 0x01 && (op1 & 0x3a) != 0x00
7922 && (coproc & 0xe) != 0xa)
7923 /* ldc/ldc2 imm/lit. */
7924 return arm_copy_copro_load_store (gdbarch, insn, regs, dsc);
7925 else if ((op1 & 0x3e) == 0x00)
7926 return arm_copy_undef (gdbarch, insn, dsc);
7927 else if ((op1 & 0x3e) == 0x04 && (coproc & 0xe) == 0xa)
7928 return arm_copy_unmodified (gdbarch, insn, "neon 64bit xfer", dsc);
7929 else if (op1 == 0x04 && (coproc & 0xe) != 0xa)
7930 return arm_copy_unmodified (gdbarch, insn, "mcrr/mcrr2", dsc);
7931 else if (op1 == 0x05 && (coproc & 0xe) != 0xa)
7932 return arm_copy_unmodified (gdbarch, insn, "mrrc/mrrc2", dsc);
7933 else if ((op1 & 0x30) == 0x20 && !op)
7934 {
7935 if ((coproc & 0xe) == 0xa)
7936 return arm_copy_unmodified (gdbarch, insn, "vfp dataproc", dsc);
7937 else
7938 return arm_copy_unmodified (gdbarch, insn, "cdp/cdp2", dsc);
7939 }
7940 else if ((op1 & 0x30) == 0x20 && op)
7941 return arm_copy_unmodified (gdbarch, insn, "neon 8/16/32 bit xfer", dsc);
7942 else if ((op1 & 0x31) == 0x20 && op && (coproc & 0xe) != 0xa)
7943 return arm_copy_unmodified (gdbarch, insn, "mcr/mcr2", dsc);
7944 else if ((op1 & 0x31) == 0x21 && op && (coproc & 0xe) != 0xa)
7945 return arm_copy_unmodified (gdbarch, insn, "mrc/mrc2", dsc);
7946 else if ((op1 & 0x30) == 0x30)
7947 return arm_copy_svc (gdbarch, insn, regs, dsc);
7948 else
7949 return arm_copy_undef (gdbarch, insn, dsc); /* Possibly unreachable. */
7950 }
7951
/* Decode the Thumb-2 coprocessor / SIMD encoding space.  Dispatches on
   bit 9, bits 5-8, and bit 4 of the first halfword; coprocessors 10/11
   (bits 8-11 of INSN2) are VFP/Neon.  Returns the chosen copy routine's
   result (0 on success).  (The unused locals OP1 and RN were removed.)  */

static int
thumb2_decode_svc_copro (struct gdbarch *gdbarch, uint16_t insn1,
			 uint16_t insn2, struct regcache *regs,
			 struct displaced_step_closure *dsc)
{
  unsigned int coproc = bits (insn2, 8, 11);
  unsigned int bit_5_8 = bits (insn1, 5, 8);
  unsigned int bit_9 = bit (insn1, 9);
  unsigned int bit_4 = bit (insn1, 4);

  if (bit_9 == 0)
    {
      if (bit_5_8 == 2)
	return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
					    "neon 64bit xfer/mrrc/mrrc2/mcrr/mcrr2",
					    dsc);
      else if (bit_5_8 == 0)  /* UNDEFINED.  */
	return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      else
	{
	   /*coproc is 101x.  SIMD/VFP, ext registers load/store.  */
	  if ((coproc & 0xe) == 0xa)
	    return thumb2_decode_ext_reg_ld_st (gdbarch, insn1, insn2, regs,
						dsc);
	  else /* coproc is not 101x.  */
	    {
	      if (bit_4 == 0) /* STC/STC2.  */
		return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
						    "stc/stc2", dsc);
	      else /* LDC/LDC2 {literal, immediate}.  */
		return thumb2_copy_copro_load_store (gdbarch, insn1, insn2,
						     regs, dsc);
	    }
	}
    }
  else
    return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2, "coproc", dsc);

  return 0;
}
7994
7995 static void
7996 install_pc_relative (struct gdbarch *gdbarch, struct regcache *regs,
7997 struct displaced_step_closure *dsc, int rd)
7998 {
7999 /* ADR Rd, #imm
8000
8001 Rewrite as:
8002
8003 Preparation: Rd <- PC
8004 Insn: ADD Rd, #imm
8005 Cleanup: Null.
8006 */
8007
8008 /* Rd <- PC */
8009 int val = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8010 displaced_write_reg (regs, dsc, rd, val, CANNOT_WRITE_PC);
8011 }
8012
8013 static int
8014 thumb_copy_pc_relative_16bit (struct gdbarch *gdbarch, struct regcache *regs,
8015 struct displaced_step_closure *dsc,
8016 int rd, unsigned int imm)
8017 {
8018
8019 /* Encoding T2: ADDS Rd, #imm */
8020 dsc->modinsn[0] = (0x3000 | (rd << 8) | imm);
8021
8022 install_pc_relative (gdbarch, regs, dsc, rd);
8023
8024 return 0;
8025 }
8026
8027 static int
8028 thumb_decode_pc_relative_16bit (struct gdbarch *gdbarch, uint16_t insn,
8029 struct regcache *regs,
8030 struct displaced_step_closure *dsc)
8031 {
8032 unsigned int rd = bits (insn, 8, 10);
8033 unsigned int imm8 = bits (insn, 0, 7);
8034
8035 if (debug_displaced)
8036 fprintf_unfiltered (gdb_stdlog,
8037 "displaced: copying thumb adr r%d, #%d insn %.4x\n",
8038 rd, imm8, insn);
8039
8040 return thumb_copy_pc_relative_16bit (gdbarch, regs, dsc, rd, imm8);
8041 }
8042
/* Copy a 32-bit Thumb ADR (PC-relative add/sub) for displaced stepping.
   Rewrite ADR Rd, label as ADD/SUB Rd, Rd, #imm with Rd pre-loaded with
   the PC value (see install_pc_relative).  Returns 0 on success.  */

static int
thumb_copy_pc_relative_32bit (struct gdbarch *gdbarch, uint16_t insn1,
			      uint16_t insn2, struct regcache *regs,
			      struct displaced_step_closure *dsc)
{
  unsigned int rd = bits (insn2, 8, 11);
  /* Since immediate has the same encoding in ADR ADD and SUB, so we simply
     extract raw immediate encoding rather than computing immediate.  When
     generating ADD or SUB instruction, we can simply perform OR operation to
     set immediate into ADD.  */
  unsigned int imm_3_8 = insn2 & 0x70ff;
  unsigned int imm_i = insn1 & 0x0400; /* Clear all bits except bit 10.  */

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog,
			"displaced: copying thumb adr r%d, #%d:%d insn %.4x%.4x\n",
			rd, imm_i, imm_3_8, insn1, insn2);

  /* Bit 7 of insn1 distinguishes the subtracting form of ADR (encoding
     T2) from the adding form (encoding T3).  NOTE(review): the original
     inner comment labelled the SUB form "Encoding T3", which contradicts
     the branch condition's label -- per the ARM ARM, ADR encoding T2 is
    the SUB.W form and T3 is the ADD.W form.  */
  if (bit (insn1, 7)) /* Encoding T2 */
    {
      /* SUB.W Rd, Rd, #imm (ADR encoding T2 becomes SUB encoding T3).  */
      dsc->modinsn[0] = (0xf1a0 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  else /* Encoding T3 */
    {
      /* ADD.W Rd, Rd, #imm (ADR encoding T3 becomes ADD encoding T3).  */
      dsc->modinsn[0] = (0xf100 | rd | imm_i);
      dsc->modinsn[1] = ((rd << 8) | imm_3_8);
    }
  dsc->numinsns = 2;

  install_pc_relative (gdbarch, regs, dsc, rd);

  return 0;
}
8079
8080 static int
8081 thumb_copy_16bit_ldr_literal (struct gdbarch *gdbarch, unsigned short insn1,
8082 struct regcache *regs,
8083 struct displaced_step_closure *dsc)
8084 {
8085 unsigned int rt = bits (insn1, 8, 10);
8086 unsigned int pc;
8087 int imm8 = (bits (insn1, 0, 7) << 2);
8088 CORE_ADDR from = dsc->insn_addr;
8089
8090 /* LDR Rd, #imm8
8091
8092 Rwrite as:
8093
8094 Preparation: tmp0 <- R0, tmp2 <- R2, tmp3 <- R3, R2 <- PC, R3 <- #imm8;
8095
8096 Insn: LDR R0, [R2, R3];
8097 Cleanup: R2 <- tmp2, R3 <- tmp3, Rd <- R0, R0 <- tmp0 */
8098
8099 if (debug_displaced)
8100 fprintf_unfiltered (gdb_stdlog,
8101 "displaced: copying thumb ldr r%d [pc #%d]\n"
8102 , rt, imm8);
8103
8104 dsc->tmp[0] = displaced_read_reg (regs, dsc, 0);
8105 dsc->tmp[2] = displaced_read_reg (regs, dsc, 2);
8106 dsc->tmp[3] = displaced_read_reg (regs, dsc, 3);
8107 pc = displaced_read_reg (regs, dsc, ARM_PC_REGNUM);
8108 /* The assembler calculates the required value of the offset from the
8109 Align(PC,4) value of this instruction to the label. */
8110 pc = pc & 0xfffffffc;
8111
8112 displaced_write_reg (regs, dsc, 2, pc, CANNOT_WRITE_PC);
8113 displaced_write_reg (regs, dsc, 3, imm8, CANNOT_WRITE_PC);
8114
8115 dsc->rd = rt;
8116 dsc->u.ldst.xfersize = 4;
8117 dsc->u.ldst.rn = 0;
8118 dsc->u.ldst.immed = 0;
8119 dsc->u.ldst.writeback = 0;
8120 dsc->u.ldst.restore_r4 = 0;
8121
8122 dsc->modinsn[0] = 0x58d0; /* ldr r0, [r2, r3]*/
8123
8124 dsc->cleanup = &cleanup_load;
8125
8126 return 0;
8127 }
8128
8129 /* Copy Thumb cbnz/cbz insruction. */
8130
8131 static int
8132 thumb_copy_cbnz_cbz (struct gdbarch *gdbarch, uint16_t insn1,
8133 struct regcache *regs,
8134 struct displaced_step_closure *dsc)
8135 {
8136 int non_zero = bit (insn1, 11);
8137 unsigned int imm5 = (bit (insn1, 9) << 6) | (bits (insn1, 3, 7) << 1);
8138 CORE_ADDR from = dsc->insn_addr;
8139 int rn = bits (insn1, 0, 2);
8140 int rn_val = displaced_read_reg (regs, dsc, rn);
8141
8142 dsc->u.branch.cond = (rn_val && non_zero) || (!rn_val && !non_zero);
8143 /* CBNZ and CBZ do not affect the condition flags. If condition is true,
8144 set it INST_AL, so cleanup_branch will know branch is taken, otherwise,
8145 condition is false, let it be, cleanup_branch will do nothing. */
8146 if (dsc->u.branch.cond)
8147 {
8148 dsc->u.branch.cond = INST_AL;
8149 dsc->u.branch.dest = from + 4 + imm5;
8150 }
8151 else
8152 dsc->u.branch.dest = from + 2;
8153
8154 dsc->u.branch.link = 0;
8155 dsc->u.branch.exchange = 0;
8156
8157 if (debug_displaced)
8158 fprintf_unfiltered (gdb_stdlog, "displaced: copying %s [r%d = 0x%x]"
8159 " insn %.4x to %.8lx\n", non_zero ? "cbnz" : "cbz",
8160 rn, rn_val, insn1, dsc->u.branch.dest);
8161
8162 dsc->modinsn[0] = THUMB_NOP;
8163
8164 dsc->cleanup = &cleanup_branch;
8165 return 0;
8166 }
8167
/* Copy Table Branch Byte/Halfword (TBB/TBH).  These branch forward by
   twice the byte/halfword fetched from a table at [Rn, Rm] (TBB) or
   [Rn, Rm*2] (TBH).  Emulate the whole operation: read the table entry
   from target memory and set up an unconditional branch for
   cleanup_branch to take after the single-step.  Always returns 0.  */
static int
thumb2_copy_table_branch (struct gdbarch *gdbarch, uint16_t insn1,
                          uint16_t insn2, struct regcache *regs,
                          struct displaced_step_closure *dsc)
{
  ULONGEST rn_val, rm_val;
  int is_tbh = bit (insn2, 4);  /* Bit 4 of insn2 selects TBH vs TBB.  */
  CORE_ADDR halfwords = 0;
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  rn_val = displaced_read_reg (regs, dsc, bits (insn1, 0, 3));
  rm_val = displaced_read_reg (regs, dsc, bits (insn2, 0, 3));

  if (is_tbh)
    {
      gdb_byte buf[2];

      /* TBH: halfword table, indexed by Rm * 2.
         NOTE(review): the return value of target_read_memory is ignored;
         on a failed read BUF is used uninitialized — confirm whether
         callers guarantee readability here.  */
      target_read_memory (rn_val + 2 * rm_val, buf, 2);
      halfwords = extract_unsigned_integer (buf, 2, byte_order);
    }
  else
    {
      gdb_byte buf[1];

      /* TBB: byte table, indexed by Rm.  */
      target_read_memory (rn_val + rm_val, buf, 1);
      halfwords = extract_unsigned_integer (buf, 1, byte_order);
    }

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: %s base 0x%x offset 0x%x"
                        " offset 0x%x\n", is_tbh ? "tbh" : "tbb",
                        (unsigned int) rn_val, (unsigned int) rm_val,
                        (unsigned int) halfwords);

  /* Branch unconditionally to (aligned PC) + 2 * table entry; the PC
     value for a Thumb-2 instruction is insn_addr + 4.  */
  dsc->u.branch.cond = INST_AL;
  dsc->u.branch.link = 0;
  dsc->u.branch.exchange = 0;
  dsc->u.branch.dest = dsc->insn_addr + 4 + 2 * halfwords;

  dsc->cleanup = &cleanup_branch;

  return 0;
}
8212
8213 static void
8214 cleanup_pop_pc_16bit_all (struct gdbarch *gdbarch, struct regcache *regs,
8215 struct displaced_step_closure *dsc)
8216 {
8217 /* PC <- r7 */
8218 int val = displaced_read_reg (regs, dsc, 7);
8219 displaced_write_reg (regs, dsc, ARM_PC_REGNUM, val, BX_WRITE_PC);
8220
8221 /* r7 <- r8 */
8222 val = displaced_read_reg (regs, dsc, 8);
8223 displaced_write_reg (regs, dsc, 7, val, CANNOT_WRITE_PC);
8224
8225 /* r8 <- tmp[0] */
8226 displaced_write_reg (regs, dsc, 8, dsc->tmp[0], CANNOT_WRITE_PC);
8227
8228 }
8229
8230 static int
8231 thumb_copy_pop_pc_16bit (struct gdbarch *gdbarch, unsigned short insn1,
8232 struct regcache *regs,
8233 struct displaced_step_closure *dsc)
8234 {
8235 dsc->u.block.regmask = insn1 & 0x00ff;
8236
8237 /* Rewrite instruction: POP {rX, rY, ...,rZ, PC}
8238 to :
8239
8240 (1) register list is full, that is, r0-r7 are used.
8241 Prepare: tmp[0] <- r8
8242
8243 POP {r0, r1, ...., r6, r7}; remove PC from reglist
8244 MOV r8, r7; Move value of r7 to r8;
8245 POP {r7}; Store PC value into r7.
8246
8247 Cleanup: PC <- r7, r7 <- r8, r8 <-tmp[0]
8248
8249 (2) register list is not full, supposing there are N registers in
8250 register list (except PC, 0 <= N <= 7).
8251 Prepare: for each i, 0 - N, tmp[i] <- ri.
8252
8253 POP {r0, r1, ...., rN};
8254
8255 Cleanup: Set registers in original reglist from r0 - rN. Restore r0 - rN
8256 from tmp[] properly.
8257 */
8258 if (debug_displaced)
8259 fprintf_unfiltered (gdb_stdlog,
8260 "displaced: copying thumb pop {%.8x, pc} insn %.4x\n",
8261 dsc->u.block.regmask, insn1);
8262
8263 if (dsc->u.block.regmask == 0xff)
8264 {
8265 dsc->tmp[0] = displaced_read_reg (regs, dsc, 8);
8266
8267 dsc->modinsn[0] = (insn1 & 0xfeff); /* POP {r0,r1,...,r6, r7} */
8268 dsc->modinsn[1] = 0x46b8; /* MOV r8, r7 */
8269 dsc->modinsn[2] = 0xbc80; /* POP {r7} */
8270
8271 dsc->numinsns = 3;
8272 dsc->cleanup = &cleanup_pop_pc_16bit_all;
8273 }
8274 else
8275 {
8276 unsigned int num_in_list = bitcount (dsc->u.block.regmask);
8277 unsigned int new_regmask, bit = 1;
8278 unsigned int to = 0, from = 0, i, new_rn;
8279
8280 for (i = 0; i < num_in_list + 1; i++)
8281 dsc->tmp[i] = displaced_read_reg (regs, dsc, i);
8282
8283 new_regmask = (1 << (num_in_list + 1)) - 1;
8284
8285 if (debug_displaced)
8286 fprintf_unfiltered (gdb_stdlog, _("displaced: POP "
8287 "{..., pc}: original reg list %.4x,"
8288 " modified list %.4x\n"),
8289 (int) dsc->u.block.regmask, new_regmask);
8290
8291 dsc->u.block.regmask |= 0x8000;
8292 dsc->u.block.writeback = 0;
8293 dsc->u.block.cond = INST_AL;
8294
8295 dsc->modinsn[0] = (insn1 & ~0x1ff) | (new_regmask & 0xff);
8296
8297 dsc->cleanup = &cleanup_block_load_pc;
8298 }
8299
8300 return 0;
8301 }
8302
/* Decode the 16-bit Thumb instruction INSN1 and set up DSC for
   displaced-stepping it: instructions that cannot reference the PC are
   copied unmodified, the rest are dispatched to specialized copy
   routines.  Raises an internal error if decoding fails.  */
static void
thumb_process_displaced_16bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  unsigned short op_bit_12_15 = bits (insn1, 12, 15);
  unsigned short op_bit_10_11 = bits (insn1, 10, 11);
  int err = 0;

  /* 16-bit thumb instructions.  Dispatch on the top opcode nibble.  */
  switch (op_bit_12_15)
    {
      /* Shift (imme), add, subtract, move and compare.  Only low
         registers — cannot touch PC, so copy unmodified.  */
    case 0: case 1: case 2: case 3:
      err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                         "shift/add/sub/mov/cmp",
                                         dsc);
      break;
    case 4:
      switch (op_bit_10_11)
        {
        case 0: /* Data-processing */
          err = thumb_copy_unmodified_16bit (gdbarch, insn1,
                                             "data-processing",
                                             dsc);
          break;
        case 1: /* Special data instructions and branch and exchange.  */
          {
            unsigned short op = bits (insn1, 7, 9);
            if (op == 6 || op == 7) /* BX or BLX */
              err = thumb_copy_bx_blx_reg (gdbarch, insn1, regs, dsc);
            else if (bits (insn1, 6, 7) != 0) /* ADD/MOV/CMP high registers.  */
              err = thumb_copy_alu_reg (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "special data",
                                                 dsc);
          }
          break;
        default: /* LDR (literal) — PC-relative, needs emulation.  */
          err = thumb_copy_16bit_ldr_literal (gdbarch, insn1, regs, dsc);
        }
      break;
    case 5: case 6: case 7: case 8: case 9: /* Load/Store single data item */
      err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldr/str", dsc);
      break;
    case 10:
      if (op_bit_10_11 < 2) /* Generate PC-relative address (ADR) */
        err = thumb_decode_pc_relative_16bit (gdbarch, insn1, regs, dsc);
      else /* Generate SP-relative address */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "sp-relative", dsc);
      break;
    case 11: /* Misc 16-bit instructions */
      {
        switch (bits (insn1, 8, 11))
          {
          case 1: case 3: case 9: case 11: /* CBNZ, CBZ */
            err = thumb_copy_cbnz_cbz (gdbarch, insn1, regs, dsc);
            break;
          case 12: case 13: /* POP */
            if (bit (insn1, 8)) /* PC is in register list.  */
              err = thumb_copy_pop_pc_16bit (gdbarch, insn1, regs, dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "pop", dsc);
            break;
          case 15: /* If-Then, and hints */
            if (bits (insn1, 0, 3))
              /* If-Then makes up to four following instructions conditional.
                 IT instruction itself is not conditional, so handle it as a
                 common unmodified instruction.  */
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "If-Then",
                                                 dsc);
            else
              err = thumb_copy_unmodified_16bit (gdbarch, insn1, "hints", dsc);
            break;
          default:
            err = thumb_copy_unmodified_16bit (gdbarch, insn1, "misc", dsc);
          }
      }
      break;
    case 12:
      if (op_bit_10_11 < 2) /* Store multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "stm", dsc);
      else /* Load multiple registers */
        err = thumb_copy_unmodified_16bit (gdbarch, insn1, "ldm", dsc);
      break;
    case 13: /* Conditional branch and supervisor call */
      if (bits (insn1, 9, 11) != 7) /* conditional branch */
        err = thumb_copy_b (gdbarch, insn1, dsc);
      else
        err = thumb_copy_svc (gdbarch, insn1, regs, dsc);
      break;
    case 14: /* Unconditional branch */
      err = thumb_copy_b (gdbarch, insn1, dsc);
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_16bit_insn: Instruction decode error"));
}
8405
/* Decode a 32-bit Thumb-2 load / memory-hint instruction (INSN1:INSN2)
   and dispatch to the appropriate copy routine.  PC-relative (literal)
   loads and PLD/PLI literals need emulation; everything else cannot
   reference the PC and is copied unmodified.  Returns the dispatched
   routine's result, or 0.

   Fix: the local `int err = 0;' was declared but never used — every
   path returns directly — so it is removed.  */
static int
decode_thumb_32bit_ld_mem_hints (struct gdbarch *gdbarch,
                                 uint16_t insn1, uint16_t insn2,
                                 struct regcache *regs,
                                 struct displaced_step_closure *dsc)
{
  int rt = bits (insn2, 12, 15);
  int rn = bits (insn1, 0, 3);
  int op1 = bits (insn1, 7, 8);

  switch (bits (insn1, 5, 6))
    {
    case 0: /* Load byte and memory hints */
      if (rt == 0xf) /* PLD/PLI */
        {
          if (rn == 0xf)
            /* PLD literal or Encoding T3 of PLI(immediate, literal).  */
            return thumb2_copy_preload (gdbarch, insn1, insn2, regs, dsc);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "pli/pld", dsc);
        }
      else
        {
          if (rn == 0xf) /* LDRB/LDRSB (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             1);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrb{reg, immediate}/ldrbt",
                                                dsc);
        }

      break;
    case 1: /* Load halfword and memory hints.  */
      if (rt == 0xf) /* PLD{W} and Unalloc memory hint.  */
        return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                            "pld/unalloc memhint", dsc);
      else
        {
          if (rn == 0xf) /* LDRH/LDRSH (literal) */
            return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc,
                                             2);
          else
            return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                "ldrh/ldrht", dsc);
        }
      break;
    case 2: /* Load word */
      {
        int insn2_bit_8_11 = bits (insn2, 8, 11);

        if (rn == 0xf) /* LDR (literal) */
          return thumb2_copy_load_literal (gdbarch, insn1, insn2, regs, dsc, 4);
        else if (op1 == 0x1) /* Encoding T3 */
          return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs, dsc,
                                           0, 1);
        else /* op1 == 0x0 */
          {
            if (insn2_bit_8_11 == 0xc || (insn2_bit_8_11 & 0x9) == 0x9)
              /* LDR (immediate) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, bit (insn2, 8), 1);
            else if (insn2_bit_8_11 == 0xe) /* LDRT */
              return thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                  "ldrt", dsc);
            else
              /* LDR (register) */
              return thumb2_copy_load_reg_imm (gdbarch, insn1, insn2, regs,
                                               dsc, 0, 0);
          }
        break;
      }
    default:
      return thumb_32bit_copy_undef (gdbarch, insn1, insn2, dsc);
      break;
    }
  return 0;
}
8486
/* Decode the 32-bit Thumb-2 instruction INSN1:INSN2 and set up DSC for
   displaced-stepping it, dispatching to the appropriate copy routine.
   Raises an internal error if decoding fails.

   Fix: the inner data-processing opcode local was named `op', shadowing
   the outer `op' (bit 15 of insn2); renamed to `dp_op' to avoid the
   -Wshadow hazard.  No behavior change.  */
static void
thumb_process_displaced_32bit_insn (struct gdbarch *gdbarch, uint16_t insn1,
                                    uint16_t insn2, struct regcache *regs,
                                    struct displaced_step_closure *dsc)
{
  int err = 0;
  unsigned short op = bit (insn2, 15);
  unsigned int op1 = bits (insn1, 11, 12);

  switch (op1)
    {
    case 1:
      {
        switch (bits (insn1, 9, 10))
          {
          case 0:
            if (bit (insn1, 6))
              {
                /* Load/store {dual, exclusive}, table branch.  */
                if (bits (insn1, 7, 8) == 1 && bits (insn1, 4, 5) == 1
                    && bits (insn2, 5, 7) == 0)
                  err = thumb2_copy_table_branch (gdbarch, insn1, insn2, regs,
                                                  dsc);
                else
                  /* PC is not allowed to use in load/store {dual, exclusive}
                     instructions.  */
                  err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                     "load/store dual/ex", dsc);
              }
            else /* load/store multiple */
              {
                switch (bits (insn1, 7, 8))
                  {
                  case 0: case 3: /* SRS, RFE */
                    err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                       "srs/rfe", dsc);
                    break;
                  case 1: case 2: /* LDM/STM/PUSH/POP */
                    err = thumb2_copy_block_xfer (gdbarch, insn1, insn2, regs, dsc);
                    break;
                  }
              }
            break;

          case 1:
            /* Data-processing (shift register).  */
            err = thumb2_decode_dp_shift_reg (gdbarch, insn1, insn2, regs,
                                              dsc);
            break;
          default: /* Coprocessor instructions.  */
            err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
            break;
          }
        break;
      }
    case 2: /* op1 = 2 */
      if (op) /* Branch and misc control.  */
        {
          if (bit (insn2, 14)  /* BLX/BL */
              || bit (insn2, 12) /* Unconditional branch */
              || (bits (insn1, 7, 9) != 0x7)) /* Conditional branch */
            err = thumb2_copy_b_bl_blx (gdbarch, insn1, insn2, regs, dsc);
          else
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "misc ctrl", dsc);
        }
      else
        {
          if (bit (insn1, 9)) /* Data processing (plain binary imm).  */
            {
              int dp_op = bits (insn1, 4, 8);
              int rn = bits (insn1, 0, 3);
              if ((dp_op == 0 || dp_op == 0xa) && rn == 0xf)
                /* ADR — PC-relative address computation.  */
                err = thumb_copy_pc_relative_32bit (gdbarch, insn1, insn2,
                                                    regs, dsc);
              else
                err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                   "dp/pb", dsc);
            }
          else /* Data processing (modified immediate) */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "dp/mi", dsc);
        }
      break;
    case 3: /* op1 = 3 */
      switch (bits (insn1, 9, 10))
        {
        case 0:
          if (bit (insn1, 4))
            err = decode_thumb_32bit_ld_mem_hints (gdbarch, insn1, insn2,
                                                   regs, dsc);
          else /* NEON Load/Store and Store single data item */
            err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                               "neon elt/struct load/store",
                                               dsc);
          break;
        case 1: /* op1 = 3, bits (9, 10) == 1 */
          switch (bits (insn1, 7, 8))
            {
            case 0: case 1: /* Data processing (register) */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "dp(reg)", dsc);
              break;
            case 2: /* Multiply and absolute difference */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "mul/mua/diff", dsc);
              break;
            case 3: /* Long multiply and divide */
              err = thumb_copy_unmodified_32bit (gdbarch, insn1, insn2,
                                                 "lmul/lmua", dsc);
              break;
            }
          break;
        default: /* Coprocessor instructions */
          err = thumb2_decode_svc_copro (gdbarch, insn1, insn2, regs, dsc);
          break;
        }
      break;
    default:
      err = 1;
    }

  if (err)
    internal_error (__FILE__, __LINE__,
                    _("thumb_process_displaced_32bit_insn: Instruction decode error"));

}
8614
8615 static void
8616 thumb_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8617 CORE_ADDR to, struct regcache *regs,
8618 struct displaced_step_closure *dsc)
8619 {
8620 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8621 uint16_t insn1
8622 = read_memory_unsigned_integer (from, 2, byte_order_for_code);
8623
8624 if (debug_displaced)
8625 fprintf_unfiltered (gdb_stdlog, "displaced: process thumb insn %.4x "
8626 "at %.8lx\n", insn1, (unsigned long) from);
8627
8628 dsc->is_thumb = 1;
8629 dsc->insn_size = thumb_insn_size (insn1);
8630 if (thumb_insn_size (insn1) == 4)
8631 {
8632 uint16_t insn2
8633 = read_memory_unsigned_integer (from + 2, 2, byte_order_for_code);
8634 thumb_process_displaced_32bit_insn (gdbarch, insn1, insn2, regs, dsc);
8635 }
8636 else
8637 thumb_process_displaced_16bit_insn (gdbarch, insn1, regs, dsc);
8638 }
8639
8640 void
8641 arm_process_displaced_insn (struct gdbarch *gdbarch, CORE_ADDR from,
8642 CORE_ADDR to, struct regcache *regs,
8643 struct displaced_step_closure *dsc)
8644 {
8645 int err = 0;
8646 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8647 uint32_t insn;
8648
8649 /* Most displaced instructions use a 1-instruction scratch space, so set this
8650 here and override below if/when necessary. */
8651 dsc->numinsns = 1;
8652 dsc->insn_addr = from;
8653 dsc->scratch_base = to;
8654 dsc->cleanup = NULL;
8655 dsc->wrote_to_pc = 0;
8656
8657 if (!displaced_in_arm_mode (regs))
8658 return thumb_process_displaced_insn (gdbarch, from, to, regs, dsc);
8659
8660 dsc->is_thumb = 0;
8661 dsc->insn_size = 4;
8662 insn = read_memory_unsigned_integer (from, 4, byte_order_for_code);
8663 if (debug_displaced)
8664 fprintf_unfiltered (gdb_stdlog, "displaced: stepping insn %.8lx "
8665 "at %.8lx\n", (unsigned long) insn,
8666 (unsigned long) from);
8667
8668 if ((insn & 0xf0000000) == 0xf0000000)
8669 err = arm_decode_unconditional (gdbarch, insn, regs, dsc);
8670 else switch (((insn & 0x10) >> 4) | ((insn & 0xe000000) >> 24))
8671 {
8672 case 0x0: case 0x1: case 0x2: case 0x3:
8673 err = arm_decode_dp_misc (gdbarch, insn, regs, dsc);
8674 break;
8675
8676 case 0x4: case 0x5: case 0x6:
8677 err = arm_decode_ld_st_word_ubyte (gdbarch, insn, regs, dsc);
8678 break;
8679
8680 case 0x7:
8681 err = arm_decode_media (gdbarch, insn, dsc);
8682 break;
8683
8684 case 0x8: case 0x9: case 0xa: case 0xb:
8685 err = arm_decode_b_bl_ldmstm (gdbarch, insn, regs, dsc);
8686 break;
8687
8688 case 0xc: case 0xd: case 0xe: case 0xf:
8689 err = arm_decode_svc_copro (gdbarch, insn, to, regs, dsc);
8690 break;
8691 }
8692
8693 if (err)
8694 internal_error (__FILE__, __LINE__,
8695 _("arm_process_displaced_insn: Instruction decode error"));
8696 }
8697
/* Actually set up the scratch space for a displaced instruction: poke
   DSC's modified instruction(s) into the scratch area at TO, followed
   by the architecture's breakpoint instruction so we regain control
   after the single-step.  DSC must already have been filled in by
   arm_process_displaced_insn.  */

void
arm_displaced_init_closure (struct gdbarch *gdbarch, CORE_ADDR from,
                            CORE_ADDR to, struct displaced_step_closure *dsc)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  unsigned int i, len, offset;
  enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
  /* Thumb modified instructions are written as 2-byte halfwords, ARM
     ones as 4-byte words.  */
  int size = dsc->is_thumb? 2 : 4;
  const gdb_byte *bkp_insn;

  offset = 0;
  /* Poke modified instruction(s).  */
  for (i = 0; i < dsc->numinsns; i++)
    {
      if (debug_displaced)
        {
          fprintf_unfiltered (gdb_stdlog, "displaced: writing insn ");
          if (size == 4)
            fprintf_unfiltered (gdb_stdlog, "%.8lx",
                                dsc->modinsn[i]);
          else if (size == 2)
            fprintf_unfiltered (gdb_stdlog, "%.4x",
                                (unsigned short)dsc->modinsn[i]);

          fprintf_unfiltered (gdb_stdlog, " at %.8lx\n",
                              (unsigned long) to + offset);

        }
      write_memory_unsigned_integer (to + offset, size,
                                     byte_order_for_code,
                                     dsc->modinsn[i]);
      offset += size;
    }

  /* Choose the correct breakpoint instruction.  */
  if (dsc->is_thumb)
    {
      bkp_insn = tdep->thumb_breakpoint;
      len = tdep->thumb_breakpoint_size;
    }
  else
    {
      bkp_insn = tdep->arm_breakpoint;
      len = tdep->arm_breakpoint_size;
    }

  /* Put breakpoint afterwards.  */
  write_memory (to + offset, bkp_insn, len);

  if (debug_displaced)
    fprintf_unfiltered (gdb_stdlog, "displaced: copy %s->%s: ",
                        paddress (gdbarch, from), paddress (gdbarch, to));
}
8753
8754 /* Entry point for copying an instruction into scratch space for displaced
8755 stepping. */
8756
8757 struct displaced_step_closure *
8758 arm_displaced_step_copy_insn (struct gdbarch *gdbarch,
8759 CORE_ADDR from, CORE_ADDR to,
8760 struct regcache *regs)
8761 {
8762 struct displaced_step_closure *dsc = XNEW (struct displaced_step_closure);
8763
8764 arm_process_displaced_insn (gdbarch, from, to, regs, dsc);
8765 arm_displaced_init_closure (gdbarch, from, to, dsc);
8766
8767 return dsc;
8768 }
8769
8770 /* Entry point for cleaning things up after a displaced instruction has been
8771 single-stepped. */
8772
8773 void
8774 arm_displaced_step_fixup (struct gdbarch *gdbarch,
8775 struct displaced_step_closure *dsc,
8776 CORE_ADDR from, CORE_ADDR to,
8777 struct regcache *regs)
8778 {
8779 if (dsc->cleanup)
8780 dsc->cleanup (gdbarch, regs, dsc);
8781
8782 if (!dsc->wrote_to_pc)
8783 regcache_cooked_write_unsigned (regs, ARM_PC_REGNUM,
8784 dsc->insn_addr + dsc->insn_size);
8785
8786 }
8787
8788 #include "bfd-in2.h"
8789 #include "libcoff.h"
8790
/* Disassembler callback: print the instruction at MEMADDR.  When the
   address is in Thumb code, hand the opcodes disassembler a fake COFF
   Thumb symbol so it switches to Thumb decoding.  Returns the length
   of the disassembled instruction, as print_insn_{big,little}_arm do.  */
static int
gdb_print_insn_arm (bfd_vma memaddr, disassemble_info *info)
{
  struct gdbarch *gdbarch = (struct gdbarch *) info->application_data;

  if (arm_pc_is_thumb (gdbarch, memaddr))
    {
      /* Lazily-initialized statics, shared by all calls.
         NOTE(review): this one-time initialization is not thread-safe —
         presumed fine because GDB disassembles from a single thread.  */
      static asymbol *asym;
      static combined_entry_type ce;
      static struct coff_symbol_struct csym;
      static struct bfd fake_bfd;
      static bfd_target fake_target;

      if (csym.native == NULL)
        {
          /* Create a fake symbol vector containing a Thumb symbol.
             This is solely so that the code in print_insn_little_arm()
             and print_insn_big_arm() in opcodes/arm-dis.c will detect
             the presence of a Thumb symbol and switch to decoding
             Thumb instructions.  */

          fake_target.flavour = bfd_target_coff_flavour;
          fake_bfd.xvec = &fake_target;
          ce.u.syment.n_sclass = C_THUMBEXTFUNC;
          csym.native = &ce;
          csym.symbol.the_bfd = &fake_bfd;
          csym.symbol.name = "fake";
          asym = (asymbol *) & csym;
        }

      /* Strip the Thumb bit before disassembling.  */
      memaddr = UNMAKE_THUMB_ADDR (memaddr);
      info->symbols = &asym;
    }
  else
    info->symbols = NULL;

  if (info->endian == BFD_ENDIAN_BIG)
    return print_insn_big_arm (memaddr, info);
  else
    return print_insn_little_arm (memaddr, info);
}
8832
8833 /* The following define instruction sequences that will cause ARM
8834 cpu's to take an undefined instruction trap. These are used to
8835 signal a breakpoint to GDB.
8836
8837 The newer ARMv4T cpu's are capable of operating in ARM or Thumb
8838 modes. A different instruction is required for each mode. The ARM
8839 cpu's can also be big or little endian. Thus four different
8840 instructions are needed to support all cases.
8841
8842 Note: ARMv4 defines several new instructions that will take the
8843 undefined instruction trap. ARM7TDMI is nominally ARMv4T, but does
8844 not in fact add the new instructions. The new undefined
8845 instructions in ARMv4 are all instructions that had no defined
8846 behaviour in earlier chips. There is no guarantee that they will
8847 raise an exception, but may be treated as NOP's. In practice, it
   may only be safe to rely on instructions matching:
8849
8850 3 3 2 2 2 2 2 2 2 2 2 2 1 1 1 1 1 1 1 1 1 1
8851 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0 9 8 7 6 5 4 3 2 1 0
8852 C C C C 0 1 1 x x x x x x x x x x x x x x x x x x x x 1 x x x x
8853
   Even this may only be true if the condition predicate is true. The
8855 following use a condition predicate of ALWAYS so it is always TRUE.
8856
8857 There are other ways of forcing a breakpoint. GNU/Linux, RISC iX,
8858 and NetBSD all use a software interrupt rather than an undefined
   instruction to force a trap. This can be handled by the
8860 abi-specific code during establishment of the gdbarch vector. */
8861
/* Trap-raising instruction patterns (see the comment above), in both
   byte orders.  The Thumb pattern consists of two identical bytes, so
   its little- and big-endian forms are the same.  */
#define ARM_LE_BREAKPOINT {0xFE,0xDE,0xFF,0xE7}
#define ARM_BE_BREAKPOINT {0xE7,0xFF,0xDE,0xFE}
#define THUMB_LE_BREAKPOINT {0xbe,0xbe}
#define THUMB_BE_BREAKPOINT {0xbe,0xbe}

/* Default breakpoint byte sequences, selected per byte order and mode
   at gdbarch initialization.  */
static const gdb_byte arm_default_arm_le_breakpoint[] = ARM_LE_BREAKPOINT;
static const gdb_byte arm_default_arm_be_breakpoint[] = ARM_BE_BREAKPOINT;
static const gdb_byte arm_default_thumb_le_breakpoint[] = THUMB_LE_BREAKPOINT;
static const gdb_byte arm_default_thumb_be_breakpoint[] = THUMB_BE_BREAKPOINT;
8871
8872 /* Determine the type and size of breakpoint to insert at PCPTR. Uses
8873 the program counter value to determine whether a 16-bit or 32-bit
8874 breakpoint should be used. It returns a pointer to a string of
8875 bytes that encode a breakpoint instruction, stores the length of
8876 the string to *lenptr, and adjusts the program counter (if
8877 necessary) to point to the actual memory location where the
8878 breakpoint should be inserted. */
8879
8880 static const unsigned char *
8881 arm_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr, int *lenptr)
8882 {
8883 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
8884 enum bfd_endian byte_order_for_code = gdbarch_byte_order_for_code (gdbarch);
8885
8886 if (arm_pc_is_thumb (gdbarch, *pcptr))
8887 {
8888 *pcptr = UNMAKE_THUMB_ADDR (*pcptr);
8889
8890 /* If we have a separate 32-bit breakpoint instruction for Thumb-2,
8891 check whether we are replacing a 32-bit instruction. */
8892 if (tdep->thumb2_breakpoint != NULL)
8893 {
8894 gdb_byte buf[2];
8895 if (target_read_memory (*pcptr, buf, 2) == 0)
8896 {
8897 unsigned short inst1;
8898 inst1 = extract_unsigned_integer (buf, 2, byte_order_for_code);
8899 if (thumb_insn_size (inst1) == 4)
8900 {
8901 *lenptr = tdep->thumb2_breakpoint_size;
8902 return tdep->thumb2_breakpoint;
8903 }
8904 }
8905 }
8906
8907 *lenptr = tdep->thumb_breakpoint_size;
8908 return tdep->thumb_breakpoint;
8909 }
8910 else
8911 {
8912 *lenptr = tdep->arm_breakpoint_size;
8913 return tdep->arm_breakpoint;
8914 }
8915 }
8916
8917 static void
8918 arm_remote_breakpoint_from_pc (struct gdbarch *gdbarch, CORE_ADDR *pcptr,
8919 int *kindptr)
8920 {
8921 arm_breakpoint_from_pc (gdbarch, pcptr, kindptr);
8922
8923 if (arm_pc_is_thumb (gdbarch, *pcptr) && *kindptr == 4)
8924 /* The documented magic value for a 32-bit Thumb-2 breakpoint, so
8925 that this is not confused with a 32-bit ARM breakpoint. */
8926 *kindptr = 3;
8927 }
8928
/* Extract from an array REGBUF containing the (raw) register state a
   function return value of type TYPE, and copy that, in virtual
   format, into VALBUF.  Which registers hold the value depends on the
   type's code and on the configured floating-point model.  */

static void
arm_extract_return_value (struct type *type, struct regcache *regs,
                          gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE_FLT == TYPE_CODE (type))
    {
      switch (gdbarch_tdep (gdbarch)->fp_model)
        {
        case ARM_FLOAT_FPA:
          {
            /* The value is in register F0 in internal format.  We need to
               extract the raw value and then convert it to the desired
               internal type.  */
            bfd_byte tmpbuf[FP_REGISTER_SIZE];

            regcache_cooked_read (regs, ARM_F0_REGNUM, tmpbuf);
            convert_from_extended (floatformat_from_type (type), tmpbuf,
                                   valbuf, gdbarch_byte_order (gdbarch));
          }
          break;

        case ARM_FLOAT_SOFT_FPA:
        case ARM_FLOAT_SOFT_VFP:
          /* ARM_FLOAT_VFP can arise if this is a variadic function so
             not using the VFP ABI code.  */
        case ARM_FLOAT_VFP:
          /* Soft-float values come back in r0 (and r1 when the value
             is wider than one register).  */
          regcache_cooked_read (regs, ARM_A1_REGNUM, valbuf);
          if (TYPE_LENGTH (type) > 4)
            regcache_cooked_read (regs, ARM_A1_REGNUM + 1,
                                  valbuf + INT_REGISTER_SIZE);
          break;

        default:
          internal_error (__FILE__, __LINE__,
                          _("arm_extract_return_value: "
                            "Floating point model not supported"));
          break;
        }
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
           || TYPE_CODE (type) == TYPE_CODE_CHAR
           || TYPE_CODE (type) == TYPE_CODE_BOOL
           || TYPE_CODE (type) == TYPE_CODE_PTR
           || TYPE_CODE (type) == TYPE_CODE_REF
           || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      /* If the type is a plain integer, then the access is
         straight-forward.  Otherwise we have to play around a bit
         more.  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      ULONGEST tmp;

      /* Copy one register per word, starting at r0.  */
      while (len > 0)
        {
          /* By using store_unsigned_integer we avoid having to do
             anything special for small big-endian values.  */
          regcache_cooked_read_unsigned (regs, regno++, &tmp);
          store_unsigned_integer (valbuf,
                                  (len > INT_REGISTER_SIZE
                                   ? INT_REGISTER_SIZE : len),
                                  byte_order, tmp);
          len -= INT_REGISTER_SIZE;
          valbuf += INT_REGISTER_SIZE;
        }
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
         been stored to word-aligned memory and then loaded into
         registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
        {
          regcache_cooked_read (regs, regno++, tmpbuf);
          /* The final (partial) word copies only the remaining bytes.  */
          memcpy (valbuf, tmpbuf,
                  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
          len -= INT_REGISTER_SIZE;
          valbuf += INT_REGISTER_SIZE;
        }
    }
}
9021
9022
/* Will a function return an aggregate type in memory or in a
   register?  Return 0 if an aggregate type can be returned in a
   register, 1 if it must be returned in memory.  The answer depends on
   the ABI: AAPCS uses a simple size test, while the legacy APCS also
   requires the aggregate to be "integer like".  */

static int
arm_return_in_memory (struct gdbarch *gdbarch, struct type *type)
{
  enum type_code code;

  type = check_typedef (type);

  /* Simple, non-aggregate types (ie not including vectors and
     complex) are always returned in a register (or registers).  */
  code = TYPE_CODE (type);
  if (TYPE_CODE_STRUCT != code && TYPE_CODE_UNION != code
      && TYPE_CODE_ARRAY != code && TYPE_CODE_COMPLEX != code)
    return 0;

  if (TYPE_CODE_ARRAY == code && TYPE_VECTOR (type))
    {
      /* Vector values should be returned using ARM registers if they
         are not over 16 bytes.  */
      return (TYPE_LENGTH (type) > 16);
    }

  if (gdbarch_tdep (gdbarch)->arm_abi != ARM_ABI_APCS)
    {
      /* The AAPCS says all aggregates not larger than a word are returned
         in a register.  */
      if (TYPE_LENGTH (type) <= INT_REGISTER_SIZE)
        return 0;

      return 1;
    }
  else
    {
      int nRc;

      /* All aggregate types that won't fit in a register must be returned
         in memory.  */
      if (TYPE_LENGTH (type) > INT_REGISTER_SIZE)
        return 1;

      /* In the ARM ABI, "integer" like aggregate types are returned in
         registers.  For an aggregate type to be integer like, its size
         must be less than or equal to INT_REGISTER_SIZE and the
         offset of each addressable subfield must be zero.  Note that bit
         fields are not addressable, and all addressable subfields of
         unions always start at offset zero.

         This function is based on the behaviour of GCC 2.95.1.
         See: gcc/arm.c: arm_return_in_memory() for details.

         Note: All versions of GCC before GCC 2.95.2 do not set up the
         parameters correctly for a function returning the following
         structure: struct { float f;}; This should be returned in memory,
         not a register.  Richard Earnshaw sent me a patch, but I do not
         know of any way to detect if a function like the above has been
         compiled with the correct calling convention.  */

      /* Assume all other aggregate types can be returned in a register.
         Run a check for structures, unions and arrays.  */
      nRc = 0;

      if ((TYPE_CODE_STRUCT == code) || (TYPE_CODE_UNION == code))
        {
          int i;
          /* Need to check if this struct/union is "integer" like.  For
             this to be true, its size must be less than or equal to
             INT_REGISTER_SIZE and the offset of each addressable
             subfield must be zero.  Note that bit fields are not
             addressable, and unions always start at offset zero.  If any
             of the subfields is a floating point type, the struct/union
             cannot be an integer type.  */

          /* For each field in the object, check:
             1) Is it FP? --> yes, nRc = 1;
             2) Is it addressable (bitpos != 0) and
             not packed (bitsize == 0)?
             --> yes, nRc = 1
          */

          for (i = 0; i < TYPE_NFIELDS (type); i++)
            {
              enum type_code field_type_code;

              field_type_code
                = TYPE_CODE (check_typedef (TYPE_FIELD_TYPE (type,
                                                             i)));

              /* Is it a floating point type field?  */
              if (field_type_code == TYPE_CODE_FLT)
                {
                  nRc = 1;
                  break;
                }

              /* If bitpos != 0, then we have to care about it.  */
              if (TYPE_FIELD_BITPOS (type, i) != 0)
                {
                  /* Bitfields are not addressable.  If the field bitsize is
                     zero, then the field is not packed.  Hence it cannot be
                     a bitfield or any other packed type.  */
                  if (TYPE_FIELD_BITSIZE (type, i) == 0)
                    {
                      nRc = 1;
                      break;
                    }
                }
            }
        }

      return nRc;
    }
}
9138
/* Write into appropriate registers a function return value of type
   TYPE, given in virtual format.  VALBUF holds the value bytes in
   target order; REGS is the register cache to write into.  */

static void
arm_store_return_value (struct type *type, struct regcache *regs,
			const gdb_byte *valbuf)
{
  struct gdbarch *gdbarch = get_regcache_arch (regs);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);

  if (TYPE_CODE (type) == TYPE_CODE_FLT)
    {
      gdb_byte buf[MAX_REGISTER_SIZE];

      switch (gdbarch_tdep (gdbarch)->fp_model)
	{
	case ARM_FLOAT_FPA:
	  /* FPA hardware returns floats in f0, in the FPA's extended
	     format; convert before writing.  */
	  convert_to_extended (floatformat_from_type (type), buf, valbuf,
			       gdbarch_byte_order (gdbarch));
	  regcache_cooked_write (regs, ARM_F0_REGNUM, buf);
	  break;

	case ARM_FLOAT_SOFT_FPA:
	case ARM_FLOAT_SOFT_VFP:
	  /* ARM_FLOAT_VFP can arise if this is a variadic function so
	     not using the VFP ABI code.  */
	case ARM_FLOAT_VFP:
	  /* The value goes in r0, plus r1 when wider than one word.  */
	  regcache_cooked_write (regs, ARM_A1_REGNUM, valbuf);
	  if (TYPE_LENGTH (type) > 4)
	    regcache_cooked_write (regs, ARM_A1_REGNUM + 1,
				   valbuf + INT_REGISTER_SIZE);
	  break;

	default:
	  internal_error (__FILE__, __LINE__,
			  _("arm_store_return_value: Floating "
			    "point model not supported"));
	  break;
	}
    }
  else if (TYPE_CODE (type) == TYPE_CODE_INT
	   || TYPE_CODE (type) == TYPE_CODE_CHAR
	   || TYPE_CODE (type) == TYPE_CODE_BOOL
	   || TYPE_CODE (type) == TYPE_CODE_PTR
	   || TYPE_CODE (type) == TYPE_CODE_REF
	   || TYPE_CODE (type) == TYPE_CODE_ENUM)
    {
      if (TYPE_LENGTH (type) <= 4)
	{
	  /* Values of one word or less are zero/sign-extended and
	     returned in r0.  */
	  bfd_byte tmpbuf[INT_REGISTER_SIZE];
	  LONGEST val = unpack_long (type, valbuf);

	  store_signed_integer (tmpbuf, INT_REGISTER_SIZE, byte_order, val);
	  regcache_cooked_write (regs, ARM_A1_REGNUM, tmpbuf);
	}
      else
	{
	  /* Integral values greater than one word are stored in consecutive
	     registers starting with r0.  This will always be a multiple of
	     the register size.  */
	  int len = TYPE_LENGTH (type);
	  int regno = ARM_A1_REGNUM;

	  while (len > 0)
	    {
	      regcache_cooked_write (regs, regno++, valbuf);
	      len -= INT_REGISTER_SIZE;
	      valbuf += INT_REGISTER_SIZE;
	    }
	}
    }
  else
    {
      /* For a structure or union the behaviour is as if the value had
	 been stored to word-aligned memory and then loaded into
	 registers with 32-bit load instruction(s).  */
      int len = TYPE_LENGTH (type);
      int regno = ARM_A1_REGNUM;
      bfd_byte tmpbuf[INT_REGISTER_SIZE];

      while (len > 0)
	{
	  /* Stage through TMPBUF so a trailing fragment shorter than a
	     word does not read past the end of VALBUF.  */
	  memcpy (tmpbuf, valbuf,
		  len > INT_REGISTER_SIZE ? INT_REGISTER_SIZE : len);
	  regcache_cooked_write (regs, regno++, tmpbuf);
	  len -= INT_REGISTER_SIZE;
	  valbuf += INT_REGISTER_SIZE;
	}
    }
}
9232
9233
/* Handle function return values.  Implementation of the gdbarch
   return_value method: decide whether VALTYPE is returned in
   registers or in memory, and transfer the value to (WRITEBUF) or
   from (READBUF) the registers when a buffer is supplied.  */

static enum return_value_convention
arm_return_value (struct gdbarch *gdbarch, struct value *function,
		  struct type *valtype, struct regcache *regcache,
		  gdb_byte *readbuf, const gdb_byte *writebuf)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  struct type *func_type = function ? value_type (function) : NULL;
  enum arm_vfp_cprc_base_type vfp_base_type;
  int vfp_base_count;

  /* VFP ABI: a co-processor register candidate is returned in a run
     of VFP registers (sN, dN or qN depending on the base type).  */
  if (arm_vfp_abi_for_function (gdbarch, func_type)
      && arm_vfp_call_candidate (valtype, &vfp_base_type, &vfp_base_count))
    {
      int reg_char = arm_vfp_cprc_reg_char (vfp_base_type);
      int unit_length = arm_vfp_cprc_unit_length (vfp_base_type);
      int i;
      for (i = 0; i < vfp_base_count; i++)
	{
	  if (reg_char == 'q')
	    {
	      /* Quad registers are transferred via their two double
		 halves.  */
	      if (writebuf)
		arm_neon_quad_write (gdbarch, regcache, i,
				     writebuf + i * unit_length);

	      if (readbuf)
		arm_neon_quad_read (gdbarch, regcache, i,
				    readbuf + i * unit_length);
	    }
	  else
	    {
	      char name_buf[4];
	      int regnum;

	      /* Look up the sN/dN register by name.  */
	      xsnprintf (name_buf, sizeof (name_buf), "%c%d", reg_char, i);
	      regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						    strlen (name_buf));
	      if (writebuf)
		regcache_cooked_write (regcache, regnum,
				       writebuf + i * unit_length);
	      if (readbuf)
		regcache_cooked_read (regcache, regnum,
				      readbuf + i * unit_length);
	    }
	}
      return RETURN_VALUE_REGISTER_CONVENTION;
    }

  /* Aggregates and complex values may have to be returned in memory,
     depending on the struct-return convention in effect.  */
  if (TYPE_CODE (valtype) == TYPE_CODE_STRUCT
      || TYPE_CODE (valtype) == TYPE_CODE_UNION
      || TYPE_CODE (valtype) == TYPE_CODE_ARRAY)
    {
      if (tdep->struct_return == pcc_struct_return
	  || arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }
  else if (TYPE_CODE (valtype) == TYPE_CODE_COMPLEX)
    {
      if (arm_return_in_memory (gdbarch, valtype))
	return RETURN_VALUE_STRUCT_CONVENTION;
    }

  /* Everything else travels through the core registers.  */
  if (writebuf)
    arm_store_return_value (valtype, regcache, writebuf);

  if (readbuf)
    arm_extract_return_value (valtype, regcache, readbuf);

  return RETURN_VALUE_REGISTER_CONVENTION;
}
9305
9306
/* Implement the gdbarch get_longjmp_target method: given FRAME
   stopped at a longjmp, recover the PC it will jump to.  The jmp_buf
   pointer is the first argument, in r0; the PC slot inside it is
   located with the OS-ABI supplied jb_pc / jb_elt_size.  Return
   non-zero and set *PC on success, zero if the buffer is unreadable.  */

static int
arm_get_longjmp_target (struct frame_info *frame, CORE_ADDR *pc)
{
  struct gdbarch *gdbarch = get_frame_arch (frame);
  struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
  enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
  CORE_ADDR jb_addr;
  gdb_byte buf[INT_REGISTER_SIZE];

  jb_addr = get_frame_register_unsigned (frame, ARM_A1_REGNUM);

  if (target_read_memory (jb_addr + tdep->jb_pc * tdep->jb_elt_size, buf,
			  INT_REGISTER_SIZE))
    return 0;

  *pc = extract_unsigned_integer (buf, INT_REGISTER_SIZE, byte_order);
  return 1;
}
9325
9326 /* Recognize GCC and GNU ld's trampolines. If we are in a trampoline,
9327 return the target PC. Otherwise return 0. */
9328
9329 CORE_ADDR
9330 arm_skip_stub (struct frame_info *frame, CORE_ADDR pc)
9331 {
9332 const char *name;
9333 int namelen;
9334 CORE_ADDR start_addr;
9335
9336 /* Find the starting address and name of the function containing the PC. */
9337 if (find_pc_partial_function (pc, &name, &start_addr, NULL) == 0)
9338 {
9339 /* Trampoline 'bx reg' doesn't belong to any functions. Do the
9340 check here. */
9341 start_addr = arm_skip_bx_reg (frame, pc);
9342 if (start_addr != 0)
9343 return start_addr;
9344
9345 return 0;
9346 }
9347
9348 /* If PC is in a Thumb call or return stub, return the address of the
9349 target PC, which is in a register. The thunk functions are called
9350 _call_via_xx, where x is the register name. The possible names
9351 are r0-r9, sl, fp, ip, sp, and lr. ARM RealView has similar
9352 functions, named __ARM_call_via_r[0-7]. */
9353 if (startswith (name, "_call_via_")
9354 || startswith (name, "__ARM_call_via_"))
9355 {
9356 /* Use the name suffix to determine which register contains the
9357 target PC. */
9358 static char *table[15] =
9359 {"r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
9360 "r8", "r9", "sl", "fp", "ip", "sp", "lr"
9361 };
9362 int regno;
9363 int offset = strlen (name) - 2;
9364
9365 for (regno = 0; regno <= 14; regno++)
9366 if (strcmp (&name[offset], table[regno]) == 0)
9367 return get_frame_register_unsigned (frame, regno);
9368 }
9369
9370 /* GNU ld generates __foo_from_arm or __foo_from_thumb for
9371 non-interworking calls to foo. We could decode the stubs
9372 to find the target but it's easier to use the symbol table. */
9373 namelen = strlen (name);
9374 if (name[0] == '_' && name[1] == '_'
9375 && ((namelen > 2 + strlen ("_from_thumb")
9376 && startswith (name + namelen - strlen ("_from_thumb"), "_from_thumb"))
9377 || (namelen > 2 + strlen ("_from_arm")
9378 && startswith (name + namelen - strlen ("_from_arm"), "_from_arm"))))
9379 {
9380 char *target_name;
9381 int target_len = namelen - 2;
9382 struct bound_minimal_symbol minsym;
9383 struct objfile *objfile;
9384 struct obj_section *sec;
9385
9386 if (name[namelen - 1] == 'b')
9387 target_len -= strlen ("_from_thumb");
9388 else
9389 target_len -= strlen ("_from_arm");
9390
9391 target_name = (char *) alloca (target_len + 1);
9392 memcpy (target_name, name + 2, target_len);
9393 target_name[target_len] = '\0';
9394
9395 sec = find_pc_section (pc);
9396 objfile = (sec == NULL) ? NULL : sec->objfile;
9397 minsym = lookup_minimal_symbol (target_name, NULL, objfile);
9398 if (minsym.minsym != NULL)
9399 return BMSYMBOL_VALUE_ADDRESS (minsym);
9400 else
9401 return 0;
9402 }
9403
9404 return 0; /* not a stub */
9405 }
9406
9407 static void
9408 set_arm_command (char *args, int from_tty)
9409 {
9410 printf_unfiltered (_("\
9411 \"set arm\" must be followed by an apporpriate subcommand.\n"));
9412 help_list (setarmcmdlist, "set arm ", all_commands, gdb_stdout);
9413 }
9414
/* Implement the top-level "show arm" prefix command: display the
   current values of all "show arm" subcommands.  */

static void
show_arm_command (char *args, int from_tty)
{
  cmd_show_list (showarmcmdlist, from_tty, "");
}
9420
/* Rebuild the current target architecture so that a changed
   "set arm ..." parameter takes effect.  No-op when the current
   architecture is not ARM.  */

static void
arm_update_current_architecture (void)
{
  struct gdbarch_info info;

  /* If the current architecture is not ARM, we have nothing to do.  */
  if (gdbarch_bfd_arch_info (target_gdbarch ())->arch != bfd_arch_arm)
    return;

  /* Update the architecture.  */
  gdbarch_info_init (&info);

  if (!gdbarch_update_p (info))
    internal_error (__FILE__, __LINE__, _("could not update architecture"));
}
9436
/* Implement "set arm fp-model": map the string the CLI stored in
   CURRENT_FP_MODEL onto the matching arm_float_model enumerator and
   rebuild the architecture so the new model takes effect.  */

static void
set_fp_model_sfunc (char *args, int from_tty,
		    struct cmd_list_element *c)
{
  int fp_model;

  for (fp_model = ARM_FLOAT_AUTO; fp_model != ARM_FLOAT_LAST; fp_model++)
    if (strcmp (current_fp_model, fp_model_strings[fp_model]) == 0)
      {
	arm_fp_model = (enum arm_float_model) fp_model;
	break;
      }

  /* Reaching ARM_FLOAT_LAST means no table entry matched, which the
     command machinery should have prevented.  */
  if (fp_model == ARM_FLOAT_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid fp model accepted: %s."),
		    current_fp_model);

  arm_update_current_architecture ();
}
9456
/* Implement "show arm fp-model".  When the user setting is "auto" and
   the current architecture is ARM, also report the model actually in
   effect, taken from the per-architecture tdep.  */

static void
show_fp_model (struct ui_file *file, int from_tty,
	       struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_fp_model == ARM_FLOAT_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM floating point model is \"auto\" (currently \"%s\").\n"),
		      fp_model_strings[tdep->fp_model]);
  else
    fprintf_filtered (file, _("\
The current ARM floating point model is \"%s\".\n"),
		      fp_model_strings[arm_fp_model]);
}
9473
/* Implement "set arm abi": map the string the CLI stored in
   ARM_ABI_STRING onto the matching arm_abi_kind enumerator and
   rebuild the architecture so the new ABI takes effect.  */

static void
arm_set_abi (char *args, int from_tty,
	     struct cmd_list_element *c)
{
  int arm_abi;

  for (arm_abi = ARM_ABI_AUTO; arm_abi != ARM_ABI_LAST; arm_abi++)
    if (strcmp (arm_abi_string, arm_abi_strings[arm_abi]) == 0)
      {
	arm_abi_global = (enum arm_abi_kind) arm_abi;
	break;
      }

  /* Reaching ARM_ABI_LAST means no table entry matched, which the
     command machinery should have prevented.  */
  if (arm_abi == ARM_ABI_LAST)
    internal_error (__FILE__, __LINE__, _("Invalid ABI accepted: %s."),
		    arm_abi_string);

  arm_update_current_architecture ();
}
9493
/* Implement "show arm abi".  When the user setting is "auto" and the
   current architecture is ARM, also report the ABI actually in
   effect, taken from the per-architecture tdep.  */

static void
arm_show_abi (struct ui_file *file, int from_tty,
	      struct cmd_list_element *c, const char *value)
{
  struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());

  if (arm_abi_global == ARM_ABI_AUTO
      && gdbarch_bfd_arch_info (target_gdbarch ())->arch == bfd_arch_arm)
    fprintf_filtered (file, _("\
The current ARM ABI is \"auto\" (currently \"%s\").\n"),
		      arm_abi_strings[tdep->arm_abi]);
  else
    fprintf_filtered (file, _("The current ARM ABI is \"%s\".\n"),
		      arm_abi_string);
}
9509
/* Implement "show arm fallback-mode": the mode (ARM/Thumb) assumed
   when symbol information is not available.  */

static void
arm_show_fallback_mode (struct ui_file *file, int from_tty,
			struct cmd_list_element *c, const char *value)
{
  fprintf_filtered (file,
		    _("The current execution mode assumed "
		      "(when symbols are unavailable) is \"%s\".\n"),
		    arm_fallback_mode_string);
}
9519
9520 static void
9521 arm_show_force_mode (struct ui_file *file, int from_tty,
9522 struct cmd_list_element *c, const char *value)
9523 {
9524 struct gdbarch_tdep *tdep = gdbarch_tdep (target_gdbarch ());
9525
9526 fprintf_filtered (file,
9527 _("The current execution mode assumed "
9528 "(even when symbols are available) is \"%s\".\n"),
9529 arm_force_mode_string);
9530 }
9531
/* If the user changes the register disassembly style used for info
   register and other commands, we have to also switch the style used
   in opcodes for disassembly output.  This function is run in the "set
   arm disassembly" command, and does that.  */

static void
set_disassembly_style_sfunc (char *args, int from_tty,
			     struct cmd_list_element *c)
{
  /* The chosen style is already recorded in the disassembly_style
     variable by the CLI; just propagate it to the opcodes library.  */
  set_disassembly_style ();
}
9543 \f
9544 /* Return the ARM register name corresponding to register I. */
9545 static const char *
9546 arm_register_name (struct gdbarch *gdbarch, int i)
9547 {
9548 const int num_regs = gdbarch_num_regs (gdbarch);
9549
9550 if (gdbarch_tdep (gdbarch)->have_vfp_pseudos
9551 && i >= num_regs && i < num_regs + 32)
9552 {
9553 static const char *const vfp_pseudo_names[] = {
9554 "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7",
9555 "s8", "s9", "s10", "s11", "s12", "s13", "s14", "s15",
9556 "s16", "s17", "s18", "s19", "s20", "s21", "s22", "s23",
9557 "s24", "s25", "s26", "s27", "s28", "s29", "s30", "s31",
9558 };
9559
9560 return vfp_pseudo_names[i - num_regs];
9561 }
9562
9563 if (gdbarch_tdep (gdbarch)->have_neon_pseudos
9564 && i >= num_regs + 32 && i < num_regs + 32 + 16)
9565 {
9566 static const char *const neon_pseudo_names[] = {
9567 "q0", "q1", "q2", "q3", "q4", "q5", "q6", "q7",
9568 "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15",
9569 };
9570
9571 return neon_pseudo_names[i - num_regs - 32];
9572 }
9573
9574 if (i >= ARRAY_SIZE (arm_register_names))
9575 /* These registers are only supported on targets which supply
9576 an XML description. */
9577 return "";
9578
9579 return arm_register_names[i];
9580 }
9581
9582 static void
9583 set_disassembly_style (void)
9584 {
9585 int current;
9586
9587 /* Find the style that the user wants. */
9588 for (current = 0; current < num_disassembly_options; current++)
9589 if (disassembly_style == valid_disassembly_styles[current])
9590 break;
9591 gdb_assert (current < num_disassembly_options);
9592
9593 /* Synchronize the disassembler. */
9594 set_arm_regname_option (current);
9595 }
9596
9597 /* Test whether the coff symbol specific value corresponds to a Thumb
9598 function. */
9599
9600 static int
9601 coff_sym_is_thumb (int val)
9602 {
9603 return (val == C_THUMBEXT
9604 || val == C_THUMBSTAT
9605 || val == C_THUMBEXTFUNC
9606 || val == C_THUMBSTATFUNC
9607 || val == C_THUMBLABEL);
9608 }
9609
/* arm_coff_make_msymbol_special()
   arm_elf_make_msymbol_special()

   These functions test whether the COFF or ELF symbol corresponds to
   an address in thumb code, and set a "special" bit in a minimal
   symbol to indicate that it does.  */

static void
arm_elf_make_msymbol_special(asymbol *sym, struct minimal_symbol *msym)
{
  /* Mark MSYM as Thumb when the ELF symbol's branch type says the
     symbol targets Thumb code.  */
  if (ARM_SYM_BRANCH_TYPE (&((elf_symbol_type *)sym)->internal_elf_sym)
      == ST_BRANCH_TO_THUMB)
    MSYMBOL_SET_SPECIAL (msym);
}
9624
/* Mark MSYM special (Thumb) when the COFF symbol class VAL denotes
   one of the Thumb storage classes.  */

static void
arm_coff_make_msymbol_special(int val, struct minimal_symbol *msym)
{
  if (coff_sym_is_thumb (val))
    MSYMBOL_SET_SPECIAL (msym);
}
9631
/* Deallocation function for the per-objfile data registered under
   arm_objfile_data_key: free each section's vector of mapping
   symbols.  ARG is the struct arm_per_objfile for OBJFILE.  */

static void
arm_objfile_data_free (struct objfile *objfile, void *arg)
{
  struct arm_per_objfile *data = (struct arm_per_objfile *) arg;
  unsigned int i;

  /* Only the vectors are freed here; the arm_per_objfile itself and
     the section_maps array live on the objfile obstack.  */
  for (i = 0; i < objfile->obfd->section_count; i++)
    VEC_free (arm_mapping_symbol_s, data->section_maps[i]);
}
9641
/* Implement the gdbarch record_special_symbol method: remember ARM
   ELF mapping symbols ($a = ARM code, $t = Thumb code, $d = data) so
   the instruction-set mode of an address can be determined later.
   The symbols are kept per section, sorted by value.  */

static void
arm_record_special_symbol (struct gdbarch *gdbarch, struct objfile *objfile,
			   asymbol *sym)
{
  const char *name = bfd_asymbol_name (sym);
  struct arm_per_objfile *data;
  VEC(arm_mapping_symbol_s) **map_p;
  struct arm_mapping_symbol new_map_sym;

  gdb_assert (name[0] == '$');
  /* Only $a, $t and $d are of interest; ignore other '$' symbols.  */
  if (name[1] != 'a' && name[1] != 't' && name[1] != 'd')
    return;

  /* Lazily allocate the per-objfile table of per-section vectors.  */
  data = (struct arm_per_objfile *) objfile_data (objfile,
						  arm_objfile_data_key);
  if (data == NULL)
    {
      data = OBSTACK_ZALLOC (&objfile->objfile_obstack,
			     struct arm_per_objfile);
      set_objfile_data (objfile, arm_objfile_data_key, data);
      data->section_maps = OBSTACK_CALLOC (&objfile->objfile_obstack,
					   objfile->obfd->section_count,
					   VEC(arm_mapping_symbol_s) *);
    }
  map_p = &data->section_maps[bfd_get_section (sym)->index];

  new_map_sym.value = sym->value;
  new_map_sym.type = name[1];

  /* Assume that most mapping symbols appear in order of increasing
     value.  If they were randomly distributed, it would be faster to
     always push here and then sort at first use.  */
  if (!VEC_empty (arm_mapping_symbol_s, *map_p))
    {
      struct arm_mapping_symbol *prev_map_sym;

      prev_map_sym = VEC_last (arm_mapping_symbol_s, *map_p);
      if (prev_map_sym->value >= sym->value)
	{
	  /* Out-of-order symbol: insert at its sorted position
	     instead of appending.  */
	  unsigned int idx;
	  idx = VEC_lower_bound (arm_mapping_symbol_s, *map_p, &new_map_sym,
				 arm_compare_mapping_symbols);
	  VEC_safe_insert (arm_mapping_symbol_s, *map_p, idx, &new_map_sym);
	  return;
	}
    }

  VEC_safe_push (arm_mapping_symbol_s, *map_p, &new_map_sym);
}
9691
9692 static void
9693 arm_write_pc (struct regcache *regcache, CORE_ADDR pc)
9694 {
9695 struct gdbarch *gdbarch = get_regcache_arch (regcache);
9696 regcache_cooked_write_unsigned (regcache, ARM_PC_REGNUM, pc);
9697
9698 /* If necessary, set the T bit. */
9699 if (arm_apcs_32)
9700 {
9701 ULONGEST val, t_bit;
9702 regcache_cooked_read_unsigned (regcache, ARM_PS_REGNUM, &val);
9703 t_bit = arm_psr_thumb_bit (gdbarch);
9704 if (arm_pc_is_thumb (gdbarch, pc))
9705 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9706 val | t_bit);
9707 else
9708 regcache_cooked_write_unsigned (regcache, ARM_PS_REGNUM,
9709 val & ~t_bit);
9710 }
9711 }
9712
/* Read the contents of a NEON quad register, by reading from two
   double registers.  This is used to implement the quad pseudo
   registers, and for argument passing in case the quad registers are
   missing; vectors are passed in quad registers when using the VFP
   ABI, even if a NEON unit is not present.  REGNUM is the index of
   the quad register, in [0, 15].  */

static enum register_status
arm_neon_quad_read (struct gdbarch *gdbarch, struct regcache *regcache,
		    int regnum, gdb_byte *buf)
{
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;
  enum register_status status;

  /* qN overlays the double registers d(2N) and d(2N+1); locate the
     first one by name.  */
  xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
  double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
					       strlen (name_buf));

  /* d0 is always the least significant half of q0.  */
  if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
    offset = 8;
  else
    offset = 0;

  /* Propagate an unavailable/error status from either half.  */
  status = regcache_raw_read (regcache, double_regnum, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  offset = 8 - offset;
  status = regcache_raw_read (regcache, double_regnum + 1, reg_buf);
  if (status != REG_VALID)
    return status;
  memcpy (buf + offset, reg_buf, 8);

  return REG_VALID;
}
9752
/* Implement the gdbarch pseudo_register_read method.  Pseudo REGNUM
   is, after rebasing past the raw registers: an sN register (0..31)
   read as one half of the underlying dN, or a qN register (32..47)
   read via arm_neon_quad_read.  */

static enum register_status
arm_pseudo_read (struct gdbarch *gdbarch, struct regcache *regcache,
		 int regnum, gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    return arm_neon_quad_read (gdbarch, regcache, regnum - 32, buf);
  else
    {
      enum register_status status;

      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* sN overlays half of d(N/2); look the double up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      status = regcache_raw_read (regcache, double_regnum, reg_buf);
      if (status == REG_VALID)
	memcpy (buf, reg_buf + offset, 4);
      return status;
    }
}
9791
9792 /* Store the contents of BUF to a NEON quad register, by writing to
9793 two double registers. This is used to implement the quad pseudo
9794 registers, and for argument passing in case the quad registers are
9795 missing; vectors are passed in quad registers when using the VFP
9796 ABI, even if a NEON unit is not present. REGNUM is the index
9797 of the quad register, in [0, 15]. */
9798
9799 static void
9800 arm_neon_quad_write (struct gdbarch *gdbarch, struct regcache *regcache,
9801 int regnum, const gdb_byte *buf)
9802 {
9803 char name_buf[4];
9804 int offset, double_regnum;
9805
9806 xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum << 1);
9807 double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
9808 strlen (name_buf));
9809
9810 /* d0 is always the least significant half of q0. */
9811 if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
9812 offset = 8;
9813 else
9814 offset = 0;
9815
9816 regcache_raw_write (regcache, double_regnum, buf + offset);
9817 offset = 8 - offset;
9818 regcache_raw_write (regcache, double_regnum + 1, buf + offset);
9819 }
9820
/* Implement the gdbarch pseudo_register_write method.  Pseudo REGNUM
   is, after rebasing past the raw registers: an sN register (0..31)
   written as one half of the underlying dN, or a qN register (32..47)
   written via arm_neon_quad_write.  */

static void
arm_pseudo_write (struct gdbarch *gdbarch, struct regcache *regcache,
		  int regnum, const gdb_byte *buf)
{
  const int num_regs = gdbarch_num_regs (gdbarch);
  char name_buf[4];
  gdb_byte reg_buf[8];
  int offset, double_regnum;

  gdb_assert (regnum >= num_regs);
  regnum -= num_regs;

  if (gdbarch_tdep (gdbarch)->have_neon_pseudos && regnum >= 32 && regnum < 48)
    /* Quad-precision register.  */
    arm_neon_quad_write (gdbarch, regcache, regnum - 32, buf);
  else
    {
      /* Single-precision register.  */
      gdb_assert (regnum < 32);

      /* s0 is always the least significant half of d0.  */
      if (gdbarch_byte_order (gdbarch) == BFD_ENDIAN_BIG)
	offset = (regnum & 1) ? 0 : 4;
      else
	offset = (regnum & 1) ? 4 : 0;

      /* sN overlays half of d(N/2); look the double up by name.  */
      xsnprintf (name_buf, sizeof (name_buf), "d%d", regnum >> 1);
      double_regnum = user_reg_map_name_to_regnum (gdbarch, name_buf,
						   strlen (name_buf));

      /* Read-modify-write the containing double register so the other
	 half is preserved.  */
      regcache_raw_read (regcache, double_regnum, reg_buf);
      memcpy (reg_buf + offset, buf, 4);
      regcache_raw_write (regcache, double_regnum, reg_buf);
    }
}
9856
/* User-register read callback: BATON points to the number of the
   register whose value in FRAME should be returned.  */

static struct value *
value_of_arm_user_reg (struct frame_info *frame, const void *baton)
{
  const int *reg_p = (const int *) baton;
  return value_of_register (*reg_p, frame);
}
9863 \f
9864 static enum gdb_osabi
9865 arm_elf_osabi_sniffer (bfd *abfd)
9866 {
9867 unsigned int elfosabi;
9868 enum gdb_osabi osabi = GDB_OSABI_UNKNOWN;
9869
9870 elfosabi = elf_elfheader (abfd)->e_ident[EI_OSABI];
9871
9872 if (elfosabi == ELFOSABI_ARM)
9873 /* GNU tools use this value. Check note sections in this case,
9874 as well. */
9875 bfd_map_over_sections (abfd,
9876 generic_elf_osabi_sniff_abi_tag_sections,
9877 &osabi);
9878
9879 /* Anything else will be handled by the generic ELF sniffer. */
9880 return osabi;
9881 }
9882
9883 static int
9884 arm_register_reggroup_p (struct gdbarch *gdbarch, int regnum,
9885 struct reggroup *group)
9886 {
9887 /* FPS register's type is INT, but belongs to float_reggroup. Beside
9888 this, FPS register belongs to save_regroup, restore_reggroup, and
9889 all_reggroup, of course. */
9890 if (regnum == ARM_FPS_REGNUM)
9891 return (group == float_reggroup
9892 || group == save_reggroup
9893 || group == restore_reggroup
9894 || group == all_reggroup);
9895 else
9896 return default_register_reggroup_p (gdbarch, regnum, group);
9897 }
9898
9899 \f
/* For backward-compatibility we allow two 'g' packet lengths with
   the remote protocol depending on whether FPA registers are
   supplied.  M-profile targets do not have FPA registers, but some
   stubs already exist in the wild which use a 'g' packet which
   supplies them albeit with dummy values.  The packet format which
   includes FPA registers should be considered deprecated for
   M-profile targets.  */

static void
arm_register_g_packet_guesses (struct gdbarch *gdbarch)
{
  if (gdbarch_tdep (gdbarch)->is_m)
    {
      /* If we know from the executable this is an M-profile target,
	 cater for remote targets whose register set layout is the
	 same as the FPA layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; f0-f7; fps,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (8 * FP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_fpa_layout);

      /* The regular M-profile layout.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + INT_REGISTER_SIZE,
				      tdesc_arm_with_m);

      /* M-profile plus M4F VFP.  */
      register_remote_g_packet_guess (gdbarch,
				      /* r0-r12,sp,lr,pc; d0-d15; fpscr,xpsr */
				      (16 * INT_REGISTER_SIZE)
				      + (16 * VFP_REGISTER_SIZE)
				      + (2 * INT_REGISTER_SIZE),
				      tdesc_arm_with_m_vfp_d16);
    }

  /* Otherwise we don't have a useful guess.  */
}
9941
9942 \f
9943 /* Initialize the current architecture based on INFO. If possible,
9944 re-use an architecture from ARCHES, which is a list of
9945 architectures already created during this debugging session.
9946
9947 Called e.g. at program startup, when reading a core file, and when
9948 reading a binary file. */
9949
9950 static struct gdbarch *
9951 arm_gdbarch_init (struct gdbarch_info info, struct gdbarch_list *arches)
9952 {
9953 struct gdbarch_tdep *tdep;
9954 struct gdbarch *gdbarch;
9955 struct gdbarch_list *best_arch;
9956 enum arm_abi_kind arm_abi = arm_abi_global;
9957 enum arm_float_model fp_model = arm_fp_model;
9958 struct tdesc_arch_data *tdesc_data = NULL;
9959 int i, is_m = 0;
9960 int vfp_register_count = 0, have_vfp_pseudos = 0, have_neon_pseudos = 0;
9961 int have_wmmx_registers = 0;
9962 int have_neon = 0;
9963 int have_fpa_registers = 1;
9964 const struct target_desc *tdesc = info.target_desc;
9965
9966 /* If we have an object to base this architecture on, try to determine
9967 its ABI. */
9968
9969 if (arm_abi == ARM_ABI_AUTO && info.abfd != NULL)
9970 {
9971 int ei_osabi, e_flags;
9972
9973 switch (bfd_get_flavour (info.abfd))
9974 {
9975 case bfd_target_aout_flavour:
9976 /* Assume it's an old APCS-style ABI. */
9977 arm_abi = ARM_ABI_APCS;
9978 break;
9979
9980 case bfd_target_coff_flavour:
9981 /* Assume it's an old APCS-style ABI. */
9982 /* XXX WinCE? */
9983 arm_abi = ARM_ABI_APCS;
9984 break;
9985
9986 case bfd_target_elf_flavour:
9987 ei_osabi = elf_elfheader (info.abfd)->e_ident[EI_OSABI];
9988 e_flags = elf_elfheader (info.abfd)->e_flags;
9989
9990 if (ei_osabi == ELFOSABI_ARM)
9991 {
9992 /* GNU tools used to use this value, but do not for EABI
9993 objects. There's nowhere to tag an EABI version
9994 anyway, so assume APCS. */
9995 arm_abi = ARM_ABI_APCS;
9996 }
9997 else if (ei_osabi == ELFOSABI_NONE || ei_osabi == ELFOSABI_GNU)
9998 {
9999 int eabi_ver = EF_ARM_EABI_VERSION (e_flags);
10000 int attr_arch, attr_profile;
10001
10002 switch (eabi_ver)
10003 {
10004 case EF_ARM_EABI_UNKNOWN:
10005 /* Assume GNU tools. */
10006 arm_abi = ARM_ABI_APCS;
10007 break;
10008
10009 case EF_ARM_EABI_VER4:
10010 case EF_ARM_EABI_VER5:
10011 arm_abi = ARM_ABI_AAPCS;
10012 /* EABI binaries default to VFP float ordering.
10013 They may also contain build attributes that can
10014 be used to identify if the VFP argument-passing
10015 ABI is in use. */
10016 if (fp_model == ARM_FLOAT_AUTO)
10017 {
10018 #ifdef HAVE_ELF
10019 switch (bfd_elf_get_obj_attr_int (info.abfd,
10020 OBJ_ATTR_PROC,
10021 Tag_ABI_VFP_args))
10022 {
10023 case AEABI_VFP_args_base:
10024 /* "The user intended FP parameter/result
10025 passing to conform to AAPCS, base
10026 variant". */
10027 fp_model = ARM_FLOAT_SOFT_VFP;
10028 break;
10029 case AEABI_VFP_args_vfp:
10030 /* "The user intended FP parameter/result
10031 passing to conform to AAPCS, VFP
10032 variant". */
10033 fp_model = ARM_FLOAT_VFP;
10034 break;
10035 case AEABI_VFP_args_toolchain:
10036 /* "The user intended FP parameter/result
10037 passing to conform to tool chain-specific
10038 conventions" - we don't know any such
10039 conventions, so leave it as "auto". */
10040 break;
10041 case AEABI_VFP_args_compatible:
10042 /* "Code is compatible with both the base
10043 and VFP variants; the user did not permit
10044 non-variadic functions to pass FP
10045 parameters/results" - leave it as
10046 "auto". */
10047 break;
10048 default:
10049 /* Attribute value not mentioned in the
10050 November 2012 ABI, so leave it as
10051 "auto". */
10052 break;
10053 }
10054 #else
10055 fp_model = ARM_FLOAT_SOFT_VFP;
10056 #endif
10057 }
10058 break;
10059
10060 default:
10061 /* Leave it as "auto". */
10062 warning (_("unknown ARM EABI version 0x%x"), eabi_ver);
10063 break;
10064 }
10065
10066 #ifdef HAVE_ELF
10067 /* Detect M-profile programs. This only works if the
10068 executable file includes build attributes; GCC does
10069 copy them to the executable, but e.g. RealView does
10070 not. */
10071 attr_arch = bfd_elf_get_obj_attr_int (info.abfd, OBJ_ATTR_PROC,
10072 Tag_CPU_arch);
10073 attr_profile = bfd_elf_get_obj_attr_int (info.abfd,
10074 OBJ_ATTR_PROC,
10075 Tag_CPU_arch_profile);
10076 /* GCC specifies the profile for v6-M; RealView only
10077 specifies the profile for architectures starting with
10078 V7 (as opposed to architectures with a tag
10079 numerically greater than TAG_CPU_ARCH_V7). */
10080 if (!tdesc_has_registers (tdesc)
10081 && (attr_arch == TAG_CPU_ARCH_V6_M
10082 || attr_arch == TAG_CPU_ARCH_V6S_M
10083 || attr_profile == 'M'))
10084 is_m = 1;
10085 #endif
10086 }
10087
10088 if (fp_model == ARM_FLOAT_AUTO)
10089 {
10090 int e_flags = elf_elfheader (info.abfd)->e_flags;
10091
10092 switch (e_flags & (EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT))
10093 {
10094 case 0:
10095 /* Leave it as "auto". Strictly speaking this case
10096 means FPA, but almost nobody uses that now, and
10097 many toolchains fail to set the appropriate bits
10098 for the floating-point model they use. */
10099 break;
10100 case EF_ARM_SOFT_FLOAT:
10101 fp_model = ARM_FLOAT_SOFT_FPA;
10102 break;
10103 case EF_ARM_VFP_FLOAT:
10104 fp_model = ARM_FLOAT_VFP;
10105 break;
10106 case EF_ARM_SOFT_FLOAT | EF_ARM_VFP_FLOAT:
10107 fp_model = ARM_FLOAT_SOFT_VFP;
10108 break;
10109 }
10110 }
10111
10112 if (e_flags & EF_ARM_BE8)
10113 info.byte_order_for_code = BFD_ENDIAN_LITTLE;
10114
10115 break;
10116
10117 default:
10118 /* Leave it as "auto". */
10119 break;
10120 }
10121 }
10122
10123 /* Check any target description for validity. */
10124 if (tdesc_has_registers (tdesc))
10125 {
10126 /* For most registers we require GDB's default names; but also allow
10127 the numeric names for sp / lr / pc, as a convenience. */
10128 static const char *const arm_sp_names[] = { "r13", "sp", NULL };
10129 static const char *const arm_lr_names[] = { "r14", "lr", NULL };
10130 static const char *const arm_pc_names[] = { "r15", "pc", NULL };
10131
10132 const struct tdesc_feature *feature;
10133 int valid_p;
10134
10135 feature = tdesc_find_feature (tdesc,
10136 "org.gnu.gdb.arm.core");
10137 if (feature == NULL)
10138 {
10139 feature = tdesc_find_feature (tdesc,
10140 "org.gnu.gdb.arm.m-profile");
10141 if (feature == NULL)
10142 return NULL;
10143 else
10144 is_m = 1;
10145 }
10146
10147 tdesc_data = tdesc_data_alloc ();
10148
10149 valid_p = 1;
10150 for (i = 0; i < ARM_SP_REGNUM; i++)
10151 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10152 arm_register_names[i]);
10153 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10154 ARM_SP_REGNUM,
10155 arm_sp_names);
10156 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10157 ARM_LR_REGNUM,
10158 arm_lr_names);
10159 valid_p &= tdesc_numbered_register_choices (feature, tdesc_data,
10160 ARM_PC_REGNUM,
10161 arm_pc_names);
10162 if (is_m)
10163 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10164 ARM_PS_REGNUM, "xpsr");
10165 else
10166 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10167 ARM_PS_REGNUM, "cpsr");
10168
10169 if (!valid_p)
10170 {
10171 tdesc_data_cleanup (tdesc_data);
10172 return NULL;
10173 }
10174
10175 feature = tdesc_find_feature (tdesc,
10176 "org.gnu.gdb.arm.fpa");
10177 if (feature != NULL)
10178 {
10179 valid_p = 1;
10180 for (i = ARM_F0_REGNUM; i <= ARM_FPS_REGNUM; i++)
10181 valid_p &= tdesc_numbered_register (feature, tdesc_data, i,
10182 arm_register_names[i]);
10183 if (!valid_p)
10184 {
10185 tdesc_data_cleanup (tdesc_data);
10186 return NULL;
10187 }
10188 }
10189 else
10190 have_fpa_registers = 0;
10191
10192 feature = tdesc_find_feature (tdesc,
10193 "org.gnu.gdb.xscale.iwmmxt");
10194 if (feature != NULL)
10195 {
10196 static const char *const iwmmxt_names[] = {
10197 "wR0", "wR1", "wR2", "wR3", "wR4", "wR5", "wR6", "wR7",
10198 "wR8", "wR9", "wR10", "wR11", "wR12", "wR13", "wR14", "wR15",
10199 "wCID", "wCon", "wCSSF", "wCASF", "", "", "", "",
10200 "wCGR0", "wCGR1", "wCGR2", "wCGR3", "", "", "", "",
10201 };
10202
10203 valid_p = 1;
10204 for (i = ARM_WR0_REGNUM; i <= ARM_WR15_REGNUM; i++)
10205 valid_p
10206 &= tdesc_numbered_register (feature, tdesc_data, i,
10207 iwmmxt_names[i - ARM_WR0_REGNUM]);
10208
10209 /* Check for the control registers, but do not fail if they
10210 are missing. */
10211 for (i = ARM_WC0_REGNUM; i <= ARM_WCASF_REGNUM; i++)
10212 tdesc_numbered_register (feature, tdesc_data, i,
10213 iwmmxt_names[i - ARM_WR0_REGNUM]);
10214
10215 for (i = ARM_WCGR0_REGNUM; i <= ARM_WCGR3_REGNUM; i++)
10216 valid_p
10217 &= tdesc_numbered_register (feature, tdesc_data, i,
10218 iwmmxt_names[i - ARM_WR0_REGNUM]);
10219
10220 if (!valid_p)
10221 {
10222 tdesc_data_cleanup (tdesc_data);
10223 return NULL;
10224 }
10225
10226 have_wmmx_registers = 1;
10227 }
10228
10229 /* If we have a VFP unit, check whether the single precision registers
10230 are present. If not, then we will synthesize them as pseudo
10231 registers. */
10232 feature = tdesc_find_feature (tdesc,
10233 "org.gnu.gdb.arm.vfp");
10234 if (feature != NULL)
10235 {
10236 static const char *const vfp_double_names[] = {
10237 "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
10238 "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
10239 "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
10240 "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
10241 };
10242
10243 /* Require the double precision registers. There must be either
10244 16 or 32. */
10245 valid_p = 1;
10246 for (i = 0; i < 32; i++)
10247 {
10248 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10249 ARM_D0_REGNUM + i,
10250 vfp_double_names[i]);
10251 if (!valid_p)
10252 break;
10253 }
10254 if (!valid_p && i == 16)
10255 valid_p = 1;
10256
10257 /* Also require FPSCR. */
10258 valid_p &= tdesc_numbered_register (feature, tdesc_data,
10259 ARM_FPSCR_REGNUM, "fpscr");
10260 if (!valid_p)
10261 {
10262 tdesc_data_cleanup (tdesc_data);
10263 return NULL;
10264 }
10265
10266 if (tdesc_unnumbered_register (feature, "s0") == 0)
10267 have_vfp_pseudos = 1;
10268
10269 vfp_register_count = i;
10270
10271 /* If we have VFP, also check for NEON. The architecture allows
10272 NEON without VFP (integer vector operations only), but GDB
10273 does not support that. */
10274 feature = tdesc_find_feature (tdesc,
10275 "org.gnu.gdb.arm.neon");
10276 if (feature != NULL)
10277 {
10278 /* NEON requires 32 double-precision registers. */
10279 if (i != 32)
10280 {
10281 tdesc_data_cleanup (tdesc_data);
10282 return NULL;
10283 }
10284
10285 /* If there are quad registers defined by the stub, use
10286 their type; otherwise (normally) provide them with
10287 the default type. */
10288 if (tdesc_unnumbered_register (feature, "q0") == 0)
10289 have_neon_pseudos = 1;
10290
10291 have_neon = 1;
10292 }
10293 }
10294 }
10295
10296 /* If there is already a candidate, use it. */
10297 for (best_arch = gdbarch_list_lookup_by_info (arches, &info);
10298 best_arch != NULL;
10299 best_arch = gdbarch_list_lookup_by_info (best_arch->next, &info))
10300 {
10301 if (arm_abi != ARM_ABI_AUTO
10302 && arm_abi != gdbarch_tdep (best_arch->gdbarch)->arm_abi)
10303 continue;
10304
10305 if (fp_model != ARM_FLOAT_AUTO
10306 && fp_model != gdbarch_tdep (best_arch->gdbarch)->fp_model)
10307 continue;
10308
10309 /* There are various other properties in tdep that we do not
10310 need to check here: those derived from a target description,
10311 since gdbarches with a different target description are
10312 automatically disqualified. */
10313
10314 /* Do check is_m, though, since it might come from the binary. */
10315 if (is_m != gdbarch_tdep (best_arch->gdbarch)->is_m)
10316 continue;
10317
10318 /* Found a match. */
10319 break;
10320 }
10321
10322 if (best_arch != NULL)
10323 {
10324 if (tdesc_data != NULL)
10325 tdesc_data_cleanup (tdesc_data);
10326 return best_arch->gdbarch;
10327 }
10328
10329 tdep = XCNEW (struct gdbarch_tdep);
10330 gdbarch = gdbarch_alloc (&info, tdep);
10331
10332 /* Record additional information about the architecture we are defining.
10333 These are gdbarch discriminators, like the OSABI. */
10334 tdep->arm_abi = arm_abi;
10335 tdep->fp_model = fp_model;
10336 tdep->is_m = is_m;
10337 tdep->have_fpa_registers = have_fpa_registers;
10338 tdep->have_wmmx_registers = have_wmmx_registers;
10339 gdb_assert (vfp_register_count == 0
10340 || vfp_register_count == 16
10341 || vfp_register_count == 32);
10342 tdep->vfp_register_count = vfp_register_count;
10343 tdep->have_vfp_pseudos = have_vfp_pseudos;
10344 tdep->have_neon_pseudos = have_neon_pseudos;
10345 tdep->have_neon = have_neon;
10346
10347 arm_register_g_packet_guesses (gdbarch);
10348
10349 /* Breakpoints. */
10350 switch (info.byte_order_for_code)
10351 {
10352 case BFD_ENDIAN_BIG:
10353 tdep->arm_breakpoint = arm_default_arm_be_breakpoint;
10354 tdep->arm_breakpoint_size = sizeof (arm_default_arm_be_breakpoint);
10355 tdep->thumb_breakpoint = arm_default_thumb_be_breakpoint;
10356 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_be_breakpoint);
10357
10358 break;
10359
10360 case BFD_ENDIAN_LITTLE:
10361 tdep->arm_breakpoint = arm_default_arm_le_breakpoint;
10362 tdep->arm_breakpoint_size = sizeof (arm_default_arm_le_breakpoint);
10363 tdep->thumb_breakpoint = arm_default_thumb_le_breakpoint;
10364 tdep->thumb_breakpoint_size = sizeof (arm_default_thumb_le_breakpoint);
10365
10366 break;
10367
10368 default:
10369 internal_error (__FILE__, __LINE__,
10370 _("arm_gdbarch_init: bad byte order for float format"));
10371 }
10372
10373 /* On ARM targets char defaults to unsigned. */
10374 set_gdbarch_char_signed (gdbarch, 0);
10375
10376 /* Note: for displaced stepping, this includes the breakpoint, and one word
10377 of additional scratch space. This setting isn't used for anything beside
10378 displaced stepping at present. */
10379 set_gdbarch_max_insn_length (gdbarch, 4 * DISPLACED_MODIFIED_INSNS);
10380
10381 /* This should be low enough for everything. */
10382 tdep->lowest_pc = 0x20;
10383 tdep->jb_pc = -1; /* Longjump support not enabled by default. */
10384
10385 /* The default, for both APCS and AAPCS, is to return small
10386 structures in registers. */
10387 tdep->struct_return = reg_struct_return;
10388
10389 set_gdbarch_push_dummy_call (gdbarch, arm_push_dummy_call);
10390 set_gdbarch_frame_align (gdbarch, arm_frame_align);
10391
10392 set_gdbarch_write_pc (gdbarch, arm_write_pc);
10393
10394 /* Frame handling. */
10395 set_gdbarch_dummy_id (gdbarch, arm_dummy_id);
10396 set_gdbarch_unwind_pc (gdbarch, arm_unwind_pc);
10397 set_gdbarch_unwind_sp (gdbarch, arm_unwind_sp);
10398
10399 frame_base_set_default (gdbarch, &arm_normal_base);
10400
10401 /* Address manipulation. */
10402 set_gdbarch_addr_bits_remove (gdbarch, arm_addr_bits_remove);
10403
10404 /* Advance PC across function entry code. */
10405 set_gdbarch_skip_prologue (gdbarch, arm_skip_prologue);
10406
10407 /* Detect whether PC is at a point where the stack has been destroyed. */
10408 set_gdbarch_stack_frame_destroyed_p (gdbarch, arm_stack_frame_destroyed_p);
10409
10410 /* Skip trampolines. */
10411 set_gdbarch_skip_trampoline_code (gdbarch, arm_skip_stub);
10412
10413 /* The stack grows downward. */
10414 set_gdbarch_inner_than (gdbarch, core_addr_lessthan);
10415
10416 /* Breakpoint manipulation. */
10417 set_gdbarch_breakpoint_from_pc (gdbarch, arm_breakpoint_from_pc);
10418 set_gdbarch_remote_breakpoint_from_pc (gdbarch,
10419 arm_remote_breakpoint_from_pc);
10420
10421 /* Information about registers, etc. */
10422 set_gdbarch_sp_regnum (gdbarch, ARM_SP_REGNUM);
10423 set_gdbarch_pc_regnum (gdbarch, ARM_PC_REGNUM);
10424 set_gdbarch_num_regs (gdbarch, ARM_NUM_REGS);
10425 set_gdbarch_register_type (gdbarch, arm_register_type);
10426 set_gdbarch_register_reggroup_p (gdbarch, arm_register_reggroup_p);
10427
10428 /* This "info float" is FPA-specific. Use the generic version if we
10429 do not have FPA. */
10430 if (gdbarch_tdep (gdbarch)->have_fpa_registers)
10431 set_gdbarch_print_float_info (gdbarch, arm_print_float_info);
10432
10433 /* Internal <-> external register number maps. */
10434 set_gdbarch_dwarf2_reg_to_regnum (gdbarch, arm_dwarf_reg_to_regnum);
10435 set_gdbarch_register_sim_regno (gdbarch, arm_register_sim_regno);
10436
10437 set_gdbarch_register_name (gdbarch, arm_register_name);
10438
10439 /* Returning results. */
10440 set_gdbarch_return_value (gdbarch, arm_return_value);
10441
10442 /* Disassembly. */
10443 set_gdbarch_print_insn (gdbarch, gdb_print_insn_arm);
10444
10445 /* Minsymbol frobbing. */
10446 set_gdbarch_elf_make_msymbol_special (gdbarch, arm_elf_make_msymbol_special);
10447 set_gdbarch_coff_make_msymbol_special (gdbarch,
10448 arm_coff_make_msymbol_special);
10449 set_gdbarch_record_special_symbol (gdbarch, arm_record_special_symbol);
10450
10451 /* Thumb-2 IT block support. */
10452 set_gdbarch_adjust_breakpoint_address (gdbarch,
10453 arm_adjust_breakpoint_address);
10454
10455 /* Virtual tables. */
10456 set_gdbarch_vbit_in_delta (gdbarch, 1);
10457
10458 /* Hook in the ABI-specific overrides, if they have been registered. */
10459 gdbarch_init_osabi (info, gdbarch);
10460
10461 dwarf2_frame_set_init_reg (gdbarch, arm_dwarf2_frame_init_reg);
10462
10463 /* Add some default predicates. */
10464 if (is_m)
10465 frame_unwind_append_unwinder (gdbarch, &arm_m_exception_unwind);
10466 frame_unwind_append_unwinder (gdbarch, &arm_stub_unwind);
10467 dwarf2_append_unwinders (gdbarch);
10468 frame_unwind_append_unwinder (gdbarch, &arm_exidx_unwind);
10469 frame_unwind_append_unwinder (gdbarch, &arm_prologue_unwind);
10470
10471 /* Now we have tuned the configuration, set a few final things,
10472 based on what the OS ABI has told us. */
10473
10474 /* If the ABI is not otherwise marked, assume the old GNU APCS. EABI
10475 binaries are always marked. */
10476 if (tdep->arm_abi == ARM_ABI_AUTO)
10477 tdep->arm_abi = ARM_ABI_APCS;
10478
10479 /* Watchpoints are not steppable. */
10480 set_gdbarch_have_nonsteppable_watchpoint (gdbarch, 1);
10481
10482 /* We used to default to FPA for generic ARM, but almost nobody
10483 uses that now, and we now provide a way for the user to force
10484 the model. So default to the most useful variant. */
10485 if (tdep->fp_model == ARM_FLOAT_AUTO)
10486 tdep->fp_model = ARM_FLOAT_SOFT_FPA;
10487
10488 if (tdep->jb_pc >= 0)
10489 set_gdbarch_get_longjmp_target (gdbarch, arm_get_longjmp_target);
10490
10491 /* Floating point sizes and format. */
10492 set_gdbarch_float_format (gdbarch, floatformats_ieee_single);
10493 if (tdep->fp_model == ARM_FLOAT_SOFT_FPA || tdep->fp_model == ARM_FLOAT_FPA)
10494 {
10495 set_gdbarch_double_format
10496 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10497 set_gdbarch_long_double_format
10498 (gdbarch, floatformats_ieee_double_littlebyte_bigword);
10499 }
10500 else
10501 {
10502 set_gdbarch_double_format (gdbarch, floatformats_ieee_double);
10503 set_gdbarch_long_double_format (gdbarch, floatformats_ieee_double);
10504 }
10505
10506 if (have_vfp_pseudos)
10507 {
10508 /* NOTE: These are the only pseudo registers used by
10509 the ARM target at the moment. If more are added, a
10510 little more care in numbering will be needed. */
10511
10512 int num_pseudos = 32;
10513 if (have_neon_pseudos)
10514 num_pseudos += 16;
10515 set_gdbarch_num_pseudo_regs (gdbarch, num_pseudos);
10516 set_gdbarch_pseudo_register_read (gdbarch, arm_pseudo_read);
10517 set_gdbarch_pseudo_register_write (gdbarch, arm_pseudo_write);
10518 }
10519
10520 if (tdesc_data)
10521 {
10522 set_tdesc_pseudo_register_name (gdbarch, arm_register_name);
10523
10524 tdesc_use_registers (gdbarch, tdesc, tdesc_data);
10525
10526 /* Override tdesc_register_type to adjust the types of VFP
10527 registers for NEON. */
10528 set_gdbarch_register_type (gdbarch, arm_register_type);
10529 }
10530
  /* Add standard register aliases.  We add aliases even for those
     names which are used by the current architecture - it's simpler,
     and does no harm, since nothing ever lists user registers.  */
10534 for (i = 0; i < ARRAY_SIZE (arm_register_aliases); i++)
10535 user_reg_add (gdbarch, arm_register_aliases[i].name,
10536 value_of_arm_user_reg, &arm_register_aliases[i].regnum);
10537
10538 return gdbarch;
10539 }
10540
10541 static void
10542 arm_dump_tdep (struct gdbarch *gdbarch, struct ui_file *file)
10543 {
10544 struct gdbarch_tdep *tdep = gdbarch_tdep (gdbarch);
10545
10546 if (tdep == NULL)
10547 return;
10548
10549 fprintf_unfiltered (file, _("arm_dump_tdep: Lowest pc = 0x%lx"),
10550 (unsigned long) tdep->lowest_pc);
10551 }
10552
10553 extern initialize_file_ftype _initialize_arm_tdep; /* -Wmissing-prototypes */
10554
10555 void
10556 _initialize_arm_tdep (void)
10557 {
10558 struct ui_file *stb;
10559 long length;
10560 struct cmd_list_element *new_set, *new_show;
10561 const char *setname;
10562 const char *setdesc;
10563 const char *const *regnames;
10564 int numregs, i, j;
10565 static char *helptext;
10566 char regdesc[1024], *rdptr = regdesc;
10567 size_t rest = sizeof (regdesc);
10568
10569 gdbarch_register (bfd_arch_arm, arm_gdbarch_init, arm_dump_tdep);
10570
10571 arm_objfile_data_key
10572 = register_objfile_data_with_cleanup (NULL, arm_objfile_data_free);
10573
10574 /* Add ourselves to objfile event chain. */
10575 observer_attach_new_objfile (arm_exidx_new_objfile);
10576 arm_exidx_data_key
10577 = register_objfile_data_with_cleanup (NULL, arm_exidx_data_free);
10578
10579 /* Register an ELF OS ABI sniffer for ARM binaries. */
10580 gdbarch_register_osabi_sniffer (bfd_arch_arm,
10581 bfd_target_elf_flavour,
10582 arm_elf_osabi_sniffer);
10583
10584 /* Initialize the standard target descriptions. */
10585 initialize_tdesc_arm_with_m ();
10586 initialize_tdesc_arm_with_m_fpa_layout ();
10587 initialize_tdesc_arm_with_m_vfp_d16 ();
10588 initialize_tdesc_arm_with_iwmmxt ();
10589 initialize_tdesc_arm_with_vfpv2 ();
10590 initialize_tdesc_arm_with_vfpv3 ();
10591 initialize_tdesc_arm_with_neon ();
10592
10593 /* Get the number of possible sets of register names defined in opcodes. */
10594 num_disassembly_options = get_arm_regname_num_options ();
10595
10596 /* Add root prefix command for all "set arm"/"show arm" commands. */
10597 add_prefix_cmd ("arm", no_class, set_arm_command,
10598 _("Various ARM-specific commands."),
10599 &setarmcmdlist, "set arm ", 0, &setlist);
10600
10601 add_prefix_cmd ("arm", no_class, show_arm_command,
10602 _("Various ARM-specific commands."),
10603 &showarmcmdlist, "show arm ", 0, &showlist);
10604
10605 /* Sync the opcode insn printer with our register viewer. */
10606 parse_arm_disassembler_option ("reg-names-std");
10607
10608 /* Initialize the array that will be passed to
10609 add_setshow_enum_cmd(). */
10610 valid_disassembly_styles = XNEWVEC (const char *,
10611 num_disassembly_options + 1);
10612 for (i = 0; i < num_disassembly_options; i++)
10613 {
10614 numregs = get_arm_regnames (i, &setname, &setdesc, &regnames);
10615 valid_disassembly_styles[i] = setname;
10616 length = snprintf (rdptr, rest, "%s - %s\n", setname, setdesc);
10617 rdptr += length;
10618 rest -= length;
10619 /* When we find the default names, tell the disassembler to use
10620 them. */
10621 if (!strcmp (setname, "std"))
10622 {
10623 disassembly_style = setname;
10624 set_arm_regname_option (i);
10625 }
10626 }
10627 /* Mark the end of valid options. */
10628 valid_disassembly_styles[num_disassembly_options] = NULL;
10629
10630 /* Create the help text. */
10631 stb = mem_fileopen ();
10632 fprintf_unfiltered (stb, "%s%s%s",
10633 _("The valid values are:\n"),
10634 regdesc,
10635 _("The default is \"std\"."));
10636 helptext = ui_file_xstrdup (stb, NULL);
10637 ui_file_delete (stb);
10638
10639 add_setshow_enum_cmd("disassembler", no_class,
10640 valid_disassembly_styles, &disassembly_style,
10641 _("Set the disassembly style."),
10642 _("Show the disassembly style."),
10643 helptext,
10644 set_disassembly_style_sfunc,
10645 NULL, /* FIXME: i18n: The disassembly style is
10646 \"%s\". */
10647 &setarmcmdlist, &showarmcmdlist);
10648
10649 add_setshow_boolean_cmd ("apcs32", no_class, &arm_apcs_32,
10650 _("Set usage of ARM 32-bit mode."),
10651 _("Show usage of ARM 32-bit mode."),
10652 _("When off, a 26-bit PC will be used."),
10653 NULL,
10654 NULL, /* FIXME: i18n: Usage of ARM 32-bit
10655 mode is %s. */
10656 &setarmcmdlist, &showarmcmdlist);
10657
10658 /* Add a command to allow the user to force the FPU model. */
10659 add_setshow_enum_cmd ("fpu", no_class, fp_model_strings, &current_fp_model,
10660 _("Set the floating point type."),
10661 _("Show the floating point type."),
10662 _("auto - Determine the FP typefrom the OS-ABI.\n\
10663 softfpa - Software FP, mixed-endian doubles on little-endian ARMs.\n\
10664 fpa - FPA co-processor (GCC compiled).\n\
10665 softvfp - Software FP with pure-endian doubles.\n\
10666 vfp - VFP co-processor."),
10667 set_fp_model_sfunc, show_fp_model,
10668 &setarmcmdlist, &showarmcmdlist);
10669
10670 /* Add a command to allow the user to force the ABI. */
10671 add_setshow_enum_cmd ("abi", class_support, arm_abi_strings, &arm_abi_string,
10672 _("Set the ABI."),
10673 _("Show the ABI."),
10674 NULL, arm_set_abi, arm_show_abi,
10675 &setarmcmdlist, &showarmcmdlist);
10676
10677 /* Add two commands to allow the user to force the assumed
10678 execution mode. */
10679 add_setshow_enum_cmd ("fallback-mode", class_support,
10680 arm_mode_strings, &arm_fallback_mode_string,
10681 _("Set the mode assumed when symbols are unavailable."),
10682 _("Show the mode assumed when symbols are unavailable."),
10683 NULL, NULL, arm_show_fallback_mode,
10684 &setarmcmdlist, &showarmcmdlist);
10685 add_setshow_enum_cmd ("force-mode", class_support,
10686 arm_mode_strings, &arm_force_mode_string,
10687 _("Set the mode assumed even when symbols are available."),
10688 _("Show the mode assumed even when symbols are available."),
10689 NULL, NULL, arm_show_force_mode,
10690 &setarmcmdlist, &showarmcmdlist);
10691
10692 /* Debugging flag. */
10693 add_setshow_boolean_cmd ("arm", class_maintenance, &arm_debug,
10694 _("Set ARM debugging."),
10695 _("Show ARM debugging."),
10696 _("When on, arm-specific debugging is enabled."),
10697 NULL,
10698 NULL, /* FIXME: i18n: "ARM debugging is %s. */
10699 &setdebuglist, &showdebuglist);
10700 }
10701
/* ARM-reversible process record data structures.  */

/* Encoding sizes in bytes: 32-bit ARM, 16-bit Thumb, 32-bit Thumb-2.  */
#define ARM_INSN_SIZE_BYTES 4
#define THUMB_INSN_SIZE_BYTES 2
#define THUMB2_INSN_SIZE_BYTES 4


/* Position of the bit within a 32-bit ARM instruction
   that defines whether the instruction is a load or store.  */
#define INSN_S_L_BIT_NUM 20
10712
/* Allocate a uint32_t array of LENGTH register numbers for REGS and copy
   them in from RECORD_BUF.  Nothing is allocated when LENGTH is zero.
   LENGTH is now evaluated exactly once (the old expansion re-evaluated it
   inside the memcpy size, a multiple-evaluation hazard).  */
#define REG_ALLOC(REGS, LENGTH, RECORD_BUF) \
  do \
    { \
      unsigned int reg_len = LENGTH; \
      if (reg_len) \
	{ \
	  REGS = XNEWVEC (uint32_t, reg_len); \
	  memcpy (&REGS[0], &RECORD_BUF[0], sizeof (uint32_t) * reg_len); \
	} \
    } \
  while (0)
10724
/* Allocate an array of LENGTH struct arm_mem_r records for MEMS and copy
   them in from RECORD_BUF.  Nothing is allocated when LENGTH is zero.
   Fixes two defects in the old expansion: the memcpy destination was
   "&MEMS->len" (the address of the first member, which trips
   -fsanitize=address/array-bounds style checks) rather than the array
   itself, and LENGTH was evaluated a second time for the copy size.  */
#define MEM_ALLOC(MEMS, LENGTH, RECORD_BUF) \
  do \
    { \
      unsigned int mem_len = LENGTH; \
      if (mem_len) \
	{ \
	  MEMS = XNEWVEC (struct arm_mem_r, mem_len); \
	  memcpy (MEMS, &RECORD_BUF[0], \
		  sizeof (struct arm_mem_r) * mem_len); \
	} \
    } \
  while (0)
10737
/* Checks whether insn is already recorded or yet to be decoded: true once
   decode_insn() has deposited at least one register or memory record.  */
#define INSN_RECORDED(ARM_RECORD) \
  (0 != (ARM_RECORD)->reg_rec_count || 0 != (ARM_RECORD)->mem_rec_count)
10741
/* ARM memory record structure: one span of memory an instruction is
   about to overwrite, saved so it can be restored on reverse execution.  */
struct arm_mem_r
{
  uint32_t len;    /* Record length (bytes).  */
  uint32_t addr;   /* Memory address.  */
};
10748
/* ARM instruction record contains opcode of current insn
   and execution state (before entry to decode_insn()),
   contains list of to-be-modified registers and
   memory blocks (on return from decode_insn()).  */

typedef struct insn_decode_record_t
{
  struct gdbarch *gdbarch;
  struct regcache *regcache;
  CORE_ADDR this_addr;          /* Address of the insn being decoded.  */
  uint32_t arm_insn;            /* Raw instruction; should accommodate thumb.  */
  uint32_t cond;                /* Condition code.  */
  uint32_t opcode;              /* Insn opcode.  */
  uint32_t decode;              /* Insn decode bits.  */
  uint32_t mem_rec_count;       /* No of mem records.  */
  uint32_t reg_rec_count;       /* No of reg records.  */
  uint32_t *arm_regs;           /* Registers to be saved for this record.  */
  struct arm_mem_r *arm_mems;   /* Memory to be saved for this record.  */
} insn_decode_record;
10768
10769
/* Checks ARM SBZ and SBO mandatory fields.

   Verify that the LEN-bit field of INSN starting at 1-based bit position
   BIT_NUM is all-ones (SBO non-zero) or all-zeros (SBO zero).  Returns 1
   when the field satisfies the constraint (or LEN is zero), 0 otherwise.

   The previous implementation shifted through the extracted field testing
   "ones & sbo": for SBO == 0 that test can never succeed (x & 0 == 0), so
   every non-trivial should-be-zero check failed; for SBO == 1 the loop
   stopped at the most significant SET bit, wrongly accepting fields such
   as 0b011 (and an all-zero field) as "all ones".  */

static int
sbo_sbz (uint32_t insn, uint32_t bit_num, uint32_t len, uint32_t sbo)
{
  uint32_t mask, field;

  if (!len)
    return 1;

  /* Mask of LEN low-order bits; special-case LEN == 32 because shifting
     by the full width of uint32_t is undefined behavior.  */
  mask = (len < 32) ? ((1u << len) - 1u) : 0xffffffffu;

  /* BIT_NUM is 1-based, so the field starts at bit BIT_NUM - 1.  */
  field = (insn >> (bit_num - 1)) & mask;

  return sbo ? (field == mask) : (field == 0);
}
10793
/* Result codes returned by the ARM process-record entry points.  */
enum arm_record_result
{
  ARM_RECORD_SUCCESS = 0,
  ARM_RECORD_FAILURE = 1
};
10799
/* Which miscellaneous store instruction arm_record_strx is handling:
   store-halfword (STRH) or store-doubleword (STRD).  */
typedef enum
{
  ARM_RECORD_STRH=1,
  ARM_RECORD_STRD
} arm_record_strx_t;
10805
/* Instruction set of the insn being recorded: 32-bit ARM, 16-bit Thumb
   or 32-bit Thumb-2.  */
typedef enum
{
  ARM_RECORD=1,
  THUMB_RECORD,
  THUMB2_RECORD
} record_type_t;
10812
10813
10814 static int
10815 arm_record_strx (insn_decode_record *arm_insn_r, uint32_t *record_buf,
10816 uint32_t *record_buf_mem, arm_record_strx_t str_type)
10817 {
10818
10819 struct regcache *reg_cache = arm_insn_r->regcache;
10820 ULONGEST u_regval[2]= {0};
10821
10822 uint32_t reg_src1 = 0, reg_src2 = 0;
10823 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10824 uint32_t opcode1 = 0;
10825
10826 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
10827 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
10828 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
10829
10830
10831 if (14 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
10832 {
10833 /* 1) Handle misc store, immediate offset. */
10834 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10835 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10836 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10837 regcache_raw_read_unsigned (reg_cache, reg_src1,
10838 &u_regval[0]);
10839 if (ARM_PC_REGNUM == reg_src1)
10840 {
10841 /* If R15 was used as Rn, hence current PC+8. */
10842 u_regval[0] = u_regval[0] + 8;
10843 }
10844 offset_8 = (immed_high << 4) | immed_low;
10845 /* Calculate target store address. */
10846 if (14 == arm_insn_r->opcode)
10847 {
10848 tgt_mem_addr = u_regval[0] + offset_8;
10849 }
10850 else
10851 {
10852 tgt_mem_addr = u_regval[0] - offset_8;
10853 }
10854 if (ARM_RECORD_STRH == str_type)
10855 {
10856 record_buf_mem[0] = 2;
10857 record_buf_mem[1] = tgt_mem_addr;
10858 arm_insn_r->mem_rec_count = 1;
10859 }
10860 else if (ARM_RECORD_STRD == str_type)
10861 {
10862 record_buf_mem[0] = 4;
10863 record_buf_mem[1] = tgt_mem_addr;
10864 record_buf_mem[2] = 4;
10865 record_buf_mem[3] = tgt_mem_addr + 4;
10866 arm_insn_r->mem_rec_count = 2;
10867 }
10868 }
10869 else if (12 == arm_insn_r->opcode || 8 == arm_insn_r->opcode)
10870 {
10871 /* 2) Store, register offset. */
10872 /* Get Rm. */
10873 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10874 /* Get Rn. */
10875 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10876 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10877 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10878 if (15 == reg_src2)
10879 {
10880 /* If R15 was used as Rn, hence current PC+8. */
10881 u_regval[0] = u_regval[0] + 8;
10882 }
10883 /* Calculate target store address, Rn +/- Rm, register offset. */
10884 if (12 == arm_insn_r->opcode)
10885 {
10886 tgt_mem_addr = u_regval[0] + u_regval[1];
10887 }
10888 else
10889 {
10890 tgt_mem_addr = u_regval[1] - u_regval[0];
10891 }
10892 if (ARM_RECORD_STRH == str_type)
10893 {
10894 record_buf_mem[0] = 2;
10895 record_buf_mem[1] = tgt_mem_addr;
10896 arm_insn_r->mem_rec_count = 1;
10897 }
10898 else if (ARM_RECORD_STRD == str_type)
10899 {
10900 record_buf_mem[0] = 4;
10901 record_buf_mem[1] = tgt_mem_addr;
10902 record_buf_mem[2] = 4;
10903 record_buf_mem[3] = tgt_mem_addr + 4;
10904 arm_insn_r->mem_rec_count = 2;
10905 }
10906 }
10907 else if (11 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
10908 || 2 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10909 {
10910 /* 3) Store, immediate pre-indexed. */
10911 /* 5) Store, immediate post-indexed. */
10912 immed_low = bits (arm_insn_r->arm_insn, 0, 3);
10913 immed_high = bits (arm_insn_r->arm_insn, 8, 11);
10914 offset_8 = (immed_high << 4) | immed_low;
10915 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
10916 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10917 /* Calculate target store address, Rn +/- Rm, register offset. */
10918 if (15 == arm_insn_r->opcode || 6 == arm_insn_r->opcode)
10919 {
10920 tgt_mem_addr = u_regval[0] + offset_8;
10921 }
10922 else
10923 {
10924 tgt_mem_addr = u_regval[0] - offset_8;
10925 }
10926 if (ARM_RECORD_STRH == str_type)
10927 {
10928 record_buf_mem[0] = 2;
10929 record_buf_mem[1] = tgt_mem_addr;
10930 arm_insn_r->mem_rec_count = 1;
10931 }
10932 else if (ARM_RECORD_STRD == str_type)
10933 {
10934 record_buf_mem[0] = 4;
10935 record_buf_mem[1] = tgt_mem_addr;
10936 record_buf_mem[2] = 4;
10937 record_buf_mem[3] = tgt_mem_addr + 4;
10938 arm_insn_r->mem_rec_count = 2;
10939 }
10940 /* Record Rn also as it changes. */
10941 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10942 arm_insn_r->reg_rec_count = 1;
10943 }
10944 else if (9 == arm_insn_r->opcode || 13 == arm_insn_r->opcode
10945 || 0 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10946 {
10947 /* 4) Store, register pre-indexed. */
10948 /* 6) Store, register post -indexed. */
10949 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
10950 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
10951 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
10952 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
10953 /* Calculate target store address, Rn +/- Rm, register offset. */
10954 if (13 == arm_insn_r->opcode || 4 == arm_insn_r->opcode)
10955 {
10956 tgt_mem_addr = u_regval[0] + u_regval[1];
10957 }
10958 else
10959 {
10960 tgt_mem_addr = u_regval[1] - u_regval[0];
10961 }
10962 if (ARM_RECORD_STRH == str_type)
10963 {
10964 record_buf_mem[0] = 2;
10965 record_buf_mem[1] = tgt_mem_addr;
10966 arm_insn_r->mem_rec_count = 1;
10967 }
10968 else if (ARM_RECORD_STRD == str_type)
10969 {
10970 record_buf_mem[0] = 4;
10971 record_buf_mem[1] = tgt_mem_addr;
10972 record_buf_mem[2] = 4;
10973 record_buf_mem[3] = tgt_mem_addr + 4;
10974 arm_insn_r->mem_rec_count = 2;
10975 }
10976 /* Record Rn also as it changes. */
10977 *(record_buf) = bits (arm_insn_r->arm_insn, 16, 19);
10978 arm_insn_r->reg_rec_count = 1;
10979 }
10980 return 0;
10981 }
10982
10983 /* Handling ARM extension space insns. */
10984
10985 static int
10986 arm_record_extension_space (insn_decode_record *arm_insn_r)
10987 {
10988 uint32_t ret = 0; /* Return value: -1:record failure ; 0:success */
10989 uint32_t opcode1 = 0, opcode2 = 0, insn_op1 = 0;
10990 uint32_t record_buf[8], record_buf_mem[8];
10991 uint32_t reg_src1 = 0;
10992 uint32_t immed_high = 0, immed_low = 0,offset_8 = 0, tgt_mem_addr = 0;
10993 struct regcache *reg_cache = arm_insn_r->regcache;
10994 ULONGEST u_regval = 0;
10995
10996 gdb_assert (!INSN_RECORDED(arm_insn_r));
10997 /* Handle unconditional insn extension space. */
10998
10999 opcode1 = bits (arm_insn_r->arm_insn, 20, 27);
11000 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11001 if (arm_insn_r->cond)
11002 {
11003 /* PLD has no affect on architectural state, it just affects
11004 the caches. */
11005 if (5 == ((opcode1 & 0xE0) >> 5))
11006 {
11007 /* BLX(1) */
11008 record_buf[0] = ARM_PS_REGNUM;
11009 record_buf[1] = ARM_LR_REGNUM;
11010 arm_insn_r->reg_rec_count = 2;
11011 }
11012 /* STC2, LDC2, MCR2, MRC2, CDP2: <TBD>, co-processor insn. */
11013 }
11014
11015
11016 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11017 if (3 == opcode1 && bit (arm_insn_r->arm_insn, 4))
11018 {
11019 ret = -1;
11020 /* Undefined instruction on ARM V5; need to handle if later
11021 versions define it. */
11022 }
11023
11024 opcode1 = bits (arm_insn_r->arm_insn, 24, 27);
11025 opcode2 = bits (arm_insn_r->arm_insn, 4, 7);
11026 insn_op1 = bits (arm_insn_r->arm_insn, 20, 23);
11027
11028 /* Handle arithmetic insn extension space. */
11029 if (!opcode1 && 9 == opcode2 && 1 != arm_insn_r->cond
11030 && !INSN_RECORDED(arm_insn_r))
11031 {
11032 /* Handle MLA(S) and MUL(S). */
11033 if (0 <= insn_op1 && 3 >= insn_op1)
11034 {
11035 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11036 record_buf[1] = ARM_PS_REGNUM;
11037 arm_insn_r->reg_rec_count = 2;
11038 }
11039 else if (4 <= insn_op1 && 15 >= insn_op1)
11040 {
11041 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
11042 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11043 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11044 record_buf[2] = ARM_PS_REGNUM;
11045 arm_insn_r->reg_rec_count = 3;
11046 }
11047 }
11048
11049 opcode1 = bits (arm_insn_r->arm_insn, 26, 27);
11050 opcode2 = bits (arm_insn_r->arm_insn, 23, 24);
11051 insn_op1 = bits (arm_insn_r->arm_insn, 21, 22);
11052
11053 /* Handle control insn extension space. */
11054
11055 if (!opcode1 && 2 == opcode2 && !bit (arm_insn_r->arm_insn, 20)
11056 && 1 != arm_insn_r->cond && !INSN_RECORDED(arm_insn_r))
11057 {
11058 if (!bit (arm_insn_r->arm_insn,25))
11059 {
11060 if (!bits (arm_insn_r->arm_insn, 4, 7))
11061 {
11062 if ((0 == insn_op1) || (2 == insn_op1))
11063 {
11064 /* MRS. */
11065 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11066 arm_insn_r->reg_rec_count = 1;
11067 }
11068 else if (1 == insn_op1)
11069 {
11070 /* CSPR is going to be changed. */
11071 record_buf[0] = ARM_PS_REGNUM;
11072 arm_insn_r->reg_rec_count = 1;
11073 }
11074 else if (3 == insn_op1)
11075 {
11076 /* SPSR is going to be changed. */
11077 /* We need to get SPSR value, which is yet to be done. */
11078 printf_unfiltered (_("Process record does not support "
11079 "instruction 0x%0x at address %s.\n"),
11080 arm_insn_r->arm_insn,
11081 paddress (arm_insn_r->gdbarch,
11082 arm_insn_r->this_addr));
11083 return -1;
11084 }
11085 }
11086 else if (1 == bits (arm_insn_r->arm_insn, 4, 7))
11087 {
11088 if (1 == insn_op1)
11089 {
11090 /* BX. */
11091 record_buf[0] = ARM_PS_REGNUM;
11092 arm_insn_r->reg_rec_count = 1;
11093 }
11094 else if (3 == insn_op1)
11095 {
11096 /* CLZ. */
11097 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11098 arm_insn_r->reg_rec_count = 1;
11099 }
11100 }
11101 else if (3 == bits (arm_insn_r->arm_insn, 4, 7))
11102 {
11103 /* BLX. */
11104 record_buf[0] = ARM_PS_REGNUM;
11105 record_buf[1] = ARM_LR_REGNUM;
11106 arm_insn_r->reg_rec_count = 2;
11107 }
11108 else if (5 == bits (arm_insn_r->arm_insn, 4, 7))
11109 {
11110 /* QADD, QSUB, QDADD, QDSUB */
11111 record_buf[0] = ARM_PS_REGNUM;
11112 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11113 arm_insn_r->reg_rec_count = 2;
11114 }
11115 else if (7 == bits (arm_insn_r->arm_insn, 4, 7))
11116 {
11117 /* BKPT. */
11118 record_buf[0] = ARM_PS_REGNUM;
11119 record_buf[1] = ARM_LR_REGNUM;
11120 arm_insn_r->reg_rec_count = 2;
11121
11122 /* Save SPSR also;how? */
11123 printf_unfiltered (_("Process record does not support "
11124 "instruction 0x%0x at address %s.\n"),
11125 arm_insn_r->arm_insn,
11126 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11127 return -1;
11128 }
11129 else if(8 == bits (arm_insn_r->arm_insn, 4, 7)
11130 || 10 == bits (arm_insn_r->arm_insn, 4, 7)
11131 || 12 == bits (arm_insn_r->arm_insn, 4, 7)
11132 || 14 == bits (arm_insn_r->arm_insn, 4, 7)
11133 )
11134 {
11135 if (0 == insn_op1 || 1 == insn_op1)
11136 {
11137 /* SMLA<x><y>, SMLAW<y>, SMULW<y>. */
11138 /* We dont do optimization for SMULW<y> where we
11139 need only Rd. */
11140 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11141 record_buf[1] = ARM_PS_REGNUM;
11142 arm_insn_r->reg_rec_count = 2;
11143 }
11144 else if (2 == insn_op1)
11145 {
11146 /* SMLAL<x><y>. */
11147 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11148 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
11149 arm_insn_r->reg_rec_count = 2;
11150 }
11151 else if (3 == insn_op1)
11152 {
11153 /* SMUL<x><y>. */
11154 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11155 arm_insn_r->reg_rec_count = 1;
11156 }
11157 }
11158 }
11159 else
11160 {
11161 /* MSR : immediate form. */
11162 if (1 == insn_op1)
11163 {
11164 /* CSPR is going to be changed. */
11165 record_buf[0] = ARM_PS_REGNUM;
11166 arm_insn_r->reg_rec_count = 1;
11167 }
11168 else if (3 == insn_op1)
11169 {
11170 /* SPSR is going to be changed. */
11171 /* we need to get SPSR value, which is yet to be done */
11172 printf_unfiltered (_("Process record does not support "
11173 "instruction 0x%0x at address %s.\n"),
11174 arm_insn_r->arm_insn,
11175 paddress (arm_insn_r->gdbarch,
11176 arm_insn_r->this_addr));
11177 return -1;
11178 }
11179 }
11180 }
11181
11182 opcode1 = bits (arm_insn_r->arm_insn, 25, 27);
11183 opcode2 = bits (arm_insn_r->arm_insn, 20, 24);
11184 insn_op1 = bits (arm_insn_r->arm_insn, 5, 6);
11185
11186 /* Handle load/store insn extension space. */
11187
11188 if (!opcode1 && bit (arm_insn_r->arm_insn, 7)
11189 && bit (arm_insn_r->arm_insn, 4) && 1 != arm_insn_r->cond
11190 && !INSN_RECORDED(arm_insn_r))
11191 {
11192 /* SWP/SWPB. */
11193 if (0 == insn_op1)
11194 {
11195 /* These insn, changes register and memory as well. */
11196 /* SWP or SWPB insn. */
11197 /* Get memory address given by Rn. */
11198 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11199 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
11200 /* SWP insn ?, swaps word. */
11201 if (8 == arm_insn_r->opcode)
11202 {
11203 record_buf_mem[0] = 4;
11204 }
11205 else
11206 {
11207 /* SWPB insn, swaps only byte. */
11208 record_buf_mem[0] = 1;
11209 }
11210 record_buf_mem[1] = u_regval;
11211 arm_insn_r->mem_rec_count = 1;
11212 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11213 arm_insn_r->reg_rec_count = 1;
11214 }
11215 else if (1 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11216 {
11217 /* STRH. */
11218 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11219 ARM_RECORD_STRH);
11220 }
11221 else if (2 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11222 {
11223 /* LDRD. */
11224 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11225 record_buf[1] = record_buf[0] + 1;
11226 arm_insn_r->reg_rec_count = 2;
11227 }
11228 else if (3 == insn_op1 && !bit (arm_insn_r->arm_insn, 20))
11229 {
11230 /* STRD. */
11231 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11232 ARM_RECORD_STRD);
11233 }
11234 else if (bit (arm_insn_r->arm_insn, 20) && insn_op1 <= 3)
11235 {
11236 /* LDRH, LDRSB, LDRSH. */
11237 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11238 arm_insn_r->reg_rec_count = 1;
11239 }
11240
11241 }
11242
11243 opcode1 = bits (arm_insn_r->arm_insn, 23, 27);
11244 if (24 == opcode1 && bit (arm_insn_r->arm_insn, 21)
11245 && !INSN_RECORDED(arm_insn_r))
11246 {
11247 ret = -1;
11248 /* Handle coprocessor insn extension space. */
11249 }
11250
11251 /* To be done for ARMv5 and later; as of now we return -1. */
11252 if (-1 == ret)
11253 printf_unfiltered (_("Process record does not support instruction x%0x "
11254 "at address %s.\n"),arm_insn_r->arm_insn,
11255 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11256
11257
11258 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11259 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11260
11261 return ret;
11262 }
11263
11264 /* Handling opcode 000 insns. */
11265
11266 static int
11267 arm_record_data_proc_misc_ld_str (insn_decode_record *arm_insn_r)
11268 {
11269 struct regcache *reg_cache = arm_insn_r->regcache;
11270 uint32_t record_buf[8], record_buf_mem[8];
11271 ULONGEST u_regval[2] = {0};
11272
11273 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11274 uint32_t immed_high = 0, immed_low = 0, offset_8 = 0, tgt_mem_addr = 0;
11275 uint32_t opcode1 = 0;
11276
11277 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11278 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11279 opcode1 = bits (arm_insn_r->arm_insn, 20, 24);
11280
11281 /* Data processing insn /multiply insn. */
11282 if (9 == arm_insn_r->decode
11283 && ((4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11284 || (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)))
11285 {
11286 /* Handle multiply instructions. */
11287 /* MLA, MUL, SMLAL, SMULL, UMLAL, UMULL. */
11288 if (0 == arm_insn_r->opcode || 1 == arm_insn_r->opcode)
11289 {
11290 /* Handle MLA and MUL. */
11291 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11292 record_buf[1] = ARM_PS_REGNUM;
11293 arm_insn_r->reg_rec_count = 2;
11294 }
11295 else if (4 <= arm_insn_r->opcode && 7 >= arm_insn_r->opcode)
11296 {
11297 /* Handle SMLAL, SMULL, UMLAL, UMULL. */
11298 record_buf[0] = bits (arm_insn_r->arm_insn, 16, 19);
11299 record_buf[1] = bits (arm_insn_r->arm_insn, 12, 15);
11300 record_buf[2] = ARM_PS_REGNUM;
11301 arm_insn_r->reg_rec_count = 3;
11302 }
11303 }
11304 else if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11305 && (11 == arm_insn_r->decode || 13 == arm_insn_r->decode))
11306 {
11307 /* Handle misc load insns, as 20th bit (L = 1). */
11308 /* LDR insn has a capability to do branching, if
11309 MOV LR, PC is precceded by LDR insn having Rn as R15
11310 in that case, it emulates branch and link insn, and hence we
11311 need to save CSPR and PC as well. I am not sure this is right
11312 place; as opcode = 010 LDR insn make this happen, if R15 was
11313 used. */
11314 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11315 if (15 != reg_dest)
11316 {
11317 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11318 arm_insn_r->reg_rec_count = 1;
11319 }
11320 else
11321 {
11322 record_buf[0] = reg_dest;
11323 record_buf[1] = ARM_PS_REGNUM;
11324 arm_insn_r->reg_rec_count = 2;
11325 }
11326 }
11327 else if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11328 && sbo_sbz (arm_insn_r->arm_insn, 5, 12, 0)
11329 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11330 && 2 == bits (arm_insn_r->arm_insn, 20, 21))
11331 {
11332 /* Handle MSR insn. */
11333 if (9 == arm_insn_r->opcode)
11334 {
11335 /* CSPR is going to be changed. */
11336 record_buf[0] = ARM_PS_REGNUM;
11337 arm_insn_r->reg_rec_count = 1;
11338 }
11339 else
11340 {
11341 /* SPSR is going to be changed. */
11342 /* How to read SPSR value? */
11343 printf_unfiltered (_("Process record does not support instruction "
11344 "0x%0x at address %s.\n"),
11345 arm_insn_r->arm_insn,
11346 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11347 return -1;
11348 }
11349 }
11350 else if (9 == arm_insn_r->decode
11351 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11352 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11353 {
11354 /* Handling SWP, SWPB. */
11355 /* These insn, changes register and memory as well. */
11356 /* SWP or SWPB insn. */
11357
11358 reg_src1 = bits (arm_insn_r->arm_insn, 16, 19);
11359 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11360 /* SWP insn ?, swaps word. */
11361 if (8 == arm_insn_r->opcode)
11362 {
11363 record_buf_mem[0] = 4;
11364 }
11365 else
11366 {
11367 /* SWPB insn, swaps only byte. */
11368 record_buf_mem[0] = 1;
11369 }
11370 record_buf_mem[1] = u_regval[0];
11371 arm_insn_r->mem_rec_count = 1;
11372 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11373 arm_insn_r->reg_rec_count = 1;
11374 }
11375 else if (3 == arm_insn_r->decode && 0x12 == opcode1
11376 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11377 {
11378 /* Handle BLX, branch and link/exchange. */
11379 if (9 == arm_insn_r->opcode)
11380 {
11381 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm,
11382 and R14 stores the return address. */
11383 record_buf[0] = ARM_PS_REGNUM;
11384 record_buf[1] = ARM_LR_REGNUM;
11385 arm_insn_r->reg_rec_count = 2;
11386 }
11387 }
11388 else if (7 == arm_insn_r->decode && 0x12 == opcode1)
11389 {
11390 /* Handle enhanced software breakpoint insn, BKPT. */
11391 /* CPSR is changed to be executed in ARM state, disabling normal
11392 interrupts, entering abort mode. */
11393 /* According to high vector configuration PC is set. */
11394 /* user hit breakpoint and type reverse, in
11395 that case, we need to go back with previous CPSR and
11396 Program Counter. */
11397 record_buf[0] = ARM_PS_REGNUM;
11398 record_buf[1] = ARM_LR_REGNUM;
11399 arm_insn_r->reg_rec_count = 2;
11400
11401 /* Save SPSR also; how? */
11402 printf_unfiltered (_("Process record does not support instruction "
11403 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11404 paddress (arm_insn_r->gdbarch,
11405 arm_insn_r->this_addr));
11406 return -1;
11407 }
11408 else if (11 == arm_insn_r->decode
11409 && !bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11410 {
11411 /* Handle enhanced store insns and DSP insns (e.g. LDRD). */
11412
11413 /* Handle str(x) insn */
11414 arm_record_strx(arm_insn_r, &record_buf[0], &record_buf_mem[0],
11415 ARM_RECORD_STRH);
11416 }
11417 else if (1 == arm_insn_r->decode && 0x12 == opcode1
11418 && sbo_sbz (arm_insn_r->arm_insn, 9, 12, 1))
11419 {
11420 /* Handle BX, branch and link/exchange. */
11421 /* Branch is chosen by setting T bit of CSPR, bitp[0] of Rm. */
11422 record_buf[0] = ARM_PS_REGNUM;
11423 arm_insn_r->reg_rec_count = 1;
11424 }
11425 else if (1 == arm_insn_r->decode && 0x16 == opcode1
11426 && sbo_sbz (arm_insn_r->arm_insn, 9, 4, 1)
11427 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1))
11428 {
11429 /* Count leading zeros: CLZ. */
11430 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11431 arm_insn_r->reg_rec_count = 1;
11432 }
11433 else if (!bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM)
11434 && (8 == arm_insn_r->opcode || 10 == arm_insn_r->opcode)
11435 && sbo_sbz (arm_insn_r->arm_insn, 17, 4, 1)
11436 && sbo_sbz (arm_insn_r->arm_insn, 1, 12, 0)
11437 )
11438 {
11439 /* Handle MRS insn. */
11440 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11441 arm_insn_r->reg_rec_count = 1;
11442 }
11443 else if (arm_insn_r->opcode <= 15)
11444 {
11445 /* Normal data processing insns. */
11446 /* Out of 11 shifter operands mode, all the insn modifies destination
11447 register, which is specified by 13-16 decode. */
11448 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11449 record_buf[1] = ARM_PS_REGNUM;
11450 arm_insn_r->reg_rec_count = 2;
11451 }
11452 else
11453 {
11454 return -1;
11455 }
11456
11457 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11458 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11459 return 0;
11460 }
11461
11462 /* Handling opcode 001 insns. */
11463
11464 static int
11465 arm_record_data_proc_imm (insn_decode_record *arm_insn_r)
11466 {
11467 uint32_t record_buf[8], record_buf_mem[8];
11468
11469 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11470 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11471
11472 if ((9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode)
11473 && 2 == bits (arm_insn_r->arm_insn, 20, 21)
11474 && sbo_sbz (arm_insn_r->arm_insn, 13, 4, 1)
11475 )
11476 {
11477 /* Handle MSR insn. */
11478 if (9 == arm_insn_r->opcode)
11479 {
11480 /* CSPR is going to be changed. */
11481 record_buf[0] = ARM_PS_REGNUM;
11482 arm_insn_r->reg_rec_count = 1;
11483 }
11484 else
11485 {
11486 /* SPSR is going to be changed. */
11487 }
11488 }
11489 else if (arm_insn_r->opcode <= 15)
11490 {
11491 /* Normal data processing insns. */
11492 /* Out of 11 shifter operands mode, all the insn modifies destination
11493 register, which is specified by 13-16 decode. */
11494 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11495 record_buf[1] = ARM_PS_REGNUM;
11496 arm_insn_r->reg_rec_count = 2;
11497 }
11498 else
11499 {
11500 return -1;
11501 }
11502
11503 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11504 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11505 return 0;
11506 }
11507
11508 /* Handle ARM mode instructions with opcode 010. */
11509
11510 static int
11511 arm_record_ld_st_imm_offset (insn_decode_record *arm_insn_r)
11512 {
11513 struct regcache *reg_cache = arm_insn_r->regcache;
11514
11515 uint32_t reg_base , reg_dest;
11516 uint32_t offset_12, tgt_mem_addr;
11517 uint32_t record_buf[8], record_buf_mem[8];
11518 unsigned char wback;
11519 ULONGEST u_regval;
11520
11521 /* Calculate wback. */
11522 wback = (bit (arm_insn_r->arm_insn, 24) == 0)
11523 || (bit (arm_insn_r->arm_insn, 21) == 1);
11524
11525 arm_insn_r->reg_rec_count = 0;
11526 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11527
11528 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11529 {
11530 /* LDR (immediate), LDR (literal), LDRB (immediate), LDRB (literal), LDRBT
11531 and LDRT. */
11532
11533 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11534 record_buf[arm_insn_r->reg_rec_count++] = reg_dest;
11535
11536 /* The LDR instruction is capable of doing branching. If MOV LR, PC
11537 preceeds a LDR instruction having R15 as reg_base, it
11538 emulates a branch and link instruction, and hence we need to save
11539 CPSR and PC as well. */
11540 if (ARM_PC_REGNUM == reg_dest)
11541 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11542
11543 /* If wback is true, also save the base register, which is going to be
11544 written to. */
11545 if (wback)
11546 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11547 }
11548 else
11549 {
11550 /* STR (immediate), STRB (immediate), STRBT and STRT. */
11551
11552 offset_12 = bits (arm_insn_r->arm_insn, 0, 11);
11553 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11554
11555 /* Handle bit U. */
11556 if (bit (arm_insn_r->arm_insn, 23))
11557 {
11558 /* U == 1: Add the offset. */
11559 tgt_mem_addr = (uint32_t) u_regval + offset_12;
11560 }
11561 else
11562 {
11563 /* U == 0: subtract the offset. */
11564 tgt_mem_addr = (uint32_t) u_regval - offset_12;
11565 }
11566
11567 /* Bit 22 tells us whether the store instruction writes 1 byte or 4
11568 bytes. */
11569 if (bit (arm_insn_r->arm_insn, 22))
11570 {
11571 /* STRB and STRBT: 1 byte. */
11572 record_buf_mem[0] = 1;
11573 }
11574 else
11575 {
11576 /* STR and STRT: 4 bytes. */
11577 record_buf_mem[0] = 4;
11578 }
11579
11580 /* Handle bit P. */
11581 if (bit (arm_insn_r->arm_insn, 24))
11582 record_buf_mem[1] = tgt_mem_addr;
11583 else
11584 record_buf_mem[1] = (uint32_t) u_regval;
11585
11586 arm_insn_r->mem_rec_count = 1;
11587
11588 /* If wback is true, also save the base register, which is going to be
11589 written to. */
11590 if (wback)
11591 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11592 }
11593
11594 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11595 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11596 return 0;
11597 }
11598
11599 /* Handling opcode 011 insns. */
11600
11601 static int
11602 arm_record_ld_st_reg_offset (insn_decode_record *arm_insn_r)
11603 {
11604 struct regcache *reg_cache = arm_insn_r->regcache;
11605
11606 uint32_t shift_imm = 0;
11607 uint32_t reg_src1 = 0, reg_src2 = 0, reg_dest = 0;
11608 uint32_t offset_12 = 0, tgt_mem_addr = 0;
11609 uint32_t record_buf[8], record_buf_mem[8];
11610
11611 LONGEST s_word;
11612 ULONGEST u_regval[2];
11613
11614 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 21, 24);
11615 arm_insn_r->decode = bits (arm_insn_r->arm_insn, 4, 7);
11616
11617 /* Handle enhanced store insns and LDRD DSP insn,
11618 order begins according to addressing modes for store insns
11619 STRH insn. */
11620
11621 /* LDR or STR? */
11622 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11623 {
11624 reg_dest = bits (arm_insn_r->arm_insn, 12, 15);
11625 /* LDR insn has a capability to do branching, if
11626 MOV LR, PC is precedded by LDR insn having Rn as R15
11627 in that case, it emulates branch and link insn, and hence we
11628 need to save CSPR and PC as well. */
11629 if (15 != reg_dest)
11630 {
11631 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
11632 arm_insn_r->reg_rec_count = 1;
11633 }
11634 else
11635 {
11636 record_buf[0] = reg_dest;
11637 record_buf[1] = ARM_PS_REGNUM;
11638 arm_insn_r->reg_rec_count = 2;
11639 }
11640 }
11641 else
11642 {
11643 if (! bits (arm_insn_r->arm_insn, 4, 11))
11644 {
11645 /* Store insn, register offset and register pre-indexed,
11646 register post-indexed. */
11647 /* Get Rm. */
11648 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11649 /* Get Rn. */
11650 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11651 regcache_raw_read_unsigned (reg_cache, reg_src1
11652 , &u_regval[0]);
11653 regcache_raw_read_unsigned (reg_cache, reg_src2
11654 , &u_regval[1]);
11655 if (15 == reg_src2)
11656 {
11657 /* If R15 was used as Rn, hence current PC+8. */
11658 /* Pre-indexed mode doesnt reach here ; illegal insn. */
11659 u_regval[0] = u_regval[0] + 8;
11660 }
11661 /* Calculate target store address, Rn +/- Rm, register offset. */
11662 /* U == 1. */
11663 if (bit (arm_insn_r->arm_insn, 23))
11664 {
11665 tgt_mem_addr = u_regval[0] + u_regval[1];
11666 }
11667 else
11668 {
11669 tgt_mem_addr = u_regval[1] - u_regval[0];
11670 }
11671
11672 switch (arm_insn_r->opcode)
11673 {
11674 /* STR. */
11675 case 8:
11676 case 12:
11677 /* STR. */
11678 case 9:
11679 case 13:
11680 /* STRT. */
11681 case 1:
11682 case 5:
11683 /* STR. */
11684 case 0:
11685 case 4:
11686 record_buf_mem[0] = 4;
11687 break;
11688
11689 /* STRB. */
11690 case 10:
11691 case 14:
11692 /* STRB. */
11693 case 11:
11694 case 15:
11695 /* STRBT. */
11696 case 3:
11697 case 7:
11698 /* STRB. */
11699 case 2:
11700 case 6:
11701 record_buf_mem[0] = 1;
11702 break;
11703
11704 default:
11705 gdb_assert_not_reached ("no decoding pattern found");
11706 break;
11707 }
11708 record_buf_mem[1] = tgt_mem_addr;
11709 arm_insn_r->mem_rec_count = 1;
11710
11711 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11712 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11713 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11714 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11715 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11716 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11717 )
11718 {
11719 /* Rn is going to be changed in pre-indexed mode and
11720 post-indexed mode as well. */
11721 record_buf[0] = reg_src2;
11722 arm_insn_r->reg_rec_count = 1;
11723 }
11724 }
11725 else
11726 {
11727 /* Store insn, scaled register offset; scaled pre-indexed. */
11728 offset_12 = bits (arm_insn_r->arm_insn, 5, 6);
11729 /* Get Rm. */
11730 reg_src1 = bits (arm_insn_r->arm_insn, 0, 3);
11731 /* Get Rn. */
11732 reg_src2 = bits (arm_insn_r->arm_insn, 16, 19);
11733 /* Get shift_imm. */
11734 shift_imm = bits (arm_insn_r->arm_insn, 7, 11);
11735 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
11736 regcache_raw_read_signed (reg_cache, reg_src1, &s_word);
11737 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11738 /* Offset_12 used as shift. */
11739 switch (offset_12)
11740 {
11741 case 0:
11742 /* Offset_12 used as index. */
11743 offset_12 = u_regval[0] << shift_imm;
11744 break;
11745
11746 case 1:
11747 offset_12 = (!shift_imm)?0:u_regval[0] >> shift_imm;
11748 break;
11749
11750 case 2:
11751 if (!shift_imm)
11752 {
11753 if (bit (u_regval[0], 31))
11754 {
11755 offset_12 = 0xFFFFFFFF;
11756 }
11757 else
11758 {
11759 offset_12 = 0;
11760 }
11761 }
11762 else
11763 {
11764 /* This is arithmetic shift. */
11765 offset_12 = s_word >> shift_imm;
11766 }
11767 break;
11768
11769 case 3:
11770 if (!shift_imm)
11771 {
11772 regcache_raw_read_unsigned (reg_cache, ARM_PS_REGNUM,
11773 &u_regval[1]);
11774 /* Get C flag value and shift it by 31. */
11775 offset_12 = (((bit (u_regval[1], 29)) << 31) \
11776 | (u_regval[0]) >> 1);
11777 }
11778 else
11779 {
11780 offset_12 = (u_regval[0] >> shift_imm) \
11781 | (u_regval[0] <<
11782 (sizeof(uint32_t) - shift_imm));
11783 }
11784 break;
11785
11786 default:
11787 gdb_assert_not_reached ("no decoding pattern found");
11788 break;
11789 }
11790
11791 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
11792 /* bit U set. */
11793 if (bit (arm_insn_r->arm_insn, 23))
11794 {
11795 tgt_mem_addr = u_regval[1] + offset_12;
11796 }
11797 else
11798 {
11799 tgt_mem_addr = u_regval[1] - offset_12;
11800 }
11801
11802 switch (arm_insn_r->opcode)
11803 {
11804 /* STR. */
11805 case 8:
11806 case 12:
11807 /* STR. */
11808 case 9:
11809 case 13:
11810 /* STRT. */
11811 case 1:
11812 case 5:
11813 /* STR. */
11814 case 0:
11815 case 4:
11816 record_buf_mem[0] = 4;
11817 break;
11818
11819 /* STRB. */
11820 case 10:
11821 case 14:
11822 /* STRB. */
11823 case 11:
11824 case 15:
11825 /* STRBT. */
11826 case 3:
11827 case 7:
11828 /* STRB. */
11829 case 2:
11830 case 6:
11831 record_buf_mem[0] = 1;
11832 break;
11833
11834 default:
11835 gdb_assert_not_reached ("no decoding pattern found");
11836 break;
11837 }
11838 record_buf_mem[1] = tgt_mem_addr;
11839 arm_insn_r->mem_rec_count = 1;
11840
11841 if (9 == arm_insn_r->opcode || 11 == arm_insn_r->opcode
11842 || 13 == arm_insn_r->opcode || 15 == arm_insn_r->opcode
11843 || 0 == arm_insn_r->opcode || 2 == arm_insn_r->opcode
11844 || 4 == arm_insn_r->opcode || 6 == arm_insn_r->opcode
11845 || 1 == arm_insn_r->opcode || 3 == arm_insn_r->opcode
11846 || 5 == arm_insn_r->opcode || 7 == arm_insn_r->opcode
11847 )
11848 {
11849 /* Rn is going to be changed in register scaled pre-indexed
11850 mode,and scaled post indexed mode. */
11851 record_buf[0] = reg_src2;
11852 arm_insn_r->reg_rec_count = 1;
11853 }
11854 }
11855 }
11856
11857 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11858 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11859 return 0;
11860 }
11861
11862 /* Handle ARM mode instructions with opcode 100. */
11863
11864 static int
11865 arm_record_ld_st_multiple (insn_decode_record *arm_insn_r)
11866 {
11867 struct regcache *reg_cache = arm_insn_r->regcache;
11868 uint32_t register_count = 0, register_bits;
11869 uint32_t reg_base, addr_mode;
11870 uint32_t record_buf[24], record_buf_mem[48];
11871 uint32_t wback;
11872 ULONGEST u_regval;
11873
11874 /* Fetch the list of registers. */
11875 register_bits = bits (arm_insn_r->arm_insn, 0, 15);
11876 arm_insn_r->reg_rec_count = 0;
11877
11878 /* Fetch the base register that contains the address we are loading data
11879 to. */
11880 reg_base = bits (arm_insn_r->arm_insn, 16, 19);
11881
11882 /* Calculate wback. */
11883 wback = (bit (arm_insn_r->arm_insn, 21) == 1);
11884
11885 if (bit (arm_insn_r->arm_insn, INSN_S_L_BIT_NUM))
11886 {
11887 /* LDM/LDMIA/LDMFD, LDMDA/LDMFA, LDMDB and LDMIB. */
11888
11889 /* Find out which registers are going to be loaded from memory. */
11890 while (register_bits)
11891 {
11892 if (register_bits & 0x00000001)
11893 record_buf[arm_insn_r->reg_rec_count++] = register_count;
11894 register_bits = register_bits >> 1;
11895 register_count++;
11896 }
11897
11898
11899 /* If wback is true, also save the base register, which is going to be
11900 written to. */
11901 if (wback)
11902 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11903
11904 /* Save the CPSR register. */
11905 record_buf[arm_insn_r->reg_rec_count++] = ARM_PS_REGNUM;
11906 }
11907 else
11908 {
11909 /* STM (STMIA, STMEA), STMDA (STMED), STMDB (STMFD) and STMIB (STMFA). */
11910
11911 addr_mode = bits (arm_insn_r->arm_insn, 23, 24);
11912
11913 regcache_raw_read_unsigned (reg_cache, reg_base, &u_regval);
11914
11915 /* Find out how many registers are going to be stored to memory. */
11916 while (register_bits)
11917 {
11918 if (register_bits & 0x00000001)
11919 register_count++;
11920 register_bits = register_bits >> 1;
11921 }
11922
11923 switch (addr_mode)
11924 {
11925 /* STMDA (STMED): Decrement after. */
11926 case 0:
11927 record_buf_mem[1] = (uint32_t) u_regval
11928 - register_count * INT_REGISTER_SIZE + 4;
11929 break;
11930 /* STM (STMIA, STMEA): Increment after. */
11931 case 1:
11932 record_buf_mem[1] = (uint32_t) u_regval;
11933 break;
11934 /* STMDB (STMFD): Decrement before. */
11935 case 2:
11936 record_buf_mem[1] = (uint32_t) u_regval
11937 - register_count * INT_REGISTER_SIZE;
11938 break;
11939 /* STMIB (STMFA): Increment before. */
11940 case 3:
11941 record_buf_mem[1] = (uint32_t) u_regval + INT_REGISTER_SIZE;
11942 break;
11943 default:
11944 gdb_assert_not_reached ("no decoding pattern found");
11945 break;
11946 }
11947
11948 record_buf_mem[0] = register_count * INT_REGISTER_SIZE;
11949 arm_insn_r->mem_rec_count = 1;
11950
11951 /* If wback is true, also save the base register, which is going to be
11952 written to. */
11953 if (wback)
11954 record_buf[arm_insn_r->reg_rec_count++] = reg_base;
11955 }
11956
11957 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11958 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
11959 return 0;
11960 }
11961
11962 /* Handling opcode 101 insns. */
11963
11964 static int
11965 arm_record_b_bl (insn_decode_record *arm_insn_r)
11966 {
11967 uint32_t record_buf[8];
11968
11969 /* Handle B, BL, BLX(1) insns. */
11970 /* B simply branches so we do nothing here. */
11971 /* Note: BLX(1) doesnt fall here but instead it falls into
11972 extension space. */
11973 if (bit (arm_insn_r->arm_insn, 24))
11974 {
11975 record_buf[0] = ARM_LR_REGNUM;
11976 arm_insn_r->reg_rec_count = 1;
11977 }
11978
11979 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
11980
11981 return 0;
11982 }
11983
11984 /* Handling opcode 110 insns. */
11985
11986 static int
11987 arm_record_unsupported_insn (insn_decode_record *arm_insn_r)
11988 {
11989 printf_unfiltered (_("Process record does not support instruction "
11990 "0x%0x at address %s.\n"),arm_insn_r->arm_insn,
11991 paddress (arm_insn_r->gdbarch, arm_insn_r->this_addr));
11992
11993 return -1;
11994 }
11995
/* Record handler for vector data transfer instructions: VMOV, VMRS,
   VMSR and VDUP between ARM core registers and the VFP/NEON register
   bank.  Records the destination register(s); returns 0.  */

static int
arm_record_vdata_transfer_insn (insn_decode_record *arm_insn_r)
{
  uint32_t bits_a, bit_c, bit_l, reg_t, reg_v;
  uint32_t record_buf[4];

  /* Core register count; single-precision VFP registers are addressed
     as pseudo registers starting at num_regs in this mapping.  */
  const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
  reg_t = bits (arm_insn_r->arm_insn, 12, 15);
  /* NOTE(review): reg_v and bits_a both read bits 21-23, yet reg_v is
     later used as a vector register number (Vn/Vd field); the ARM ARM
     places Vn in bits 16-19 for these encodings -- confirm this
     decode against the manual.  */
  reg_v = bits (arm_insn_r->arm_insn, 21, 23);
  bits_a = bits (arm_insn_r->arm_insn, 21, 23);
  bit_l = bit (arm_insn_r->arm_insn, 20);    /* Transfer direction.  */
  bit_c = bit (arm_insn_r->arm_insn, 8);     /* Scalar/doubleword form.  */

  /* Handle VMOV instruction: transfer to a core register, so only
     Rt changes.  */
  if (bit_l && bit_c)
    {
      record_buf[0] = reg_t;
      arm_insn_r->reg_rec_count = 1;
    }
  else if (bit_l && !bit_c)
    {
      /* Handle VMOV instruction.  */
      if (bits_a == 0x00)
	{
	  /* NOTE(review): bit 20 is bit_l, which is known to be set in
	     this branch, so the else arm below appears unreachable
	     here -- verify.  */
	  if (bit (arm_insn_r->arm_insn, 20))
	    record_buf[0] = reg_t;
	  else
	    record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
			    (reg_v << 1));

	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMRS instruction.  */
      else if (bits_a == 0x07)
	{
	  /* VMRS with Rt == 15 writes the NZCV flags into CPSR.  */
	  if (reg_t == 15)
	    reg_t = ARM_PS_REGNUM;

	  record_buf[0] = reg_t;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && !bit_c)
    {
      /* Handle VMOV instruction: transfer into a single-precision
	 VFP register (recorded as a pseudo register).  */
      if (bits_a == 0x00)
	{
	  /* NOTE(review): bit 20 is bit_l, which is known to be clear
	     in this branch, so only the else arm below can run --
	     verify.  */
	  if (bit (arm_insn_r->arm_insn, 20))
	    record_buf[0] = reg_t;
	  else
	    record_buf[0] = num_regs + (bit (arm_insn_r->arm_insn, 7) |
			    (reg_v << 1));

	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VMSR instruction: FPSCR is overwritten.  */
      else if (bits_a == 0x07)
	{
	  record_buf[0] = ARM_FPSCR_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
    }
  else if (!bit_l && bit_c)
    {
      /* Handle VMOV instruction: core register to scalar, so the
	 containing D register changes.  */
      if (!(bits_a & 0x04))
	{
	  record_buf[0] = (reg_v | (bit (arm_insn_r->arm_insn, 7) << 4))
			  + ARM_D0_REGNUM;
	  arm_insn_r->reg_rec_count = 1;
	}
      /* Handle VDUP instruction.  */
      else
	{
	  if (bit (arm_insn_r->arm_insn, 21))
	    {
	      /* Q-register VDUP: a pair of consecutive D registers
		 is overwritten.  */
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      record_buf[1] = reg_v + ARM_D0_REGNUM + 1;
	      arm_insn_r->reg_rec_count = 2;
	    }
	  else
	    {
	      /* D-register VDUP: one D register is overwritten.  */
	      reg_v = reg_v | (bit (arm_insn_r->arm_insn, 7) << 4);
	      record_buf[0] = reg_v + ARM_D0_REGNUM;
	      arm_insn_r->reg_rec_count = 1;
	    }
	}
    }

  REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
  return 0;
}
12091
12092 /* Record handler for extension register load/store instructions. */
12093
12094 static int
12095 arm_record_exreg_ld_st_insn (insn_decode_record *arm_insn_r)
12096 {
12097 uint32_t opcode, single_reg;
12098 uint8_t op_vldm_vstm;
12099 uint32_t record_buf[8], record_buf_mem[128];
12100 ULONGEST u_regval = 0;
12101
12102 struct regcache *reg_cache = arm_insn_r->regcache;
12103 const int num_regs = gdbarch_num_regs (arm_insn_r->gdbarch);
12104
12105 opcode = bits (arm_insn_r->arm_insn, 20, 24);
12106 single_reg = bit (arm_insn_r->arm_insn, 8);
12107 op_vldm_vstm = opcode & 0x1b;
12108
12109 /* Handle VMOV instructions. */
12110 if ((opcode & 0x1e) == 0x04)
12111 {
12112 if (bit (arm_insn_r->arm_insn, 4))
12113 {
12114 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12115 record_buf[1] = bits (arm_insn_r->arm_insn, 16, 19);
12116 arm_insn_r->reg_rec_count = 2;
12117 }
12118 else
12119 {
12120 uint8_t reg_m = (bits (arm_insn_r->arm_insn, 0, 3) << 1)
12121 | bit (arm_insn_r->arm_insn, 5);
12122
12123 if (!single_reg)
12124 {
12125 record_buf[0] = num_regs + reg_m;
12126 record_buf[1] = num_regs + reg_m + 1;
12127 arm_insn_r->reg_rec_count = 2;
12128 }
12129 else
12130 {
12131 record_buf[0] = reg_m + ARM_D0_REGNUM;
12132 arm_insn_r->reg_rec_count = 1;
12133 }
12134 }
12135 }
12136 /* Handle VSTM and VPUSH instructions. */
12137 else if (op_vldm_vstm == 0x08 || op_vldm_vstm == 0x0a
12138 || op_vldm_vstm == 0x12)
12139 {
12140 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12141 uint32_t memory_index = 0;
12142
12143 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12144 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12145 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12146 imm_off32 = imm_off8 << 24;
12147 memory_count = imm_off8;
12148
12149 if (bit (arm_insn_r->arm_insn, 23))
12150 start_address = u_regval;
12151 else
12152 start_address = u_regval - imm_off32;
12153
12154 if (bit (arm_insn_r->arm_insn, 21))
12155 {
12156 record_buf[0] = reg_rn;
12157 arm_insn_r->reg_rec_count = 1;
12158 }
12159
12160 while (memory_count > 0)
12161 {
12162 if (!single_reg)
12163 {
12164 record_buf_mem[memory_index] = start_address;
12165 record_buf_mem[memory_index + 1] = 4;
12166 start_address = start_address + 4;
12167 memory_index = memory_index + 2;
12168 }
12169 else
12170 {
12171 record_buf_mem[memory_index] = start_address;
12172 record_buf_mem[memory_index + 1] = 4;
12173 record_buf_mem[memory_index + 2] = start_address + 4;
12174 record_buf_mem[memory_index + 3] = 4;
12175 start_address = start_address + 8;
12176 memory_index = memory_index + 4;
12177 }
12178 memory_count--;
12179 }
12180 arm_insn_r->mem_rec_count = (memory_index >> 1);
12181 }
12182 /* Handle VLDM instructions. */
12183 else if (op_vldm_vstm == 0x09 || op_vldm_vstm == 0x0b
12184 || op_vldm_vstm == 0x13)
12185 {
12186 uint32_t reg_count, reg_vd;
12187 uint32_t reg_index = 0;
12188
12189 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12190 reg_count = bits (arm_insn_r->arm_insn, 0, 7);
12191
12192 if (single_reg)
12193 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12194 else
12195 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12196
12197 if (bit (arm_insn_r->arm_insn, 21))
12198 record_buf[reg_index++] = bits (arm_insn_r->arm_insn, 16, 19);
12199
12200 while (reg_count > 0)
12201 {
12202 if (single_reg)
12203 record_buf[reg_index++] = num_regs + reg_vd + reg_count - 1;
12204 else
12205 record_buf[reg_index++] = ARM_D0_REGNUM + reg_vd + reg_count - 1;
12206
12207 reg_count--;
12208 }
12209 arm_insn_r->reg_rec_count = reg_index;
12210 }
12211 /* VSTR Vector store register. */
12212 else if ((opcode & 0x13) == 0x10)
12213 {
12214 uint32_t start_address, reg_rn, imm_off32, imm_off8, memory_count;
12215 uint32_t memory_index = 0;
12216
12217 reg_rn = bits (arm_insn_r->arm_insn, 16, 19);
12218 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
12219 imm_off8 = bits (arm_insn_r->arm_insn, 0, 7);
12220 imm_off32 = imm_off8 << 24;
12221 memory_count = imm_off8;
12222
12223 if (bit (arm_insn_r->arm_insn, 23))
12224 start_address = u_regval + imm_off32;
12225 else
12226 start_address = u_regval - imm_off32;
12227
12228 if (single_reg)
12229 {
12230 record_buf_mem[memory_index] = start_address;
12231 record_buf_mem[memory_index + 1] = 4;
12232 arm_insn_r->mem_rec_count = 1;
12233 }
12234 else
12235 {
12236 record_buf_mem[memory_index] = start_address;
12237 record_buf_mem[memory_index + 1] = 4;
12238 record_buf_mem[memory_index + 2] = start_address + 4;
12239 record_buf_mem[memory_index + 3] = 4;
12240 arm_insn_r->mem_rec_count = 2;
12241 }
12242 }
12243 /* VLDR Vector load register. */
12244 else if ((opcode & 0x13) == 0x11)
12245 {
12246 uint32_t reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12247
12248 if (!single_reg)
12249 {
12250 reg_vd = reg_vd | (bit (arm_insn_r->arm_insn, 22) << 4);
12251 record_buf[0] = ARM_D0_REGNUM + reg_vd;
12252 }
12253 else
12254 {
12255 reg_vd = (reg_vd << 1) | bit (arm_insn_r->arm_insn, 22);
12256 record_buf[0] = num_regs + reg_vd;
12257 }
12258 arm_insn_r->reg_rec_count = 1;
12259 }
12260
12261 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12262 MEM_ALLOC (arm_insn_r->arm_mems, arm_insn_r->mem_rec_count, record_buf_mem);
12263 return 0;
12264 }
12265
12266 /* Record handler for arm/thumb mode VFP data processing instructions. */
12267
12268 static int
12269 arm_record_vfp_data_proc_insn (insn_decode_record *arm_insn_r)
12270 {
12271 uint32_t opc1, opc2, opc3, dp_op_sz, bit_d, reg_vd;
12272 uint32_t record_buf[4];
12273 enum insn_types {INSN_T0, INSN_T1, INSN_T2, INSN_T3, INSN_INV};
12274 enum insn_types curr_insn_type = INSN_INV;
12275
12276 reg_vd = bits (arm_insn_r->arm_insn, 12, 15);
12277 opc1 = bits (arm_insn_r->arm_insn, 20, 23);
12278 opc2 = bits (arm_insn_r->arm_insn, 16, 19);
12279 opc3 = bits (arm_insn_r->arm_insn, 6, 7);
12280 dp_op_sz = bit (arm_insn_r->arm_insn, 8);
12281 bit_d = bit (arm_insn_r->arm_insn, 22);
12282 opc1 = opc1 & 0x04;
12283
12284 /* Handle VMLA, VMLS. */
12285 if (opc1 == 0x00)
12286 {
12287 if (bit (arm_insn_r->arm_insn, 10))
12288 {
12289 if (bit (arm_insn_r->arm_insn, 6))
12290 curr_insn_type = INSN_T0;
12291 else
12292 curr_insn_type = INSN_T1;
12293 }
12294 else
12295 {
12296 if (dp_op_sz)
12297 curr_insn_type = INSN_T1;
12298 else
12299 curr_insn_type = INSN_T2;
12300 }
12301 }
12302 /* Handle VNMLA, VNMLS, VNMUL. */
12303 else if (opc1 == 0x01)
12304 {
12305 if (dp_op_sz)
12306 curr_insn_type = INSN_T1;
12307 else
12308 curr_insn_type = INSN_T2;
12309 }
12310 /* Handle VMUL. */
12311 else if (opc1 == 0x02 && !(opc3 & 0x01))
12312 {
12313 if (bit (arm_insn_r->arm_insn, 10))
12314 {
12315 if (bit (arm_insn_r->arm_insn, 6))
12316 curr_insn_type = INSN_T0;
12317 else
12318 curr_insn_type = INSN_T1;
12319 }
12320 else
12321 {
12322 if (dp_op_sz)
12323 curr_insn_type = INSN_T1;
12324 else
12325 curr_insn_type = INSN_T2;
12326 }
12327 }
12328 /* Handle VADD, VSUB. */
12329 else if (opc1 == 0x03)
12330 {
12331 if (!bit (arm_insn_r->arm_insn, 9))
12332 {
12333 if (bit (arm_insn_r->arm_insn, 6))
12334 curr_insn_type = INSN_T0;
12335 else
12336 curr_insn_type = INSN_T1;
12337 }
12338 else
12339 {
12340 if (dp_op_sz)
12341 curr_insn_type = INSN_T1;
12342 else
12343 curr_insn_type = INSN_T2;
12344 }
12345 }
12346 /* Handle VDIV. */
12347 else if (opc1 == 0x0b)
12348 {
12349 if (dp_op_sz)
12350 curr_insn_type = INSN_T1;
12351 else
12352 curr_insn_type = INSN_T2;
12353 }
12354 /* Handle all other vfp data processing instructions. */
12355 else if (opc1 == 0x0b)
12356 {
12357 /* Handle VMOV. */
12358 if (!(opc3 & 0x01) || (opc2 == 0x00 && opc3 == 0x01))
12359 {
12360 if (bit (arm_insn_r->arm_insn, 4))
12361 {
12362 if (bit (arm_insn_r->arm_insn, 6))
12363 curr_insn_type = INSN_T0;
12364 else
12365 curr_insn_type = INSN_T1;
12366 }
12367 else
12368 {
12369 if (dp_op_sz)
12370 curr_insn_type = INSN_T1;
12371 else
12372 curr_insn_type = INSN_T2;
12373 }
12374 }
12375 /* Handle VNEG and VABS. */
12376 else if ((opc2 == 0x01 && opc3 == 0x01)
12377 || (opc2 == 0x00 && opc3 == 0x03))
12378 {
12379 if (!bit (arm_insn_r->arm_insn, 11))
12380 {
12381 if (bit (arm_insn_r->arm_insn, 6))
12382 curr_insn_type = INSN_T0;
12383 else
12384 curr_insn_type = INSN_T1;
12385 }
12386 else
12387 {
12388 if (dp_op_sz)
12389 curr_insn_type = INSN_T1;
12390 else
12391 curr_insn_type = INSN_T2;
12392 }
12393 }
12394 /* Handle VSQRT. */
12395 else if (opc2 == 0x01 && opc3 == 0x03)
12396 {
12397 if (dp_op_sz)
12398 curr_insn_type = INSN_T1;
12399 else
12400 curr_insn_type = INSN_T2;
12401 }
12402 /* Handle VCVT. */
12403 else if (opc2 == 0x07 && opc3 == 0x03)
12404 {
12405 if (!dp_op_sz)
12406 curr_insn_type = INSN_T1;
12407 else
12408 curr_insn_type = INSN_T2;
12409 }
12410 else if (opc3 & 0x01)
12411 {
12412 /* Handle VCVT. */
12413 if ((opc2 == 0x08) || (opc2 & 0x0e) == 0x0c)
12414 {
12415 if (!bit (arm_insn_r->arm_insn, 18))
12416 curr_insn_type = INSN_T2;
12417 else
12418 {
12419 if (dp_op_sz)
12420 curr_insn_type = INSN_T1;
12421 else
12422 curr_insn_type = INSN_T2;
12423 }
12424 }
12425 /* Handle VCVT. */
12426 else if ((opc2 & 0x0e) == 0x0a || (opc2 & 0x0e) == 0x0e)
12427 {
12428 if (dp_op_sz)
12429 curr_insn_type = INSN_T1;
12430 else
12431 curr_insn_type = INSN_T2;
12432 }
12433 /* Handle VCVTB, VCVTT. */
12434 else if ((opc2 & 0x0e) == 0x02)
12435 curr_insn_type = INSN_T2;
12436 /* Handle VCMP, VCMPE. */
12437 else if ((opc2 & 0x0e) == 0x04)
12438 curr_insn_type = INSN_T3;
12439 }
12440 }
12441
12442 switch (curr_insn_type)
12443 {
12444 case INSN_T0:
12445 reg_vd = reg_vd | (bit_d << 4);
12446 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12447 record_buf[1] = reg_vd + ARM_D0_REGNUM + 1;
12448 arm_insn_r->reg_rec_count = 2;
12449 break;
12450
12451 case INSN_T1:
12452 reg_vd = reg_vd | (bit_d << 4);
12453 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12454 arm_insn_r->reg_rec_count = 1;
12455 break;
12456
12457 case INSN_T2:
12458 reg_vd = (reg_vd << 1) | bit_d;
12459 record_buf[0] = reg_vd + ARM_D0_REGNUM;
12460 arm_insn_r->reg_rec_count = 1;
12461 break;
12462
12463 case INSN_T3:
12464 record_buf[0] = ARM_FPSCR_REGNUM;
12465 arm_insn_r->reg_rec_count = 1;
12466 break;
12467
12468 default:
12469 gdb_assert_not_reached ("no decoding pattern found");
12470 break;
12471 }
12472
12473 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, record_buf);
12474 return 0;
12475 }
12476
12477 /* Handling opcode 110 insns. */
12478
12479 static int
12480 arm_record_asimd_vfp_coproc (insn_decode_record *arm_insn_r)
12481 {
12482 uint32_t op, op1, op1_sbit, op1_ebit, coproc;
12483
12484 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12485 op1 = bits (arm_insn_r->arm_insn, 20, 25);
12486 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12487
12488 if ((coproc & 0x0e) == 0x0a)
12489 {
12490 /* Handle extension register ld/st instructions. */
12491 if (!(op1 & 0x20))
12492 return arm_record_exreg_ld_st_insn (arm_insn_r);
12493
12494 /* 64-bit transfers between arm core and extension registers. */
12495 if ((op1 & 0x3e) == 0x04)
12496 return arm_record_exreg_ld_st_insn (arm_insn_r);
12497 }
12498 else
12499 {
12500 /* Handle coprocessor ld/st instructions. */
12501 if (!(op1 & 0x3a))
12502 {
12503 /* Store. */
12504 if (!op1_ebit)
12505 return arm_record_unsupported_insn (arm_insn_r);
12506 else
12507 /* Load. */
12508 return arm_record_unsupported_insn (arm_insn_r);
12509 }
12510
12511 /* Move to coprocessor from two arm core registers. */
12512 if (op1 == 0x4)
12513 return arm_record_unsupported_insn (arm_insn_r);
12514
12515 /* Move to two arm core registers from coprocessor. */
12516 if (op1 == 0x5)
12517 {
12518 uint32_t reg_t[2];
12519
12520 reg_t[0] = bits (arm_insn_r->arm_insn, 12, 15);
12521 reg_t[1] = bits (arm_insn_r->arm_insn, 16, 19);
12522 arm_insn_r->reg_rec_count = 2;
12523
12524 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count, reg_t);
12525 return 0;
12526 }
12527 }
12528 return arm_record_unsupported_insn (arm_insn_r);
12529 }
12530
12531 /* Handling opcode 111 insns. */
12532
12533 static int
12534 arm_record_coproc_data_proc (insn_decode_record *arm_insn_r)
12535 {
12536 uint32_t op, op1_sbit, op1_ebit, coproc;
12537 struct gdbarch_tdep *tdep = gdbarch_tdep (arm_insn_r->gdbarch);
12538 struct regcache *reg_cache = arm_insn_r->regcache;
12539 ULONGEST u_regval = 0;
12540
12541 arm_insn_r->opcode = bits (arm_insn_r->arm_insn, 24, 27);
12542 coproc = bits (arm_insn_r->arm_insn, 8, 11);
12543 op1_sbit = bit (arm_insn_r->arm_insn, 24);
12544 op1_ebit = bit (arm_insn_r->arm_insn, 20);
12545 op = bit (arm_insn_r->arm_insn, 4);
12546
12547 /* Handle arm SWI/SVC system call instructions. */
12548 if (op1_sbit)
12549 {
12550 if (tdep->arm_syscall_record != NULL)
12551 {
12552 ULONGEST svc_operand, svc_number;
12553
12554 svc_operand = (0x00ffffff & arm_insn_r->arm_insn);
12555
12556 if (svc_operand) /* OABI. */
12557 svc_number = svc_operand - 0x900000;
12558 else /* EABI. */
12559 regcache_raw_read_unsigned (reg_cache, 7, &svc_number);
12560
12561 return tdep->arm_syscall_record (reg_cache, svc_number);
12562 }
12563 else
12564 {
12565 printf_unfiltered (_("no syscall record support\n"));
12566 return -1;
12567 }
12568 }
12569
12570 if ((coproc & 0x0e) == 0x0a)
12571 {
12572 /* VFP data-processing instructions. */
12573 if (!op1_sbit && !op)
12574 return arm_record_vfp_data_proc_insn (arm_insn_r);
12575
12576 /* Advanced SIMD, VFP instructions. */
12577 if (!op1_sbit && op)
12578 return arm_record_vdata_transfer_insn (arm_insn_r);
12579 }
12580 else
12581 {
12582 /* Coprocessor data operations. */
12583 if (!op1_sbit && !op)
12584 return arm_record_unsupported_insn (arm_insn_r);
12585
12586 /* Move to Coprocessor from ARM core register. */
12587 if (!op1_sbit && !op1_ebit && op)
12588 return arm_record_unsupported_insn (arm_insn_r);
12589
12590 /* Move to arm core register from coprocessor. */
12591 if (!op1_sbit && op1_ebit && op)
12592 {
12593 uint32_t record_buf[1];
12594
12595 record_buf[0] = bits (arm_insn_r->arm_insn, 12, 15);
12596 if (record_buf[0] == 15)
12597 record_buf[0] = ARM_PS_REGNUM;
12598
12599 arm_insn_r->reg_rec_count = 1;
12600 REG_ALLOC (arm_insn_r->arm_regs, arm_insn_r->reg_rec_count,
12601 record_buf);
12602 return 0;
12603 }
12604 }
12605
12606 return arm_record_unsupported_insn (arm_insn_r);
12607 }
12608
12609 /* Handling opcode 000 insns. */
12610
12611 static int
12612 thumb_record_shift_add_sub (insn_decode_record *thumb_insn_r)
12613 {
12614 uint32_t record_buf[8];
12615 uint32_t reg_src1 = 0;
12616
12617 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12618
12619 record_buf[0] = ARM_PS_REGNUM;
12620 record_buf[1] = reg_src1;
12621 thumb_insn_r->reg_rec_count = 2;
12622
12623 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12624
12625 return 0;
12626 }
12627
12628
12629 /* Handling opcode 001 insns. */
12630
12631 static int
12632 thumb_record_add_sub_cmp_mov (insn_decode_record *thumb_insn_r)
12633 {
12634 uint32_t record_buf[8];
12635 uint32_t reg_src1 = 0;
12636
12637 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12638
12639 record_buf[0] = ARM_PS_REGNUM;
12640 record_buf[1] = reg_src1;
12641 thumb_insn_r->reg_rec_count = 2;
12642
12643 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12644
12645 return 0;
12646 }
12647
12648 /* Handling opcode 010 insns. */
12649
12650 static int
12651 thumb_record_ld_st_reg_offset (insn_decode_record *thumb_insn_r)
12652 {
12653 struct regcache *reg_cache = thumb_insn_r->regcache;
12654 uint32_t record_buf[8], record_buf_mem[8];
12655
12656 uint32_t reg_src1 = 0, reg_src2 = 0;
12657 uint32_t opcode1 = 0, opcode2 = 0, opcode3 = 0;
12658
12659 ULONGEST u_regval[2] = {0};
12660
12661 opcode1 = bits (thumb_insn_r->arm_insn, 10, 12);
12662
12663 if (bit (thumb_insn_r->arm_insn, 12))
12664 {
12665 /* Handle load/store register offset. */
12666 opcode2 = bits (thumb_insn_r->arm_insn, 9, 10);
12667 if (opcode2 >= 12 && opcode2 <= 15)
12668 {
12669 /* LDR(2), LDRB(2) , LDRH(2), LDRSB, LDRSH. */
12670 reg_src1 = bits (thumb_insn_r->arm_insn,0, 2);
12671 record_buf[0] = reg_src1;
12672 thumb_insn_r->reg_rec_count = 1;
12673 }
12674 else if (opcode2 >= 8 && opcode2 <= 10)
12675 {
12676 /* STR(2), STRB(2), STRH(2) . */
12677 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12678 reg_src2 = bits (thumb_insn_r->arm_insn, 6, 8);
12679 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval[0]);
12680 regcache_raw_read_unsigned (reg_cache, reg_src2, &u_regval[1]);
12681 if (8 == opcode2)
12682 record_buf_mem[0] = 4; /* STR (2). */
12683 else if (10 == opcode2)
12684 record_buf_mem[0] = 1; /* STRB (2). */
12685 else if (9 == opcode2)
12686 record_buf_mem[0] = 2; /* STRH (2). */
12687 record_buf_mem[1] = u_regval[0] + u_regval[1];
12688 thumb_insn_r->mem_rec_count = 1;
12689 }
12690 }
12691 else if (bit (thumb_insn_r->arm_insn, 11))
12692 {
12693 /* Handle load from literal pool. */
12694 /* LDR(3). */
12695 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12696 record_buf[0] = reg_src1;
12697 thumb_insn_r->reg_rec_count = 1;
12698 }
12699 else if (opcode1)
12700 {
12701 opcode2 = bits (thumb_insn_r->arm_insn, 8, 9);
12702 opcode3 = bits (thumb_insn_r->arm_insn, 0, 2);
12703 if ((3 == opcode2) && (!opcode3))
12704 {
12705 /* Branch with exchange. */
12706 record_buf[0] = ARM_PS_REGNUM;
12707 thumb_insn_r->reg_rec_count = 1;
12708 }
12709 else
12710 {
12711 /* Format 8; special data processing insns. */
12712 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12713 record_buf[0] = ARM_PS_REGNUM;
12714 record_buf[1] = reg_src1;
12715 thumb_insn_r->reg_rec_count = 2;
12716 }
12717 }
12718 else
12719 {
12720 /* Format 5; data processing insns. */
12721 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12722 if (bit (thumb_insn_r->arm_insn, 7))
12723 {
12724 reg_src1 = reg_src1 + 8;
12725 }
12726 record_buf[0] = ARM_PS_REGNUM;
12727 record_buf[1] = reg_src1;
12728 thumb_insn_r->reg_rec_count = 2;
12729 }
12730
12731 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12732 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12733 record_buf_mem);
12734
12735 return 0;
12736 }
12737
12738 /* Handling opcode 001 insns. */
12739
12740 static int
12741 thumb_record_ld_st_imm_offset (insn_decode_record *thumb_insn_r)
12742 {
12743 struct regcache *reg_cache = thumb_insn_r->regcache;
12744 uint32_t record_buf[8], record_buf_mem[8];
12745
12746 uint32_t reg_src1 = 0;
12747 uint32_t opcode = 0, immed_5 = 0;
12748
12749 ULONGEST u_regval = 0;
12750
12751 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12752
12753 if (opcode)
12754 {
12755 /* LDR(1). */
12756 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12757 record_buf[0] = reg_src1;
12758 thumb_insn_r->reg_rec_count = 1;
12759 }
12760 else
12761 {
12762 /* STR(1). */
12763 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12764 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12765 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12766 record_buf_mem[0] = 4;
12767 record_buf_mem[1] = u_regval + (immed_5 * 4);
12768 thumb_insn_r->mem_rec_count = 1;
12769 }
12770
12771 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12772 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12773 record_buf_mem);
12774
12775 return 0;
12776 }
12777
12778 /* Handling opcode 100 insns. */
12779
12780 static int
12781 thumb_record_ld_st_stack (insn_decode_record *thumb_insn_r)
12782 {
12783 struct regcache *reg_cache = thumb_insn_r->regcache;
12784 uint32_t record_buf[8], record_buf_mem[8];
12785
12786 uint32_t reg_src1 = 0;
12787 uint32_t opcode = 0, immed_8 = 0, immed_5 = 0;
12788
12789 ULONGEST u_regval = 0;
12790
12791 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12792
12793 if (3 == opcode)
12794 {
12795 /* LDR(4). */
12796 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12797 record_buf[0] = reg_src1;
12798 thumb_insn_r->reg_rec_count = 1;
12799 }
12800 else if (1 == opcode)
12801 {
12802 /* LDRH(1). */
12803 reg_src1 = bits (thumb_insn_r->arm_insn, 0, 2);
12804 record_buf[0] = reg_src1;
12805 thumb_insn_r->reg_rec_count = 1;
12806 }
12807 else if (2 == opcode)
12808 {
12809 /* STR(3). */
12810 immed_8 = bits (thumb_insn_r->arm_insn, 0, 7);
12811 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12812 record_buf_mem[0] = 4;
12813 record_buf_mem[1] = u_regval + (immed_8 * 4);
12814 thumb_insn_r->mem_rec_count = 1;
12815 }
12816 else if (0 == opcode)
12817 {
12818 /* STRH(1). */
12819 immed_5 = bits (thumb_insn_r->arm_insn, 6, 10);
12820 reg_src1 = bits (thumb_insn_r->arm_insn, 3, 5);
12821 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12822 record_buf_mem[0] = 2;
12823 record_buf_mem[1] = u_regval + (immed_5 * 2);
12824 thumb_insn_r->mem_rec_count = 1;
12825 }
12826
12827 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12828 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12829 record_buf_mem);
12830
12831 return 0;
12832 }
12833
12834 /* Handling opcode 101 insns. */
12835
12836 static int
12837 thumb_record_misc (insn_decode_record *thumb_insn_r)
12838 {
12839 struct regcache *reg_cache = thumb_insn_r->regcache;
12840
12841 uint32_t opcode = 0, opcode1 = 0, opcode2 = 0;
12842 uint32_t register_bits = 0, register_count = 0;
12843 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12844 uint32_t record_buf[24], record_buf_mem[48];
12845 uint32_t reg_src1;
12846
12847 ULONGEST u_regval = 0;
12848
12849 opcode = bits (thumb_insn_r->arm_insn, 11, 12);
12850 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12851 opcode2 = bits (thumb_insn_r->arm_insn, 9, 12);
12852
12853 if (14 == opcode2)
12854 {
12855 /* POP. */
12856 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12857 while (register_bits)
12858 {
12859 if (register_bits & 0x00000001)
12860 record_buf[index++] = register_count;
12861 register_bits = register_bits >> 1;
12862 register_count++;
12863 }
12864 record_buf[index++] = ARM_PS_REGNUM;
12865 record_buf[index++] = ARM_SP_REGNUM;
12866 thumb_insn_r->reg_rec_count = index;
12867 }
12868 else if (10 == opcode2)
12869 {
12870 /* PUSH. */
12871 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12872 regcache_raw_read_unsigned (reg_cache, ARM_SP_REGNUM, &u_regval);
12873 while (register_bits)
12874 {
12875 if (register_bits & 0x00000001)
12876 register_count++;
12877 register_bits = register_bits >> 1;
12878 }
12879 start_address = u_regval - \
12880 (4 * (bit (thumb_insn_r->arm_insn, 8) + register_count));
12881 thumb_insn_r->mem_rec_count = register_count;
12882 while (register_count)
12883 {
12884 record_buf_mem[(register_count * 2) - 1] = start_address;
12885 record_buf_mem[(register_count * 2) - 2] = 4;
12886 start_address = start_address + 4;
12887 register_count--;
12888 }
12889 record_buf[0] = ARM_SP_REGNUM;
12890 thumb_insn_r->reg_rec_count = 1;
12891 }
12892 else if (0x1E == opcode1)
12893 {
12894 /* BKPT insn. */
12895 /* Handle enhanced software breakpoint insn, BKPT. */
12896 /* CPSR is changed to be executed in ARM state, disabling normal
12897 interrupts, entering abort mode. */
12898 /* According to high vector configuration PC is set. */
12899 /* User hits breakpoint and type reverse, in that case, we need to go back with
12900 previous CPSR and Program Counter. */
12901 record_buf[0] = ARM_PS_REGNUM;
12902 record_buf[1] = ARM_LR_REGNUM;
12903 thumb_insn_r->reg_rec_count = 2;
12904 /* We need to save SPSR value, which is not yet done. */
12905 printf_unfiltered (_("Process record does not support instruction "
12906 "0x%0x at address %s.\n"),
12907 thumb_insn_r->arm_insn,
12908 paddress (thumb_insn_r->gdbarch,
12909 thumb_insn_r->this_addr));
12910 return -1;
12911 }
12912 else if ((0 == opcode) || (1 == opcode))
12913 {
12914 /* ADD(5), ADD(6). */
12915 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12916 record_buf[0] = reg_src1;
12917 thumb_insn_r->reg_rec_count = 1;
12918 }
12919 else if (2 == opcode)
12920 {
12921 /* ADD(7), SUB(4). */
12922 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12923 record_buf[0] = ARM_SP_REGNUM;
12924 thumb_insn_r->reg_rec_count = 1;
12925 }
12926
12927 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
12928 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
12929 record_buf_mem);
12930
12931 return 0;
12932 }
12933
12934 /* Handling opcode 110 insns. */
12935
12936 static int
12937 thumb_record_ldm_stm_swi (insn_decode_record *thumb_insn_r)
12938 {
12939 struct gdbarch_tdep *tdep = gdbarch_tdep (thumb_insn_r->gdbarch);
12940 struct regcache *reg_cache = thumb_insn_r->regcache;
12941
12942 uint32_t ret = 0; /* function return value: -1:record failure ; 0:success */
12943 uint32_t reg_src1 = 0;
12944 uint32_t opcode1 = 0, opcode2 = 0, register_bits = 0, register_count = 0;
12945 uint32_t register_list[8] = {0}, index = 0, start_address = 0;
12946 uint32_t record_buf[24], record_buf_mem[48];
12947
12948 ULONGEST u_regval = 0;
12949
12950 opcode1 = bits (thumb_insn_r->arm_insn, 8, 12);
12951 opcode2 = bits (thumb_insn_r->arm_insn, 11, 12);
12952
12953 if (1 == opcode2)
12954 {
12955
12956 /* LDMIA. */
12957 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12958 /* Get Rn. */
12959 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12960 while (register_bits)
12961 {
12962 if (register_bits & 0x00000001)
12963 record_buf[index++] = register_count;
12964 register_bits = register_bits >> 1;
12965 register_count++;
12966 }
12967 record_buf[index++] = reg_src1;
12968 thumb_insn_r->reg_rec_count = index;
12969 }
12970 else if (0 == opcode2)
12971 {
12972 /* It handles both STMIA. */
12973 register_bits = bits (thumb_insn_r->arm_insn, 0, 7);
12974 /* Get Rn. */
12975 reg_src1 = bits (thumb_insn_r->arm_insn, 8, 10);
12976 regcache_raw_read_unsigned (reg_cache, reg_src1, &u_regval);
12977 while (register_bits)
12978 {
12979 if (register_bits & 0x00000001)
12980 register_count++;
12981 register_bits = register_bits >> 1;
12982 }
12983 start_address = u_regval;
12984 thumb_insn_r->mem_rec_count = register_count;
12985 while (register_count)
12986 {
12987 record_buf_mem[(register_count * 2) - 1] = start_address;
12988 record_buf_mem[(register_count * 2) - 2] = 4;
12989 start_address = start_address + 4;
12990 register_count--;
12991 }
12992 }
12993 else if (0x1F == opcode1)
12994 {
12995 /* Handle arm syscall insn. */
12996 if (tdep->arm_syscall_record != NULL)
12997 {
12998 regcache_raw_read_unsigned (reg_cache, 7, &u_regval);
12999 ret = tdep->arm_syscall_record (reg_cache, u_regval);
13000 }
13001 else
13002 {
13003 printf_unfiltered (_("no syscall record support\n"));
13004 return -1;
13005 }
13006 }
13007
13008 /* B (1), conditional branch is automatically taken care in process_record,
13009 as PC is saved there. */
13010
13011 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13012 MEM_ALLOC (thumb_insn_r->arm_mems, thumb_insn_r->mem_rec_count,
13013 record_buf_mem);
13014
13015 return ret;
13016 }
13017
13018 /* Handling opcode 111 insns. */
13019
13020 static int
13021 thumb_record_branch (insn_decode_record *thumb_insn_r)
13022 {
13023 uint32_t record_buf[8];
13024 uint32_t bits_h = 0;
13025
13026 bits_h = bits (thumb_insn_r->arm_insn, 11, 12);
13027
13028 if (2 == bits_h || 3 == bits_h)
13029 {
13030 /* BL */
13031 record_buf[0] = ARM_LR_REGNUM;
13032 thumb_insn_r->reg_rec_count = 1;
13033 }
13034 else if (1 == bits_h)
13035 {
13036 /* BLX(1). */
13037 record_buf[0] = ARM_PS_REGNUM;
13038 record_buf[1] = ARM_LR_REGNUM;
13039 thumb_insn_r->reg_rec_count = 2;
13040 }
13041
13042 /* B(2) is automatically taken care in process_record, as PC is
13043 saved there. */
13044
13045 REG_ALLOC (thumb_insn_r->arm_regs, thumb_insn_r->reg_rec_count, record_buf);
13046
13047 return 0;
13048 }
13049
/* Handler for thumb2 load/store multiple instructions.

   Decodes RFE/SRS and LDM/STM variants (op = bits 23-24 selects the
   addressing mode, the S/L bit selects load vs store) and records the
   registers / memory the instruction will change.  Always returns
   ARM_RECORD_SUCCESS except via arm_record_unsupported_insn.  */

static int
thumb2_record_ld_st_multiple (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, op;
  uint32_t register_bits = 0, register_count = 0;
  uint32_t index = 0, start_address = 0;
  /* record_buf: register numbers; record_buf_mem: (length, address)
     pairs, length at even indices, address at odd indices.  */
  uint32_t record_buf[24], record_buf_mem[48];

  ULONGEST u_regval = 0;

  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  op = bits (thumb2_insn_r->arm_insn, 23, 24);

  if (0 == op || 3 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle RFE instruction: restores CPSR from memory.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* Handle SRS instruction after reading banked SP.  */
	  return arm_record_unsupported_insn (thumb2_insn_r);
	}
    }
  else if (1 == op || 2 == op)
    {
      if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
	{
	  /* Handle LDM/LDMIA/LDMFD and LDMDB/LDMEA instructions: every
	     listed register, the base register and the flags change.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		record_buf[index++] = register_count;

	      register_count++;
	      register_bits = register_bits >> 1;
	    }
	  record_buf[index++] = reg_rn;
	  record_buf[index++] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = index;
	}
      else
	{
	  /* Handle STM/STMIA/STMEA and STMDB/STMFD.  */
	  register_bits = bits (thumb2_insn_r->arm_insn, 0, 15);
	  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
	  while (register_bits)
	    {
	      if (register_bits & 0x00000001)
		register_count++;

	      register_bits = register_bits >> 1;
	    }

	  if (1 == op)
	    {
	      /* Start address for STM/STMIA/STMEA (increment-after):
		 stores begin at Rn.  */
	      start_address = u_regval;
	    }
	  else if (2 == op)
	    {
	      /* Start address for STMDB/STMFD (decrement-before):
		 stores begin at Rn - 4 * register_count.  */
	      start_address = u_regval - register_count * 4;
	    }

	  thumb2_insn_r->mem_rec_count = register_count;
	  while (register_count)
	    {
	      /* Filled back-to-front: pair i covers one stored word.  */
	      record_buf_mem[register_count * 2 - 1] = start_address;
	      record_buf_mem[register_count * 2 - 2] = 4;
	      start_address = start_address + 4;
	      register_count--;
	    }
	  record_buf[0] = reg_rn;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}
    }

  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
13143
/* Handler for thumb2 load/store (dual/exclusive) and table branch
   instructions.

   The S/L bit selects load vs store.  Loads record the destination
   register(s); stores record the affected memory (and, for the
   writeback forms, the base register).  */

static int
thumb2_record_ld_st_dual_ex_tbb (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rd, reg_rn, offset_imm;
  uint32_t reg_dest1, reg_dest2;
  uint32_t address, offset_addr;
  /* record_buf_mem: (length, address) pairs, length at even indices.  */
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2, op3;
  /* NOTE(review): s_word is never used in this function.  */
  LONGEST s_word;

  ULONGEST u_regval[2];

  op1 = bits (thumb2_insn_r->arm_insn, 23, 24);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 21);
  op3 = bits (thumb2_insn_r->arm_insn, 4, 7);

  if (bit (thumb2_insn_r->arm_insn, INSN_S_L_BIT_NUM))
    {
      /* Load forms (LDREX*, LDRD, TBB/TBH): destination registers and
	 the flags change; memory does not.  */
      if(!(1 == op1 && 1 == op2 && (0 == op3 || 1 == op3)))
	{
	  reg_dest1 = bits (thumb2_insn_r->arm_insn, 12, 15);
	  record_buf[0] = reg_dest1;
	  record_buf[1] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 2;
	}

      /* Dual-register loads additionally overwrite Rt2 (bits 8-11).  */
      if (3 == op2 || (op1 & 2) || (1 == op1 && 1 == op2 && 7 == op3))
	{
	  reg_dest2 = bits (thumb2_insn_r->arm_insn, 8, 11);
	  record_buf[2] = reg_dest2;
	  thumb2_insn_r->reg_rec_count = 3;
	}
    }
  else
    {
      reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
      regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

      if (0 == op1 && 0 == op2)
	{
	  /* Handle STREX: one word at Rn + imm8 * 4 changes, plus the
	     status result register Rd (bits 0-3).  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  address = u_regval[0] + (offset_imm * 4);
	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  thumb2_insn_r->mem_rec_count = 1;
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else if (1 == op1 && 0 == op2)
	{
	  /* STREXB/STREXH/STREXD: status result register Rd changes,
	     plus 1, 2 or 8 bytes at Rn.  */
	  reg_rd = bits (thumb2_insn_r->arm_insn, 0, 3);
	  record_buf[0] = reg_rd;
	  thumb2_insn_r->reg_rec_count = 1;
	  address = u_regval[0];
	  record_buf_mem[1] = address;

	  if (4 == op3)
	    {
	      /* Handle STREXB.  */
	      record_buf_mem[0] = 1;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (5 == op3)
	    {
	      /* Handle STREXH.  */
	      record_buf_mem[0] = 2 ;
	      thumb2_insn_r->mem_rec_count = 1;
	    }
	  else if (7 == op3)
	    {
	      /* Handle STREXD: two words at Rn and Rn + 4.  */
	      address = u_regval[0];
	      record_buf_mem[0] = 4;
	      record_buf_mem[2] = 4;
	      record_buf_mem[3] = address + 4;
	      thumb2_insn_r->mem_rec_count = 2;
	    }
	}
      else
	{
	  /* STRD: two words change; with pre-indexing (bit 24) the
	     offset is applied before the access, and the base register
	     may be written back.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);

	  if (bit (thumb2_insn_r->arm_insn, 24))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 23))
		offset_addr = u_regval[0] + (offset_imm * 4);
	      else
		offset_addr = u_regval[0] - (offset_imm * 4);

	      address = offset_addr;
	    }
	  else
	    address = u_regval[0];

	  record_buf_mem[0] = 4;
	  record_buf_mem[1] = address;
	  record_buf_mem[2] = 4;
	  record_buf_mem[3] = address + 4;
	  thumb2_insn_r->mem_rec_count = 2;
	  record_buf[0] = reg_rn;
	  thumb2_insn_r->reg_rec_count = 1;
	}
    }

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
13261
13262 /* Handler for thumb2 data processing (shift register and modified immediate)
13263 instructions. */
13264
13265 static int
13266 thumb2_record_data_proc_sreg_mimm (insn_decode_record *thumb2_insn_r)
13267 {
13268 uint32_t reg_rd, op;
13269 uint32_t record_buf[8];
13270
13271 op = bits (thumb2_insn_r->arm_insn, 21, 24);
13272 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13273
13274 if ((0 == op || 4 == op || 8 == op || 13 == op) && 15 == reg_rd)
13275 {
13276 record_buf[0] = ARM_PS_REGNUM;
13277 thumb2_insn_r->reg_rec_count = 1;
13278 }
13279 else
13280 {
13281 record_buf[0] = reg_rd;
13282 record_buf[1] = ARM_PS_REGNUM;
13283 thumb2_insn_r->reg_rec_count = 2;
13284 }
13285
13286 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13287 record_buf);
13288 return ARM_RECORD_SUCCESS;
13289 }
13290
13291 /* Generic handler for thumb2 instructions which effect destination and PS
13292 registers. */
13293
13294 static int
13295 thumb2_record_ps_dest_generic (insn_decode_record *thumb2_insn_r)
13296 {
13297 uint32_t reg_rd;
13298 uint32_t record_buf[8];
13299
13300 reg_rd = bits (thumb2_insn_r->arm_insn, 8, 11);
13301
13302 record_buf[0] = reg_rd;
13303 record_buf[1] = ARM_PS_REGNUM;
13304 thumb2_insn_r->reg_rec_count = 2;
13305
13306 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13307 record_buf);
13308 return ARM_RECORD_SUCCESS;
13309 }
13310
/* Handler for thumb2 branch and miscellaneous control instructions.
   Records the registers a branch/MSR-class insn may clobber; returns
   ARM_RECORD_SUCCESS, or -1 for unsupported MSR forms.  */

static int
thumb2_record_branch_misc_cntrl (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;
  uint32_t record_buf[8];

  /* Fields from the "branches and miscellaneous control" encoding:
     op is bits 20-26, op1 bits 12-14, op2 bits 8-11.  */
  op = bits (thumb2_insn_r->arm_insn, 20, 26);
  op1 = bits (thumb2_insn_r->arm_insn, 12, 14);
  op2 = bits (thumb2_insn_r->arm_insn, 8, 11);

  /* Handle MSR insn.  */
  if (!(op1 & 0x2) && 0x38 == op)
    {
      if (!(op2 & 0x3))
	{
	  /* CPSR is going to be changed.  */
	  record_buf[0] = ARM_PS_REGNUM;
	  thumb2_insn_r->reg_rec_count = 1;
	}
      else
	{
	  /* MSR forms writing other special registers are not
	     supported by the recorder.  */
	  arm_record_unsupported_insn(thumb2_insn_r);
	  return -1;
	}
    }
  else if (4 == (op1 & 0x5) || 5 == (op1 & 0x5))
    {
      /* BLX.  */
      /* A branch with link clobbers LR and may affect the flags.  */
      record_buf[0] = ARM_PS_REGNUM;
      record_buf[1] = ARM_LR_REGNUM;
      thumb2_insn_r->reg_rec_count = 2;
    }

  /* NOTE(review): encodings matching neither case above fall through
     with reg_rec_count unchanged (zero from the caller's memset in
     arm_process_record) and still return success -- confirm this is
     the intended treatment of plain B/conditional branches.  */
  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  return ARM_RECORD_SUCCESS;
}
13350
/* Handler for thumb2 store single data item instructions (STRB/STRH/STR).
   Computes the effective address, records the memory span the store will
   overwrite and the base register Rn (which write-back forms update).  */

static int
thumb2_record_str_single_data (insn_decode_record *thumb2_insn_r)
{
  struct regcache *reg_cache = thumb2_insn_r->regcache;

  uint32_t reg_rn, reg_rm, offset_imm, shift_imm;
  uint32_t address, offset_addr;
  uint32_t record_buf[8], record_buf_mem[8];
  uint32_t op1, op2;

  ULONGEST u_regval[2];

  /* op1 (bits 21-23) encodes the store size; op2 (bits 6-11)
     distinguishes register-offset from immediate-offset forms.  */
  op1 = bits (thumb2_insn_r->arm_insn, 21, 23);
  op2 = bits (thumb2_insn_r->arm_insn, 6, 11);
  reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
  regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval[0]);

  if (bit (thumb2_insn_r->arm_insn, 23))
    {
      /* T2 encoding: Rn plus a 12-bit positive immediate offset.  */
      offset_imm = bits (thumb2_insn_r->arm_insn, 0, 11);
      offset_addr = u_regval[0] + offset_imm;
      address = offset_addr;
    }
  else
    {
      /* T3 encoding.  */
      if ((0 == op1 || 1 == op1 || 2 == op1) && !(op2 & 0x20))
	{
	  /* Handle STRB (register): address = Rn + (Rm << shift).  */
	  reg_rm = bits (thumb2_insn_r->arm_insn, 0, 3);
	  regcache_raw_read_unsigned (reg_cache, reg_rm, &u_regval[1]);
	  shift_imm = bits (thumb2_insn_r->arm_insn, 4, 5);
	  offset_addr = u_regval[1] << shift_imm;
	  address = u_regval[0] + offset_addr;
	}
      else
	{
	  /* Immediate forms: 8-bit offset; bit 10 selects indexed
	     addressing and bit 9 the sign of the offset.  */
	  offset_imm = bits (thumb2_insn_r->arm_insn, 0, 7);
	  if (bit (thumb2_insn_r->arm_insn, 10))
	    {
	      if (bit (thumb2_insn_r->arm_insn, 9))
		offset_addr = u_regval[0] + offset_imm;
	      else
		offset_addr = u_regval[0] - offset_imm;

	      address = offset_addr;
	    }
	  else
	    /* Post-indexed: the store itself targets Rn unmodified.  */
	    address = u_regval[0];
	}
    }

  /* The number of bytes written follows from op1.  */
  switch (op1)
    {
      /* Store byte instructions.  */
      case 4:
      case 0:
	record_buf_mem[0] = 1;
	break;
      /* Store half word instructions.  */
      case 1:
      case 5:
	record_buf_mem[0] = 2;
	break;
      /* Store word instructions.  */
      case 2:
      case 6:
	record_buf_mem[0] = 4;
	break;

      default:
	/* NOTE(review): op1 values 3 and 7 would reach this assertion;
	   presumably the dispatcher never routes such encodings here --
	   confirm against thumb2_record_decode_insn_handler.  */
	gdb_assert_not_reached ("no decoding pattern found");
	break;
    }

  /* Record the (length, address) pair for the store, plus Rn, which
     the write-back addressing forms may update.  */
  record_buf_mem[1] = address;
  thumb2_insn_r->mem_rec_count = 1;
  record_buf[0] = reg_rn;
  thumb2_insn_r->reg_rec_count = 1;

  REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
	     record_buf);
  MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
	     record_buf_mem);
  return ARM_RECORD_SUCCESS;
}
13440
13441 /* Handler for thumb2 load memory hints instructions. */
13442
13443 static int
13444 thumb2_record_ld_mem_hints (insn_decode_record *thumb2_insn_r)
13445 {
13446 uint32_t record_buf[8];
13447 uint32_t reg_rt, reg_rn;
13448
13449 reg_rt = bits (thumb2_insn_r->arm_insn, 12, 15);
13450 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13451
13452 if (ARM_PC_REGNUM != reg_rt)
13453 {
13454 record_buf[0] = reg_rt;
13455 record_buf[1] = reg_rn;
13456 record_buf[2] = ARM_PS_REGNUM;
13457 thumb2_insn_r->reg_rec_count = 3;
13458
13459 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13460 record_buf);
13461 return ARM_RECORD_SUCCESS;
13462 }
13463
13464 return ARM_RECORD_FAILURE;
13465 }
13466
13467 /* Handler for thumb2 load word instructions. */
13468
13469 static int
13470 thumb2_record_ld_word (insn_decode_record *thumb2_insn_r)
13471 {
13472 uint32_t opcode1 = 0, opcode2 = 0;
13473 uint32_t record_buf[8];
13474
13475 record_buf[0] = bits (thumb2_insn_r->arm_insn, 12, 15);
13476 record_buf[1] = ARM_PS_REGNUM;
13477 thumb2_insn_r->reg_rec_count = 2;
13478
13479 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13480 record_buf);
13481 return ARM_RECORD_SUCCESS;
13482 }
13483
13484 /* Handler for thumb2 long multiply, long multiply accumulate, and
13485 divide instructions. */
13486
13487 static int
13488 thumb2_record_lmul_lmla_div (insn_decode_record *thumb2_insn_r)
13489 {
13490 uint32_t opcode1 = 0, opcode2 = 0;
13491 uint32_t record_buf[8];
13492 uint32_t reg_src1 = 0;
13493
13494 opcode1 = bits (thumb2_insn_r->arm_insn, 20, 22);
13495 opcode2 = bits (thumb2_insn_r->arm_insn, 4, 7);
13496
13497 if (0 == opcode1 || 2 == opcode1 || (opcode1 >= 4 && opcode1 <= 6))
13498 {
13499 /* Handle SMULL, UMULL, SMULAL. */
13500 /* Handle SMLAL(S), SMULL(S), UMLAL(S), UMULL(S). */
13501 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13502 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13503 record_buf[2] = ARM_PS_REGNUM;
13504 thumb2_insn_r->reg_rec_count = 3;
13505 }
13506 else if (1 == opcode1 || 3 == opcode2)
13507 {
13508 /* Handle SDIV and UDIV. */
13509 record_buf[0] = bits (thumb2_insn_r->arm_insn, 16, 19);
13510 record_buf[1] = bits (thumb2_insn_r->arm_insn, 12, 15);
13511 record_buf[2] = ARM_PS_REGNUM;
13512 thumb2_insn_r->reg_rec_count = 3;
13513 }
13514 else
13515 return ARM_RECORD_FAILURE;
13516
13517 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13518 record_buf);
13519 return ARM_RECORD_SUCCESS;
13520 }
13521
13522 /* Record handler for thumb32 coprocessor instructions. */
13523
13524 static int
13525 thumb2_record_coproc_insn (insn_decode_record *thumb2_insn_r)
13526 {
13527 if (bit (thumb2_insn_r->arm_insn, 25))
13528 return arm_record_coproc_data_proc (thumb2_insn_r);
13529 else
13530 return arm_record_asimd_vfp_coproc (thumb2_insn_r);
13531 }
13532
13533 /* Record handler for advance SIMD structure load/store instructions. */
13534
13535 static int
13536 thumb2_record_asimd_struct_ld_st (insn_decode_record *thumb2_insn_r)
13537 {
13538 struct regcache *reg_cache = thumb2_insn_r->regcache;
13539 uint32_t l_bit, a_bit, b_bits;
13540 uint32_t record_buf[128], record_buf_mem[128];
13541 uint32_t reg_rn, reg_vd, address, f_esize, f_elem;
13542 uint32_t index_r = 0, index_e = 0, bf_regs = 0, index_m = 0, loop_t = 0;
13543 uint8_t f_ebytes;
13544
13545 l_bit = bit (thumb2_insn_r->arm_insn, 21);
13546 a_bit = bit (thumb2_insn_r->arm_insn, 23);
13547 b_bits = bits (thumb2_insn_r->arm_insn, 8, 11);
13548 reg_rn = bits (thumb2_insn_r->arm_insn, 16, 19);
13549 reg_vd = bits (thumb2_insn_r->arm_insn, 12, 15);
13550 reg_vd = (bit (thumb2_insn_r->arm_insn, 22) << 4) | reg_vd;
13551 f_ebytes = (1 << bits (thumb2_insn_r->arm_insn, 6, 7));
13552 f_esize = 8 * f_ebytes;
13553 f_elem = 8 / f_ebytes;
13554
13555 if (!l_bit)
13556 {
13557 ULONGEST u_regval = 0;
13558 regcache_raw_read_unsigned (reg_cache, reg_rn, &u_regval);
13559 address = u_regval;
13560
13561 if (!a_bit)
13562 {
13563 /* Handle VST1. */
13564 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13565 {
13566 if (b_bits == 0x07)
13567 bf_regs = 1;
13568 else if (b_bits == 0x0a)
13569 bf_regs = 2;
13570 else if (b_bits == 0x06)
13571 bf_regs = 3;
13572 else if (b_bits == 0x02)
13573 bf_regs = 4;
13574 else
13575 bf_regs = 0;
13576
13577 for (index_r = 0; index_r < bf_regs; index_r++)
13578 {
13579 for (index_e = 0; index_e < f_elem; index_e++)
13580 {
13581 record_buf_mem[index_m++] = f_ebytes;
13582 record_buf_mem[index_m++] = address;
13583 address = address + f_ebytes;
13584 thumb2_insn_r->mem_rec_count += 1;
13585 }
13586 }
13587 }
13588 /* Handle VST2. */
13589 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13590 {
13591 if (b_bits == 0x09 || b_bits == 0x08)
13592 bf_regs = 1;
13593 else if (b_bits == 0x03)
13594 bf_regs = 2;
13595 else
13596 bf_regs = 0;
13597
13598 for (index_r = 0; index_r < bf_regs; index_r++)
13599 for (index_e = 0; index_e < f_elem; index_e++)
13600 {
13601 for (loop_t = 0; loop_t < 2; loop_t++)
13602 {
13603 record_buf_mem[index_m++] = f_ebytes;
13604 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13605 thumb2_insn_r->mem_rec_count += 1;
13606 }
13607 address = address + (2 * f_ebytes);
13608 }
13609 }
13610 /* Handle VST3. */
13611 else if ((b_bits & 0x0e) == 0x04)
13612 {
13613 for (index_e = 0; index_e < f_elem; index_e++)
13614 {
13615 for (loop_t = 0; loop_t < 3; loop_t++)
13616 {
13617 record_buf_mem[index_m++] = f_ebytes;
13618 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13619 thumb2_insn_r->mem_rec_count += 1;
13620 }
13621 address = address + (3 * f_ebytes);
13622 }
13623 }
13624 /* Handle VST4. */
13625 else if (!(b_bits & 0x0e))
13626 {
13627 for (index_e = 0; index_e < f_elem; index_e++)
13628 {
13629 for (loop_t = 0; loop_t < 4; loop_t++)
13630 {
13631 record_buf_mem[index_m++] = f_ebytes;
13632 record_buf_mem[index_m++] = address + (loop_t * f_ebytes);
13633 thumb2_insn_r->mem_rec_count += 1;
13634 }
13635 address = address + (4 * f_ebytes);
13636 }
13637 }
13638 }
13639 else
13640 {
13641 uint8_t bft_size = bits (thumb2_insn_r->arm_insn, 10, 11);
13642
13643 if (bft_size == 0x00)
13644 f_ebytes = 1;
13645 else if (bft_size == 0x01)
13646 f_ebytes = 2;
13647 else if (bft_size == 0x02)
13648 f_ebytes = 4;
13649 else
13650 f_ebytes = 0;
13651
13652 /* Handle VST1. */
13653 if (!(b_bits & 0x0b) || b_bits == 0x08)
13654 thumb2_insn_r->mem_rec_count = 1;
13655 /* Handle VST2. */
13656 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09)
13657 thumb2_insn_r->mem_rec_count = 2;
13658 /* Handle VST3. */
13659 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a)
13660 thumb2_insn_r->mem_rec_count = 3;
13661 /* Handle VST4. */
13662 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b)
13663 thumb2_insn_r->mem_rec_count = 4;
13664
13665 for (index_m = 0; index_m < thumb2_insn_r->mem_rec_count; index_m++)
13666 {
13667 record_buf_mem[index_m] = f_ebytes;
13668 record_buf_mem[index_m] = address + (index_m * f_ebytes);
13669 }
13670 }
13671 }
13672 else
13673 {
13674 if (!a_bit)
13675 {
13676 /* Handle VLD1. */
13677 if (b_bits == 0x02 || b_bits == 0x0a || (b_bits & 0x0e) == 0x06)
13678 thumb2_insn_r->reg_rec_count = 1;
13679 /* Handle VLD2. */
13680 else if (b_bits == 0x03 || (b_bits & 0x0e) == 0x08)
13681 thumb2_insn_r->reg_rec_count = 2;
13682 /* Handle VLD3. */
13683 else if ((b_bits & 0x0e) == 0x04)
13684 thumb2_insn_r->reg_rec_count = 3;
13685 /* Handle VLD4. */
13686 else if (!(b_bits & 0x0e))
13687 thumb2_insn_r->reg_rec_count = 4;
13688 }
13689 else
13690 {
13691 /* Handle VLD1. */
13692 if (!(b_bits & 0x0b) || b_bits == 0x08 || b_bits == 0x0c)
13693 thumb2_insn_r->reg_rec_count = 1;
13694 /* Handle VLD2. */
13695 else if ((b_bits & 0x0b) == 0x01 || b_bits == 0x09 || b_bits == 0x0d)
13696 thumb2_insn_r->reg_rec_count = 2;
13697 /* Handle VLD3. */
13698 else if ((b_bits & 0x0b) == 0x02 || b_bits == 0x0a || b_bits == 0x0e)
13699 thumb2_insn_r->reg_rec_count = 3;
13700 /* Handle VLD4. */
13701 else if ((b_bits & 0x0b) == 0x03 || b_bits == 0x0b || b_bits == 0x0f)
13702 thumb2_insn_r->reg_rec_count = 4;
13703
13704 for (index_r = 0; index_r < thumb2_insn_r->reg_rec_count; index_r++)
13705 record_buf[index_r] = reg_vd + ARM_D0_REGNUM + index_r;
13706 }
13707 }
13708
13709 if (bits (thumb2_insn_r->arm_insn, 0, 3) != 15)
13710 {
13711 record_buf[index_r] = reg_rn;
13712 thumb2_insn_r->reg_rec_count += 1;
13713 }
13714
13715 REG_ALLOC (thumb2_insn_r->arm_regs, thumb2_insn_r->reg_rec_count,
13716 record_buf);
13717 MEM_ALLOC (thumb2_insn_r->arm_mems, thumb2_insn_r->mem_rec_count,
13718 record_buf_mem);
13719 return 0;
13720 }
13721
/* Decodes thumb2 instruction type and invokes its record handler.
   Returns the handler's status, or -1 if the encoding matched no
   known group.  */

static unsigned int
thumb2_record_decode_insn_handler (insn_decode_record *thumb2_insn_r)
{
  uint32_t op, op1, op2;

  /* Top-level Thumb-2 encoding fields: op1 (bits 27-28) selects the
     major group, op2 (bits 20-26) the subgroup, and op (bit 15)
     separates branches from data processing within op1 == 0x02.  */
  op = bit (thumb2_insn_r->arm_insn, 15);
  op1 = bits (thumb2_insn_r->arm_insn, 27, 28);
  op2 = bits (thumb2_insn_r->arm_insn, 20, 26);

  if (op1 == 0x01)
    {
      if (!(op2 & 0x64 ))
        {
          /* Load/store multiple instruction.  */
          return thumb2_record_ld_st_multiple (thumb2_insn_r);
        }
      else if (!((op2 & 0x64) ^ 0x04))
        {
          /* Load/store (dual/exclusive) and table branch instruction.  */
          return thumb2_record_ld_st_dual_ex_tbb (thumb2_insn_r);
        }
      else if (!((op2 & 0x20) ^ 0x20))
        {
          /* Data-processing (shifted register).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }
  else if (op1 == 0x02)
    {
      if (op)
        {
          /* Branches and miscellaneous control instructions.  */
          return thumb2_record_branch_misc_cntrl (thumb2_insn_r);
        }
      else if (op2 & 0x20)
        {
          /* Data-processing (plain binary immediate) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else
        {
          /* Data-processing (modified immediate).  */
          return thumb2_record_data_proc_sreg_mimm (thumb2_insn_r);
        }
    }
  else if (op1 == 0x03)
    {
      if (!(op2 & 0x71 ))
        {
          /* Store single data item.  */
          return thumb2_record_str_single_data (thumb2_insn_r);
        }
      else if (!((op2 & 0x71) ^ 0x10))
        {
          /* Advanced SIMD or structure load/store instructions.  */
          return thumb2_record_asimd_struct_ld_st (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x01))
        {
          /* Load byte, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x03))
        {
          /* Load halfword, memory hints instruction.  */
          return thumb2_record_ld_mem_hints (thumb2_insn_r);
        }
      else if (!((op2 & 0x67) ^ 0x05))
        {
          /* Load word instruction.  */
          return thumb2_record_ld_word (thumb2_insn_r);
        }
      else if (!((op2 & 0x70) ^ 0x20))
        {
          /* Data-processing (register) instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x30))
        {
          /* Multiply, multiply accumulate, abs diff instruction.  */
          return thumb2_record_ps_dest_generic (thumb2_insn_r);
        }
      else if (!((op2 & 0x78) ^ 0x38))
        {
          /* Long multiply, long multiply accumulate, and divide.  */
          return thumb2_record_lmul_lmla_div (thumb2_insn_r);
        }
      else if (op2 & 0x40)
        {
          /* Co-processor instructions.  */
          return thumb2_record_coproc_insn (thumb2_insn_r);
        }
    }

  /* NOTE(review): -1 is returned from a function declared unsigned
     int; callers compare the result against ARM_RECORD_SUCCESS, so
     the wrap-around is benign but worth tidying.  */
  return -1;
}
13825
13826 /* Extracts arm/thumb/thumb2 insn depending on the size, and returns 0 on success
13827 and positive val on fauilure. */
13828
13829 static int
13830 extract_arm_insn (insn_decode_record *insn_record, uint32_t insn_size)
13831 {
13832 gdb_byte buf[insn_size];
13833
13834 memset (&buf[0], 0, insn_size);
13835
13836 if (target_read_memory (insn_record->this_addr, &buf[0], insn_size))
13837 return 1;
13838 insn_record->arm_insn = (uint32_t) extract_unsigned_integer (&buf[0],
13839 insn_size,
13840 gdbarch_byte_order_for_code (insn_record->gdbarch));
13841 return 0;
13842 }
13843
13844 typedef int (*sti_arm_hdl_fp_t) (insn_decode_record*);
13845
13846 /* Decode arm/thumb insn depending on condition cods and opcodes; and
13847 dispatch it. */
13848
13849 static int
13850 decode_insn (insn_decode_record *arm_record, record_type_t record_type,
13851 uint32_t insn_size)
13852 {
13853
13854 /* (Starting from numerical 0); bits 25, 26, 27 decodes type of arm instruction. */
13855 static const sti_arm_hdl_fp_t arm_handle_insn[8] =
13856 {
13857 arm_record_data_proc_misc_ld_str, /* 000. */
13858 arm_record_data_proc_imm, /* 001. */
13859 arm_record_ld_st_imm_offset, /* 010. */
13860 arm_record_ld_st_reg_offset, /* 011. */
13861 arm_record_ld_st_multiple, /* 100. */
13862 arm_record_b_bl, /* 101. */
13863 arm_record_asimd_vfp_coproc, /* 110. */
13864 arm_record_coproc_data_proc /* 111. */
13865 };
13866
13867 /* (Starting from numerical 0); bits 13,14,15 decodes type of thumb instruction. */
13868 static const sti_arm_hdl_fp_t thumb_handle_insn[8] =
13869 { \
13870 thumb_record_shift_add_sub, /* 000. */
13871 thumb_record_add_sub_cmp_mov, /* 001. */
13872 thumb_record_ld_st_reg_offset, /* 010. */
13873 thumb_record_ld_st_imm_offset, /* 011. */
13874 thumb_record_ld_st_stack, /* 100. */
13875 thumb_record_misc, /* 101. */
13876 thumb_record_ldm_stm_swi, /* 110. */
13877 thumb_record_branch /* 111. */
13878 };
13879
13880 uint32_t ret = 0; /* return value: negative:failure 0:success. */
13881 uint32_t insn_id = 0;
13882
13883 if (extract_arm_insn (arm_record, insn_size))
13884 {
13885 if (record_debug)
13886 {
13887 printf_unfiltered (_("Process record: error reading memory at "
13888 "addr %s len = %d.\n"),
13889 paddress (arm_record->gdbarch, arm_record->this_addr), insn_size);
13890 }
13891 return -1;
13892 }
13893 else if (ARM_RECORD == record_type)
13894 {
13895 arm_record->cond = bits (arm_record->arm_insn, 28, 31);
13896 insn_id = bits (arm_record->arm_insn, 25, 27);
13897 ret = arm_record_extension_space (arm_record);
13898 /* If this insn has fallen into extension space
13899 then we need not decode it anymore. */
13900 if (ret != -1 && !INSN_RECORDED(arm_record))
13901 {
13902 ret = arm_handle_insn[insn_id] (arm_record);
13903 }
13904 }
13905 else if (THUMB_RECORD == record_type)
13906 {
13907 /* As thumb does not have condition codes, we set negative. */
13908 arm_record->cond = -1;
13909 insn_id = bits (arm_record->arm_insn, 13, 15);
13910 ret = thumb_handle_insn[insn_id] (arm_record);
13911 }
13912 else if (THUMB2_RECORD == record_type)
13913 {
13914 /* As thumb does not have condition codes, we set negative. */
13915 arm_record->cond = -1;
13916
13917 /* Swap first half of 32bit thumb instruction with second half. */
13918 arm_record->arm_insn
13919 = (arm_record->arm_insn >> 16) | (arm_record->arm_insn << 16);
13920
13921 insn_id = thumb2_record_decode_insn_handler (arm_record);
13922
13923 if (insn_id != ARM_RECORD_SUCCESS)
13924 {
13925 arm_record_unsupported_insn (arm_record);
13926 ret = -1;
13927 }
13928 }
13929 else
13930 {
13931 /* Throw assertion. */
13932 gdb_assert_not_reached ("not a valid instruction, could not decode");
13933 }
13934
13935 return ret;
13936 }
13937
13938
13939 /* Cleans up local record registers and memory allocations. */
13940
13941 static void
13942 deallocate_reg_mem (insn_decode_record *record)
13943 {
13944 xfree (record->arm_regs);
13945 xfree (record->arm_mems);
13946 }
13947
13948
13949 /* Parse the current instruction and record the values of the registers and
13950 memory that will be changed in current instruction to record_arch_list".
13951 Return -1 if something is wrong. */
13952
13953 int
13954 arm_process_record (struct gdbarch *gdbarch, struct regcache *regcache,
13955 CORE_ADDR insn_addr)
13956 {
13957
13958 enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
13959 uint32_t no_of_rec = 0;
13960 uint32_t ret = 0; /* return value: -1:record failure ; 0:success */
13961 ULONGEST t_bit = 0, insn_id = 0;
13962
13963 ULONGEST u_regval = 0;
13964
13965 insn_decode_record arm_record;
13966
13967 memset (&arm_record, 0, sizeof (insn_decode_record));
13968 arm_record.regcache = regcache;
13969 arm_record.this_addr = insn_addr;
13970 arm_record.gdbarch = gdbarch;
13971
13972
13973 if (record_debug > 1)
13974 {
13975 fprintf_unfiltered (gdb_stdlog, "Process record: arm_process_record "
13976 "addr = %s\n",
13977 paddress (gdbarch, arm_record.this_addr));
13978 }
13979
13980 if (extract_arm_insn (&arm_record, 2))
13981 {
13982 if (record_debug)
13983 {
13984 printf_unfiltered (_("Process record: error reading memory at "
13985 "addr %s len = %d.\n"),
13986 paddress (arm_record.gdbarch,
13987 arm_record.this_addr), 2);
13988 }
13989 return -1;
13990 }
13991
13992 /* Check the insn, whether it is thumb or arm one. */
13993
13994 t_bit = arm_psr_thumb_bit (arm_record.gdbarch);
13995 regcache_raw_read_unsigned (arm_record.regcache, ARM_PS_REGNUM, &u_regval);
13996
13997
13998 if (!(u_regval & t_bit))
13999 {
14000 /* We are decoding arm insn. */
14001 ret = decode_insn (&arm_record, ARM_RECORD, ARM_INSN_SIZE_BYTES);
14002 }
14003 else
14004 {
14005 insn_id = bits (arm_record.arm_insn, 11, 15);
14006 /* is it thumb2 insn? */
14007 if ((0x1D == insn_id) || (0x1E == insn_id) || (0x1F == insn_id))
14008 {
14009 ret = decode_insn (&arm_record, THUMB2_RECORD,
14010 THUMB2_INSN_SIZE_BYTES);
14011 }
14012 else
14013 {
14014 /* We are decoding thumb insn. */
14015 ret = decode_insn (&arm_record, THUMB_RECORD, THUMB_INSN_SIZE_BYTES);
14016 }
14017 }
14018
14019 if (0 == ret)
14020 {
14021 /* Record registers. */
14022 record_full_arch_list_add_reg (arm_record.regcache, ARM_PC_REGNUM);
14023 if (arm_record.arm_regs)
14024 {
14025 for (no_of_rec = 0; no_of_rec < arm_record.reg_rec_count; no_of_rec++)
14026 {
14027 if (record_full_arch_list_add_reg
14028 (arm_record.regcache , arm_record.arm_regs[no_of_rec]))
14029 ret = -1;
14030 }
14031 }
14032 /* Record memories. */
14033 if (arm_record.arm_mems)
14034 {
14035 for (no_of_rec = 0; no_of_rec < arm_record.mem_rec_count; no_of_rec++)
14036 {
14037 if (record_full_arch_list_add_mem
14038 ((CORE_ADDR)arm_record.arm_mems[no_of_rec].addr,
14039 arm_record.arm_mems[no_of_rec].len))
14040 ret = -1;
14041 }
14042 }
14043
14044 if (record_full_arch_list_add_end ())
14045 ret = -1;
14046 }
14047
14048
14049 deallocate_reg_mem (&arm_record);
14050
14051 return ret;
14052 }
14053