/* gdb/arch/arm-get-next-pcs.c  */
1 /* Common code for ARM software single stepping support.
2
3 Copyright (C) 1988-2023 Free Software Foundation, Inc.
4
5 This file is part of GDB.
6
7 This program is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program. If not, see <http://www.gnu.org/licenses/>. */
19
20 #include "gdbsupport/common-defs.h"
21 #include "gdbsupport/gdb_vecs.h"
22 #include "gdbsupport/common-regcache.h"
23 #include "arm.h"
24 #include "arm-get-next-pcs.h"
25 #include "count-one-bits.h"
26
27 /* See arm-get-next-pcs.h. */
28
29 void
30 arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
31 struct arm_get_next_pcs_ops *ops,
32 int byte_order,
33 int byte_order_for_code,
34 int has_thumb2_breakpoint,
35 struct regcache *regcache)
36 {
37 self->ops = ops;
38 self->byte_order = byte_order;
39 self->byte_order_for_code = byte_order_for_code;
40 self->has_thumb2_breakpoint = has_thumb2_breakpoint;
41 self->regcache = regcache;
42 }
43
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  The end of the sequence address is
   added to the next_pcs list.

   SELF supplies the register cache, the code byte order and the memory
   reading callback.  Returns one or two breakpoint addresses (each with
   the Thumb bit set): the instruction following the sequence, and, when
   the sequence contains a single conditional branch, that branch's
   destination.  Returns an empty vector when the PC is not at the start
   of a sequence this function can handle, in which case the caller
   falls back to ordinary single-step decoding.  */

static std::vector<CORE_ADDR>
thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block.
     ITSTATE is split across CPSR bits 15:10 and 26:25; reassemble it
     here.  A nonzero low nibble means an IT block is active.  */
  status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return {};

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     These are 32-bit Thumb-2 encodings, so a 16-bit insn disqualifies.  */
  insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  if (thumb_insn_size (insn1) != 4)
    return {};

  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  if (!((insn1 & 0xfff0) == 0xe850
	|| ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
	{
	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
	    {
	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      /* 16-bit conditional branch: destination is LOC + 2 plus
		 the sign-extended imm8 shifted left by one.  */
	      breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb_instruction_changes_pc (insn1))
	    return {};
	}
      else
	{
	  /* 32-bit instruction: fetch its second halfword.  */
	  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

	  loc += 2;

	  /* Assume that there is at most one conditional branch in the
	     atomic sequence.  If a conditional branch is found, put a
	     breakpoint in its destination address.  */
	  if ((insn1 & 0xf800) == 0xf000
	      && (insn2 & 0xd000) == 0x8000
	      && (insn1 & 0x0380) != 0x0380)
	    {
	      int sign, j1, j2, imm1, imm2;
	      unsigned int offset;

	      /* Reassemble the branch offset from the fields scattered
		 over the two halfwords of the 32-bit encoding.  */
	      sign = sbits (insn1, 10, 10);
	      imm1 = bits (insn1, 0, 5);
	      imm2 = bits (insn2, 0, 10);
	      j1 = bit (insn2, 13);
	      j2 = bit (insn2, 11);

	      offset = (sign << 20) + (j2 << 19) + (j1 << 18);
	      offset += (imm1 << 12) + (imm2 << 1);

	      if (last_breakpoint > 0)
		return {}; /* More than one conditional branch found,
			      fallback to the standard code.  */

	      breaks[1] = loc + offset;
	      last_breakpoint++;
	    }

	  /* We do not support atomic sequences that use any *other*
	     instructions but conditional branches to change the PC.
	     Fall back to standard code to avoid losing control of
	     execution.  */
	  else if (thumb2_instruction_changes_pc (insn1, insn2))
	    return {};

	  /* If we find a strex{,b,h,d}, we're done.  */
	  if ((insn1 & 0xfff0) == 0xe840
	      || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
	    break;
	}
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (MAKE_THUMB_ADDR (breaks[index]));

  return next_pcs;
}
181
/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction.  If such a sequence
   is found, attempt to step through it.  The end of the sequence address is
   added to the next_pcs list.

   ARM-mode (32-bit instruction) counterpart of
   thumb_deal_with_atomic_sequence_raw.  Returns one or two breakpoint
   addresses: the instruction following the sequence, and, when the
   sequence contains a single conditional branch, that branch's
   destination.  Returns an empty vector when no handleable sequence
   starts at the current PC.  */

static std::vector<CORE_ADDR>
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed).  */
  const int atomic_sequence_length = 16; /* Instruction sequence length.  */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions.  */
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions.  */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
	 sequence.  If a conditional branch is found, put a breakpoint in
	 its destination address.  */
      if (bits (insn, 24, 27) == 0xa)
	{
	  if (last_breakpoint > 0)
	    return {}; /* More than one conditional branch found, fallback
			  to the standard single-step code.  */

	  /* LOC has already been advanced past the branch, so compute
	     the destination relative to the branch's own address.  */
	  breaks[1] = BranchDest (loc - 4, insn);
	  last_breakpoint++;
	}

      /* We do not support atomic sequences that use any *other* instructions
	 but conditional branches to change the PC.  Fall back to standard
	 code to avoid losing control of execution.  */
      else if (arm_instruction_changes_pc (insn))
	return {};

      /* If we find a strex{,b,h,d}, we're done.  */
      if ((insn & 0xff9000f0) == 0xe1800090)
	break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence.  */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence.  */
  breaks[0] = loc;

  /* Check for duplicated breakpoints.  Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence.  */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
	  || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted.  */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (breaks[index]);

  return next_pcs;
}
263
/* Find the next possible PCs for thumb mode.

   Decode the Thumb instruction at SELF's current PC and return the
   address(es) that execution may reach next.  Handles IT blocks (when
   SELF->has_thumb2_breakpoint is set), POP into PC, conditional and
   unconditional branches, 32-bit B/BL/BLX and loads into PC, table
   branches (TBB/TBH), BX/BLX register, MOV PC, and CBZ/CBNZ.  Returned
   addresses carry the Thumb bit (via MAKE_THUMB_ADDR) when the
   destination remains in Thumb state.  */

static std::vector<CORE_ADDR>
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  unsigned long pc_val = ((unsigned long) pc) + 4;	/* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2;		/* Default is next instruction.  */
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  std::vector<CORE_ADDR> next_pcs;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support.  There are eight bits in
     the CPSR which describe conditional execution state.  Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain.  The high three bits describe the
     base condition.  One of the low four bits will be set if an IT
     block is active.  These bits read as zero on earlier
     processors.  */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling.  On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint.  Unlike BKPT, IT
     can disable execution of the undefined instruction.  So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction.  Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints.  */

  if (self->has_thumb2_breakpoint)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
	{
	  /* An IT instruction.  Because this instruction does not
	     modify the flags, we can accurately predict the next
	     executed instruction.  */
	  itstate = inst1 & 0x00ff;
	  pc += thumb_insn_size (inst1);

	  /* Skip instructions in the block whose condition is false.  */
	  while (itstate != 0 && ! condition_true (itstate >> 4, status))
	    {
	      inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);
	    }

	  next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	  return next_pcs;
	}
      else if (itstate != 0)
	{
	  /* We are in a conditional block.  Check the condition.  */
	  if (! condition_true (itstate >> 4, status))
	    {
	      /* Advance to the next executed instruction.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      while (itstate != 0 && ! condition_true (itstate >> 4, status))
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));
	      return next_pcs;
	    }
	  else if ((itstate & 0x0f) == 0x08)
	    {
	      /* This is the last instruction of the conditional
		 block, and it is executed.  We can handle it normally
		 because the following instruction is not conditional,
		 and we must handle it normally because it is
		 permitted to branch.  Fall through.  */
	    }
	  else
	    {
	      int cond_negated;

	      /* There are conditional instructions after this one.
		 If this instruction modifies the flags, then we can
		 not predict what the next executed instruction will
		 be.  Fortunately, this instruction is architecturally
		 forbidden to branch; we know it will fall through.
		 Start by skipping past it.  */
	      pc += thumb_insn_size (inst1);
	      itstate = thumb_advance_itstate (itstate);

	      /* Set a breakpoint on the following instruction.  */
	      gdb_assert ((itstate & 0x0f) != 0);
	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      cond_negated = (itstate >> 4) & 1;

	      /* Skip all following instructions with the same
		 condition.  If there is a later instruction in the IT
		 block with the opposite condition, set the other
		 breakpoint there.  If not, then set a breakpoint on
		 the instruction after the IT block.  */
	      do
		{
		  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
		  pc += thumb_insn_size (inst1);
		  itstate = thumb_advance_itstate (itstate);
		}
	      while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

	      next_pcs.push_back (MAKE_THUMB_ADDR (pc));

	      return next_pcs;
	    }
	}
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block.  Check the condition.  */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
	{
	  /* Advance to the next instruction.  All the 32-bit
	     instructions share a common prefix.  */
	  next_pcs.push_back (MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
	}

      return next_pcs;

      /* Otherwise, handle the instruction normally.  */
    }

  if ((inst1 & 0xff00) == 0xbd00)	/* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack.  It's stored above
	 all of the other registers.  */
      unsigned long offset
	= count_one_bits (bits (inst1, 0, 7)) * ARM_INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000)	/* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f)  /* 0x0f = SWI */
	{
	  nextpc = self->ops->syscall_next_pc (self);
	}
      else if (cond != 0x0f && condition_true (cond, status))
	nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000)	/* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction.  */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
	{
	  /* Branches and miscellaneous control instructions.  */

	  if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
	    {
	      /* B, BL, BLX.  */
	      int j1, j2, imm1, imm2;

	      imm1 = sbits (inst1, 0, 10);
	      imm2 = bits (inst2, 0, 10);
	      j1 = bit (inst2, 13);
	      j2 = bit (inst2, 11);

	      unsigned long offset = ((imm1 << 12) + (imm2 << 1));
	      /* J1/J2 are stored inverted relative to the sign bit;
		 flip them into the sign-extended offset.  */
	      offset ^= ((!j2) << 22) | ((!j1) << 23);

	      nextpc = pc_val + offset;
	      /* For BLX make sure to clear the low bits.  */
	      if (bit (inst2, 12) == 0)
		nextpc = nextpc & 0xfffffffc;
	    }
	  else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
	    {
	      /* SUBS PC, LR, #imm8.  */
	      nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
	      nextpc -= inst2 & 0x00ff;
	    }
	  else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
	    {
	      /* Conditional branch.  */
	      if (condition_true (bits (inst1, 6, 9), status))
		{
		  int sign, j1, j2, imm1, imm2;

		  sign = sbits (inst1, 10, 10);
		  imm1 = bits (inst1, 0, 5);
		  imm2 = bits (inst2, 0, 10);
		  j1 = bit (inst2, 13);
		  j2 = bit (inst2, 11);

		  unsigned long offset
		    = (sign << 20) + (j2 << 19) + (j1 << 18);
		  offset += (imm1 << 12) + (imm2 << 1);

		  nextpc = pc_val + offset;
		}
	    }
	}
      else if ((inst1 & 0xfe50) == 0xe810)
	{
	  /* Load multiple or RFE.  */
	  int rn, offset, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  if (bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* LDMIA or POP */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      /* PC is loaded from the highest address in the register
		 list: base + 4 * (popcount - 1).  */
	      offset = count_one_bits (inst2) * 4 - 4;
	    }
	  else if (!bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* LDMDB */
	      if (!bit (inst2, 15))
		load_pc = 0;
	      offset = -4;
	    }
	  else if (bit (inst1, 7) && bit (inst1, 8))
	    {
	      /* RFEIA */
	      offset = 0;
	    }
	  else if (!bit (inst1, 7) && !bit (inst1, 8))
	    {
	      /* RFEDB */
	      offset = -8;
	    }
	  else
	    load_pc = 0;

	  if (load_pc)
	    {
	      CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
	      nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
	    }
	}
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
	{
	  /* MOV PC or MOVS PC.  */
	  nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	}
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
	{
	  /* LDR PC.  */
	  CORE_ADDR base;
	  int rn, load_pc = 1;

	  rn = bits (inst1, 0, 3);
	  base = regcache_raw_get_unsigned (regcache, rn);
	  if (rn == ARM_PC_REGNUM)
	    {
	      /* PC-relative load: the base is the word-aligned PC.  */
	      base = (base + 4) & ~(CORE_ADDR) 0x3;
	      if (bit (inst1, 7))
		base += bits (inst2, 0, 11);
	      else
		base -= bits (inst2, 0, 11);
	    }
	  else if (bit (inst1, 7))
	    base += bits (inst2, 0, 11);
	  else if (bit (inst2, 11))
	    {
	      if (bit (inst2, 10))
		{
		  if (bit (inst2, 9))
		    base += bits (inst2, 0, 7);
		  else
		    base -= bits (inst2, 0, 7);
		}
	    }
	  else if ((inst2 & 0x0fc0) == 0x0000)
	    {
	      /* Register offset, optionally shifted left by 0-3.  */
	      int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
	      base += regcache_raw_get_unsigned (regcache, rm) << shift;
	    }
	  else
	    /* Reserved.  */
	    load_pc = 0;

	  if (load_pc)
	    nextpc
	      = self->ops->read_mem_uint (base, 4, byte_order);
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
	{
	  /* TBB.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
	  nextpc = pc_val + length;
	}
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
	{
	  /* TBH.  */
	  CORE_ADDR tbl_reg, table, offset, length;

	  tbl_reg = bits (inst1, 0, 3);
	  if (tbl_reg == 0x0f)
	    table = pc + 4;  /* Regcache copy of PC isn't right yet.  */
	  else
	    table = regcache_raw_get_unsigned (regcache, tbl_reg);

	  offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
	  length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
	  nextpc = pc_val + length;
	}
    }
  else if ((inst1 & 0xff00) == 0x4700)	/* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687)	/* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
	nextpc = pc_val;
      else
	nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ.  */
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      /* Bit 11 distinguishes CBNZ (taken when REG != 0) from CBZ
	 (taken when REG == 0).  */
      if (bit (inst1, 11) && reg != 0)
	nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
	nextpc = pc_val + imm;
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
638
/* Get the raw next possible addresses.  PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.

   Decodes the 32-bit ARM instruction at the current PC: data-processing
   writes to PC (including BX/BLX register), loads into PC, LDM with PC
   in the register list, B/BL, BLX immediate (which switches to Thumb),
   and SWI (delegated to the client's syscall_next_pc hook).  */

static std::vector<CORE_ADDR>
arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  std::vector<CORE_ADDR> next_pcs;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Condition field == INST_NV selects the unconditional instruction
     space; handle the few encodings there that can change the PC.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:			/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only instructions whose destination register (bits
	       12-15) is the PC can change the flow of control.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		next_pcs.push_back (nextpc);
		return next_pcs;
	      }

	    /* Compute the operands of the data-processing instruction
	       and emulate its effect on the PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Immediate operand: 8-bit value rotated right by
		   twice the 4-bit rotate field.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Flag-setting comparisons do not write the PC.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up -- PC is loaded from the highest address
			 covered by the register list.  */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      offset = count_one_bits_l (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    nextpc = self->ops->syscall_next_pc (self);
	  }
	  break;

	default:
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}
909
910 /* See arm-get-next-pcs.h. */
911
912 std::vector<CORE_ADDR>
913 arm_get_next_pcs (struct arm_get_next_pcs *self)
914 {
915 std::vector<CORE_ADDR> next_pcs;
916
917 if (self->ops->is_thumb (self))
918 {
919 next_pcs = thumb_deal_with_atomic_sequence_raw (self);
920 if (next_pcs.empty ())
921 next_pcs = thumb_get_next_pcs_raw (self);
922 }
923 else
924 {
925 next_pcs = arm_deal_with_atomic_sequence_raw (self);
926 if (next_pcs.empty ())
927 next_pcs = arm_get_next_pcs_raw (self);
928 }
929
930 if (self->ops->fixup != NULL)
931 {
932 for (CORE_ADDR &pc_ref : next_pcs)
933 pc_ref = self->ops->fixup (self, pc_ref);
934 }
935
936 return next_pcs;
937 }