/* Common code for ARM software single stepping support.

   Copyright (C) 1988-2023 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program. If not, see <http://www.gnu.org/licenses/>. */

#include "gdbsupport/common-defs.h"
#include "gdbsupport/gdb_vecs.h"
#include "gdbsupport/common-regcache.h"
#include "arm.h"
#include "arm-get-next-pcs.h"
#include "count-one-bits.h"

/* See arm-get-next-pcs.h. */

void
arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
                       struct arm_get_next_pcs_ops *ops,
                       int byte_order,
                       int byte_order_for_code,
                       int has_thumb2_breakpoint,
                       struct regcache *regcache)
{
  self->ops = ops;
  self->byte_order = byte_order;
  self->byte_order_for_code = byte_order_for_code;
  self->has_thumb2_breakpoint = has_thumb2_breakpoint;
  self->regcache = regcache;
}

/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
   is found, attempt to step through it. The end of the sequence address is
   added to the next_pcs list. */

static std::vector<CORE_ADDR>
thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned short insn1, insn2;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
  const int atomic_sequence_length = 16; /* Instruction sequence length. */
  ULONGEST status, itstate;

  /* We currently do not support atomic sequences within an IT block. */
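  /* ITSTATE is split between CPSR bits [15:10] and [26:25]; the
     expression below splices the two fields back into a single 8-bit
     value, whose low four bits are non-zero while an IT block is
     active. */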
  status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
  if (itstate & 0x0f)
    return {};

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
  insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
  if (thumb_insn_size (insn1) != 4)
    return {};

  insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

  loc += 2;
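  /* 0xe85x in the first halfword is LDREX; 0xe8dx covers the
     LDREXB/LDREXH/LDREXD encodings, which bits [7:6] of the second
     halfword distinguish from the table-branch instructions sharing
     that prefix. */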
  if (!((insn1 & 0xfff0) == 0xe850
        || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions. */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
      loc += 2;

      if (thumb_insn_size (insn1) != 4)
        {
          /* Assume that there is at most one conditional branch in the
             atomic sequence. If a conditional branch is found, put a
             breakpoint in its destination address. */
          if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
            {
              if (last_breakpoint > 0)
                return {}; /* More than one conditional branch found,
                              fall back to the standard code. */

              breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
              last_breakpoint++;
            }

          /* We do not support atomic sequences that use any *other*
             instructions but conditional branches to change the PC.
             Fall back to standard code to avoid losing control of
             execution. */
          else if (thumb_instruction_changes_pc (insn1))
            return {};
        }
      else
        {
          insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);

          loc += 2;

          /* Assume that there is at most one conditional branch in the
             atomic sequence. If a conditional branch is found, put a
             breakpoint in its destination address. */
          if ((insn1 & 0xf800) == 0xf000
              && (insn2 & 0xd000) == 0x8000
              && (insn1 & 0x0380) != 0x0380)
            {
              int sign, j1, j2, imm1, imm2;
              unsigned int offset;

              sign = sbits (insn1, 10, 10);
              imm1 = bits (insn1, 0, 5);
              imm2 = bits (insn2, 0, 10);
              j1 = bit (insn2, 13);
              j2 = bit (insn2, 11);

              offset = (sign << 20) + (j2 << 19) + (j1 << 18);
              offset += (imm1 << 12) + (imm2 << 1);

              if (last_breakpoint > 0)
                return {}; /* More than one conditional branch found,
                              fall back to the standard code. */

              breaks[1] = loc + offset;
              last_breakpoint++;
            }

          /* We do not support atomic sequences that use any *other*
             instructions but conditional branches to change the PC.
             Fall back to standard code to avoid losing control of
             execution. */
          else if (thumb2_instruction_changes_pc (insn1, insn2))
            return {};

          /* If we find a strex{,b,h,d}, we're done. */
          if ((insn1 & 0xfff0) == 0xe840
              || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
            break;
        }
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence. */
  breaks[0] = loc;

  /* Check for duplicated breakpoints. Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence. */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
          || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted. */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (MAKE_THUMB_ADDR (breaks[index]));

  return next_pcs;
}

/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
   instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
   is found, attempt to step through it. The end of the sequence address is
   added to the next_pcs list. */

static std::vector<CORE_ADDR>
arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self)
{
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR breaks[2] = {CORE_ADDR_MAX, CORE_ADDR_MAX};
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  CORE_ADDR loc = pc;
  unsigned int insn;
  int insn_count;
  int index;
  int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
  const int atomic_sequence_length = 16; /* Instruction sequence length. */

  /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
     Note that we do not currently support conditionally executed atomic
     instructions. */
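  /* The mask below accepts the LDREX, LDREXB, LDREXH and LDREXD
     encodings (the size bits are left unconstrained), but only with an
     AL condition field, matching the restriction above. */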
  insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

  loc += 4;
  if ((insn & 0xff9000f0) != 0xe1900090)
    return {};

  /* Assume that no atomic sequence is longer than "atomic_sequence_length"
     instructions. */
  for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
    {
      insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);

      loc += 4;

      /* Assume that there is at most one conditional branch in the atomic
         sequence. If a conditional branch is found, put a breakpoint in
         its destination address. */
      if (bits (insn, 24, 27) == 0xa)
        {
          if (last_breakpoint > 0)
            return {}; /* More than one conditional branch found, fall back
                          to the standard single-step code. */

          breaks[1] = BranchDest (loc - 4, insn);
          last_breakpoint++;
        }

      /* We do not support atomic sequences that use any *other* instructions
         but conditional branches to change the PC. Fall back to standard
         code to avoid losing control of execution. */
      else if (arm_instruction_changes_pc (insn))
        return {};

      /* If we find a strex{,b,h,d}, we're done. */
      if ((insn & 0xff9000f0) == 0xe1800090)
        break;
    }

  /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
  if (insn_count == atomic_sequence_length)
    return {};

  /* Insert a breakpoint right after the end of the atomic sequence. */
  breaks[0] = loc;

  /* Check for duplicated breakpoints. Check also for a breakpoint
     placed (branch instruction's destination) anywhere in sequence. */
  if (last_breakpoint
      && (breaks[1] == breaks[0]
          || (breaks[1] >= pc && breaks[1] < loc)))
    last_breakpoint = 0;

  std::vector<CORE_ADDR> next_pcs;

  /* Adds the breakpoints to the list to be inserted. */
  for (index = 0; index <= last_breakpoint; index++)
    next_pcs.push_back (breaks[index]);

  return next_pcs;
}

/* Find the next possible PCs for thumb mode. */

static std::vector<CORE_ADDR>
thumb_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
  unsigned short inst1;
  CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
  ULONGEST status, itstate;
  struct regcache *regcache = self->regcache;
  std::vector<CORE_ADDR> next_pcs;

  nextpc = MAKE_THUMB_ADDR (nextpc);
  pc_val = MAKE_THUMB_ADDR (pc_val);

  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

  /* Thumb-2 conditional execution support. There are eight bits in
     the CPSR which describe conditional execution state. Once
     reconstructed (they're in a funny order), the low five bits
     describe the low bit of the condition for each instruction and
     how many instructions remain. The high three bits describe the
     base condition. One of the low four bits will be set if an IT
     block is active. These bits read as zero on earlier
     processors. */
  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);

  /* If-Then handling. On GNU/Linux, where this routine is used, we
     use an undefined instruction as a breakpoint. Unlike BKPT, IT
     can disable execution of the undefined instruction. So we might
     miss the breakpoint if we set it on a skipped conditional
     instruction. Because conditional instructions can change the
     flags, affecting the execution of further instructions, we may
     need to set two breakpoints. */

  if (self->has_thumb2_breakpoint)
    {
      if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
        {
          /* An IT instruction. Because this instruction does not
             modify the flags, we can accurately predict the next
             executed instruction. */
          itstate = inst1 & 0x00ff;
          pc += thumb_insn_size (inst1);

          while (itstate != 0 && ! condition_true (itstate >> 4, status))
            {
              inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
              pc += thumb_insn_size (inst1);
              itstate = thumb_advance_itstate (itstate);
            }

          next_pcs.push_back (MAKE_THUMB_ADDR (pc));
          return next_pcs;
        }
      else if (itstate != 0)
        {
          /* We are in a conditional block. Check the condition. */
          if (! condition_true (itstate >> 4, status))
            {
              /* Advance to the next executed instruction. */
              pc += thumb_insn_size (inst1);
              itstate = thumb_advance_itstate (itstate);

              while (itstate != 0 && ! condition_true (itstate >> 4, status))
                {
                  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);

                  pc += thumb_insn_size (inst1);
                  itstate = thumb_advance_itstate (itstate);
                }

              next_pcs.push_back (MAKE_THUMB_ADDR (pc));
              return next_pcs;
            }
          else if ((itstate & 0x0f) == 0x08)
            {
              /* This is the last instruction of the conditional
                 block, and it is executed. We can handle it normally
                 because the following instruction is not conditional,
                 and we must handle it normally because it is
                 permitted to branch. Fall through. */
            }
          else
            {
              int cond_negated;

              /* There are conditional instructions after this one.
                 If this instruction modifies the flags, then we can
                 not predict what the next executed instruction will
                 be. Fortunately, this instruction is architecturally
                 forbidden to branch; we know it will fall through.
                 Start by skipping past it. */
              pc += thumb_insn_size (inst1);
              itstate = thumb_advance_itstate (itstate);

              /* Set a breakpoint on the following instruction. */
              gdb_assert ((itstate & 0x0f) != 0);
              next_pcs.push_back (MAKE_THUMB_ADDR (pc));

              cond_negated = (itstate >> 4) & 1;

              /* Skip all following instructions with the same
                 condition. If there is a later instruction in the IT
                 block with the opposite condition, set the other
                 breakpoint there. If not, then set a breakpoint on
                 the instruction after the IT block. */
              do
                {
                  inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
                  pc += thumb_insn_size (inst1);
                  itstate = thumb_advance_itstate (itstate);
                }
              while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);

              next_pcs.push_back (MAKE_THUMB_ADDR (pc));

              return next_pcs;
            }
        }
    }
  else if (itstate & 0x0f)
    {
      /* We are in a conditional block. Check the condition. */
      int cond = itstate >> 4;

      if (! condition_true (cond, status))
        {
          /* Advance to the next instruction. All the 32-bit
             instructions share a common prefix. */
          next_pcs.push_back (MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
        }

      return next_pcs;

      /* Otherwise, handle the instruction normally. */
    }

  if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
    {
      CORE_ADDR sp;

      /* Fetch the saved PC from the stack. It's stored above
         all of the other registers. */
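      /* The low eight bits of the instruction are the register list;
         its popcount gives the number of registers saved below the PC
         slot. */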
      unsigned long offset
        = count_one_bits (bits (inst1, 0, 7)) * ARM_INT_REGISTER_SIZE;
      sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
      nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
    }
  else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
    {
      unsigned long cond = bits (inst1, 8, 11);
      if (cond == 0x0f) /* 0x0f = SWI */
        {
          nextpc = self->ops->syscall_next_pc (self);
        }
      else if (cond != 0x0f && condition_true (cond, status))
        nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
    }
  else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
    {
      nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
    }
  else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
    {
      unsigned short inst2;
      inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);

      /* Default to the next instruction. */
      nextpc = pc + 4;
      nextpc = MAKE_THUMB_ADDR (nextpc);

      if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
        {
          /* Branches and miscellaneous control instructions. */

          if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
            {
              /* B, BL, BLX. */
              int j1, j2, imm1, imm2;

              imm1 = sbits (inst1, 0, 10);
              imm2 = bits (inst2, 0, 10);
              j1 = bit (inst2, 13);
              j2 = bit (inst2, 11);
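
              /* The branch offset is S:I1:I2:imm10:imm11:'0', with
                 I1 = NOT(J1 XOR S) and I2 = NOT(J2 XOR S). imm1 is
                 already sign-extended through S, so XORing the
                 inverted J bits into bits 23 and 22 below produces
                 I1 and I2. */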
              unsigned long offset = ((imm1 << 12) + (imm2 << 1));
              offset ^= ((!j2) << 22) | ((!j1) << 23);

              nextpc = pc_val + offset;
              /* For BLX make sure to clear the low bits. */
              if (bit (inst2, 12) == 0)
                nextpc = nextpc & 0xfffffffc;
            }
          else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
            {
              /* SUBS PC, LR, #imm8. */
              nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
              nextpc -= inst2 & 0x00ff;
            }
          else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
            {
              /* Conditional branch. */
              if (condition_true (bits (inst1, 6, 9), status))
                {
                  int sign, j1, j2, imm1, imm2;

                  sign = sbits (inst1, 10, 10);
                  imm1 = bits (inst1, 0, 5);
                  imm2 = bits (inst2, 0, 10);
                  j1 = bit (inst2, 13);
                  j2 = bit (inst2, 11);

                  unsigned long offset
                    = (sign << 20) + (j2 << 19) + (j1 << 18);
                  offset += (imm1 << 12) + (imm2 << 1);

                  nextpc = pc_val + offset;
                }
            }
        }
      else if ((inst1 & 0xfe50) == 0xe810)
        {
          /* Load multiple or RFE. */
          int rn, offset, load_pc = 1;

          rn = bits (inst1, 0, 3);
          if (bit (inst1, 7) && !bit (inst1, 8))
            {
              /* LDMIA or POP */
              if (!bit (inst2, 15))
                load_pc = 0;
              offset = count_one_bits (inst2) * 4 - 4;
            }
          else if (!bit (inst1, 7) && bit (inst1, 8))
            {
              /* LDMDB */
              if (!bit (inst2, 15))
                load_pc = 0;
              offset = -4;
            }
          else if (bit (inst1, 7) && bit (inst1, 8))
            {
              /* RFEIA */
              offset = 0;
            }
          else if (!bit (inst1, 7) && !bit (inst1, 8))
            {
              /* RFEDB */
              offset = -8;
            }
          else
            load_pc = 0;

          if (load_pc)
            {
              CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
              nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
            }
        }
      else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
        {
          /* MOV PC or MOVS PC. */
          nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
          nextpc = MAKE_THUMB_ADDR (nextpc);
        }
      else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
        {
          /* LDR PC. */
          CORE_ADDR base;
          int rn, load_pc = 1;

          rn = bits (inst1, 0, 3);
          base = regcache_raw_get_unsigned (regcache, rn);
          if (rn == ARM_PC_REGNUM)
            {
              base = (base + 4) & ~(CORE_ADDR) 0x3;
              if (bit (inst1, 7))
                base += bits (inst2, 0, 11);
              else
                base -= bits (inst2, 0, 11);
            }
          else if (bit (inst1, 7))
            base += bits (inst2, 0, 11);
          else if (bit (inst2, 11))
            {
              if (bit (inst2, 10))
                {
                  if (bit (inst2, 9))
                    base += bits (inst2, 0, 7);
                  else
                    base -= bits (inst2, 0, 7);
                }
            }
          else if ((inst2 & 0x0fc0) == 0x0000)
            {
              int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
              base += regcache_raw_get_unsigned (regcache, rm) << shift;
            }
          else
            /* Reserved. */
            load_pc = 0;

          if (load_pc)
            nextpc
              = self->ops->read_mem_uint (base, 4, byte_order);
        }
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
        {
          /* TBB. */
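          /* The table entries are byte offsets in halfword units, so
             each fetched entry is doubled before being added to the
             PC. */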
          CORE_ADDR tbl_reg, table, offset, length;

          tbl_reg = bits (inst1, 0, 3);
          if (tbl_reg == 0x0f)
            table = pc + 4; /* Regcache copy of PC isn't right yet. */
          else
            table = regcache_raw_get_unsigned (regcache, tbl_reg);

          offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
          length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
          nextpc = pc_val + length;
        }
      else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
        {
          /* TBH. */
          CORE_ADDR tbl_reg, table, offset, length;

          tbl_reg = bits (inst1, 0, 3);
          if (tbl_reg == 0x0f)
            table = pc + 4; /* Regcache copy of PC isn't right yet. */
          else
            table = regcache_raw_get_unsigned (regcache, tbl_reg);

          offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
          length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
          nextpc = pc_val + length;
        }
    }
  else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
        nextpc = UNMAKE_THUMB_ADDR (pc_val);
      else
        nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
    }
  else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
    {
      if (bits (inst1, 3, 6) == 0x0f)
        nextpc = pc_val;
      else
        nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));

      nextpc = MAKE_THUMB_ADDR (nextpc);
    }
  else if ((inst1 & 0xf500) == 0xb100)
    {
      /* CBNZ or CBZ. */
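      /* Bit 11 distinguishes CBNZ from CBZ; the branch is taken when
         the tested register is non-zero (CBNZ) or zero (CBZ). */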
      int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
      ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));

      if (bit (inst1, 11) && reg != 0)
        nextpc = pc_val + imm;
      else if (!bit (inst1, 11) && reg == 0)
        nextpc = pc_val + imm;
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}

/* Get the raw next possible addresses. PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in them. Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer. */

static std::vector<CORE_ADDR>
arm_get_next_pcs_raw (struct arm_get_next_pcs *self)
{
  int byte_order = self->byte_order;
  int byte_order_for_code = self->byte_order_for_code;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  CORE_ADDR pc = regcache_read_pc (self->regcache);
  std::vector<CORE_ADDR> next_pcs;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order_for_code);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4); /* Default case */

  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
        {
          /* Branch with Link and change to Thumb. */
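          /* Bit 24 (the H bit) supplies bit 1 of the target address,
             giving the BLX immediate form a halfword-aligned Thumb
             destination. */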
          nextpc = BranchDest (pc, this_instr);
          nextpc |= bit (this_instr, 24) << 1;
          nextpc = MAKE_THUMB_ADDR (nextpc);
          break;
        }
      case 0xc:
      case 0xd:
      case 0xe:
        /* Coprocessor register transfer. */
        if (bits (this_instr, 12, 15) == 15)
          error (_("Invalid update to pc in instruction"));
        break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      switch (bits (this_instr, 24, 27))
        {
        case 0x0:
        case 0x1: /* data processing */
        case 0x2:
        case 0x3:
          {
            unsigned long operand1, operand2, result = 0;
            unsigned long rn;
            int c;

            if (bits (this_instr, 12, 15) != 15)
              break;

            if (bits (this_instr, 22, 25) == 0
                && bits (this_instr, 4, 7) == 9) /* multiply */
              error (_("Invalid update to pc in instruction"));

            /* BX <reg>, BLX <reg> */
            if (bits (this_instr, 4, 27) == 0x12fff1
                || bits (this_instr, 4, 27) == 0x12fff3)
              {
                rn = bits (this_instr, 0, 3);
                nextpc = ((rn == ARM_PC_REGNUM)
                          ? (pc_val + 8)
                          : regcache_raw_get_unsigned (regcache, rn));

                next_pcs.push_back (nextpc);
                return next_pcs;
              }

            /* Multiply into PC. */
            c = (status & FLAG_C) ? 1 : 0;
            rn = bits (this_instr, 16, 19);
            operand1 = ((rn == ARM_PC_REGNUM)
                        ? (pc_val + 8)
                        : regcache_raw_get_unsigned (regcache, rn));
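
            /* Bit 25 selects the immediate form of operand 2: an 8-bit
               value rotated right by twice the four-bit rotate field. */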
            if (bit (this_instr, 25))
              {
                unsigned long immval = bits (this_instr, 0, 7);
                unsigned long rotate = 2 * bits (this_instr, 8, 11);
                operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
                  & 0xffffffff;
              }
            else /* operand 2 is a shifted register. */
              operand2 = shifted_reg_val (regcache, this_instr, c,
                                          pc_val, status);

            switch (bits (this_instr, 21, 24))
              {
              case 0x0: /* and */
                result = operand1 & operand2;
                break;

              case 0x1: /* eor */
                result = operand1 ^ operand2;
                break;

              case 0x2: /* sub */
                result = operand1 - operand2;
                break;

              case 0x3: /* rsb */
                result = operand2 - operand1;
                break;

              case 0x4: /* add */
                result = operand1 + operand2;
                break;

              case 0x5: /* adc */
                result = operand1 + operand2 + c;
                break;

              case 0x6: /* sbc */
                result = operand1 - operand2 + c;
                break;

              case 0x7: /* rsc */
                result = operand2 - operand1 + c;
                break;

              case 0x8:
              case 0x9:
              case 0xa:
              case 0xb: /* tst, teq, cmp, cmn */
                result = (unsigned long) nextpc;
                break;

              case 0xc: /* orr */
                result = operand1 | operand2;
                break;

              case 0xd: /* mov */
                /* Always step into a function. */
                result = operand2;
                break;

              case 0xe: /* bic */
                result = operand1 & ~operand2;
                break;

              case 0xf: /* mvn */
                result = ~operand2;
                break;
              }
            nextpc = self->ops->addr_bits_remove (self, result);
            break;
          }

        case 0x4:
        case 0x5: /* data transfer */
        case 0x6:
        case 0x7:
          if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
            {
              /* Media instructions and architecturally undefined
                 instructions. */
              break;
            }

          if (bit (this_instr, 20))
            {
              /* load */
              if (bits (this_instr, 12, 15) == 15)
                {
                  /* rd == pc */
                  unsigned long rn;
                  unsigned long base;

                  if (bit (this_instr, 22))
                    error (_("Invalid update to pc in instruction"));

                  /* byte write to PC */
                  rn = bits (this_instr, 16, 19);
                  base = ((rn == ARM_PC_REGNUM)
                          ? (pc_val + 8)
                          : regcache_raw_get_unsigned (regcache, rn));

                  if (bit (this_instr, 24))
                    {
                      /* pre-indexed */
                      int c = (status & FLAG_C) ? 1 : 0;
                      unsigned long offset =
                        (bit (this_instr, 25)
                         ? shifted_reg_val (regcache, this_instr, c,
                                            pc_val, status)
                         : bits (this_instr, 0, 11));

                      if (bit (this_instr, 23))
                        base += offset;
                      else
                        base -= offset;
                    }
                  nextpc
                    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
                                                            4, byte_order);
                }
            }
          break;

        case 0x8:
        case 0x9: /* block transfer */
          if (bit (this_instr, 20))
            {
              /* LDM */
              if (bit (this_instr, 15))
                {
                  /* loading pc */
                  int offset = 0;
                  CORE_ADDR rn_val_offset = 0;
                  unsigned long rn_val
                    = regcache_raw_get_unsigned (regcache,
                                                 bits (this_instr, 16, 19));
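
                  /* The PC is the highest-numbered register, so it
                     occupies the highest address of the transferred
                     block. */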
                  if (bit (this_instr, 23))
                    {
                      /* up */
                      unsigned long reglist = bits (this_instr, 0, 14);
                      offset = count_one_bits_l (reglist) * 4;
                      if (bit (this_instr, 24)) /* pre */
                        offset += 4;
                    }
                  else if (bit (this_instr, 24))
                    offset = -4;

                  rn_val_offset = rn_val + offset;
                  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
                                                                 4, byte_order);
                }
            }
          break;

        case 0xb: /* branch & link */
        case 0xa: /* branch */
          {
            nextpc = BranchDest (pc, this_instr);
            break;
          }

        case 0xc:
        case 0xd:
        case 0xe: /* coproc ops */
          break;
        case 0xf: /* SWI */
          {
            nextpc = self->ops->syscall_next_pc (self);
          }
          break;

        default:
          error (_("Bad bit-field extraction"));
          return next_pcs;
        }
    }

  next_pcs.push_back (nextpc);

  return next_pcs;
}

/* See arm-get-next-pcs.h. */

std::vector<CORE_ADDR>
arm_get_next_pcs (struct arm_get_next_pcs *self)
{
  std::vector<CORE_ADDR> next_pcs;

  if (self->ops->is_thumb (self))
    {
      next_pcs = thumb_deal_with_atomic_sequence_raw (self);
      if (next_pcs.empty ())
        next_pcs = thumb_get_next_pcs_raw (self);
    }
  else
    {
      next_pcs = arm_deal_with_atomic_sequence_raw (self);
      if (next_pcs.empty ())
        next_pcs = arm_get_next_pcs_raw (self);
    }

  if (self->ops->fixup != NULL)
    {
      for (CORE_ADDR &pc_ref : next_pcs)
        pc_ref = self->ops->fixup (self, pc_ref);
    }

  return next_pcs;
}