/* Common code for ARM software single stepping support.

   Copyright (C) 1988-2016 Free Software Foundation, Inc.

   This file is part of GDB.

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */

#include "common-defs.h"
#include "gdb_vecs.h"
#include "common-regcache.h"
#include "arm.h"
#include "arm-get-next-pcs.h"

26/* See arm-get-next-pcs.h. */
27
28void
29arm_get_next_pcs_ctor (struct arm_get_next_pcs *self,
30 struct arm_get_next_pcs_ops *ops,
31 int byte_order,
32 int byte_order_for_code,
33 const gdb_byte *arm_thumb2_breakpoint,
34 struct regcache *regcache)
35{
36 self->ops = ops;
37 self->byte_order = byte_order;
38 self->byte_order_for_code = byte_order_for_code;
39 self->arm_thumb2_breakpoint = arm_thumb2_breakpoint;
40 self->regcache = regcache;
41}
42
43/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
44 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
45 is found, attempt to step through it. The end of the sequence address is
46 added to the next_pcs list. */
47
48static VEC (CORE_ADDR) *
49thumb_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self,
50 CORE_ADDR pc)
51{
52 int byte_order_for_code = self->byte_order_for_code;
53 CORE_ADDR breaks[2] = {-1, -1};
54 CORE_ADDR loc = pc;
55 unsigned short insn1, insn2;
56 int insn_count;
57 int index;
58 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
59 const int atomic_sequence_length = 16; /* Instruction sequence length. */
60 ULONGEST status, itstate;
61 VEC (CORE_ADDR) *next_pcs = NULL;
62
63 /* We currently do not support atomic sequences within an IT block. */
64 status = regcache_raw_get_unsigned (self->regcache, ARM_PS_REGNUM);
65 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
66 if (itstate & 0x0f)
67 return NULL;
68
69 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction. */
70 insn1 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
71
72 loc += 2;
73 if (thumb_insn_size (insn1) != 4)
74 return NULL;
75
76 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
77
78 loc += 2;
79 if (!((insn1 & 0xfff0) == 0xe850
80 || ((insn1 & 0xfff0) == 0xe8d0 && (insn2 & 0x00c0) == 0x0040)))
81 return NULL;
82
83 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
84 instructions. */
85 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
86 {
87 insn1 = self->ops->read_mem_uint (loc, 2,byte_order_for_code);
88 loc += 2;
89
90 if (thumb_insn_size (insn1) != 4)
91 {
92 /* Assume that there is at most one conditional branch in the
93 atomic sequence. If a conditional branch is found, put a
94 breakpoint in its destination address. */
95 if ((insn1 & 0xf000) == 0xd000 && bits (insn1, 8, 11) != 0x0f)
96 {
97 if (last_breakpoint > 0)
98 return NULL; /* More than one conditional branch found,
99 fallback to the standard code. */
100
101 breaks[1] = loc + 2 + (sbits (insn1, 0, 7) << 1);
102 last_breakpoint++;
103 }
104
105 /* We do not support atomic sequences that use any *other*
106 instructions but conditional branches to change the PC.
107 Fall back to standard code to avoid losing control of
108 execution. */
109 else if (thumb_instruction_changes_pc (insn1))
110 return NULL;
111 }
112 else
113 {
114 insn2 = self->ops->read_mem_uint (loc, 2, byte_order_for_code);
115
116 loc += 2;
117
118 /* Assume that there is at most one conditional branch in the
119 atomic sequence. If a conditional branch is found, put a
120 breakpoint in its destination address. */
121 if ((insn1 & 0xf800) == 0xf000
122 && (insn2 & 0xd000) == 0x8000
123 && (insn1 & 0x0380) != 0x0380)
124 {
125 int sign, j1, j2, imm1, imm2;
126 unsigned int offset;
127
128 sign = sbits (insn1, 10, 10);
129 imm1 = bits (insn1, 0, 5);
130 imm2 = bits (insn2, 0, 10);
131 j1 = bit (insn2, 13);
132 j2 = bit (insn2, 11);
133
134 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
135 offset += (imm1 << 12) + (imm2 << 1);
136
137 if (last_breakpoint > 0)
138 return 0; /* More than one conditional branch found,
139 fallback to the standard code. */
140
141 breaks[1] = loc + offset;
142 last_breakpoint++;
143 }
144
145 /* We do not support atomic sequences that use any *other*
146 instructions but conditional branches to change the PC.
147 Fall back to standard code to avoid losing control of
148 execution. */
149 else if (thumb2_instruction_changes_pc (insn1, insn2))
150 return NULL;
151
152 /* If we find a strex{,b,h,d}, we're done. */
153 if ((insn1 & 0xfff0) == 0xe840
154 || ((insn1 & 0xfff0) == 0xe8c0 && (insn2 & 0x00c0) == 0x0040))
155 break;
156 }
157 }
158
159 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
160 if (insn_count == atomic_sequence_length)
161 return NULL;
162
163 /* Insert a breakpoint right after the end of the atomic sequence. */
164 breaks[0] = loc;
165
166 /* Check for duplicated breakpoints. Check also for a breakpoint
167 placed (branch instruction's destination) anywhere in sequence. */
168 if (last_breakpoint
169 && (breaks[1] == breaks[0]
170 || (breaks[1] >= pc && breaks[1] < loc)))
171 last_breakpoint = 0;
172
173 /* Adds the breakpoints to the list to be inserted. */
174 for (index = 0; index <= last_breakpoint; index++)
175 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (breaks[index]));
176
177 return next_pcs;
178}
179
180/* Checks for an atomic sequence of instructions beginning with a LDREX{,B,H,D}
181 instruction and ending with a STREX{,B,H,D} instruction. If such a sequence
182 is found, attempt to step through it. The end of the sequence address is
183 added to the next_pcs list. */
184
185static VEC (CORE_ADDR) *
186arm_deal_with_atomic_sequence_raw (struct arm_get_next_pcs *self,
187 CORE_ADDR pc)
188{
189 int byte_order_for_code = self->byte_order_for_code;
190 CORE_ADDR breaks[2] = {-1, -1};
191 CORE_ADDR loc = pc;
192 unsigned int insn;
193 int insn_count;
194 int index;
195 int last_breakpoint = 0; /* Defaults to 0 (no breakpoints placed). */
196 const int atomic_sequence_length = 16; /* Instruction sequence length. */
197 VEC (CORE_ADDR) *next_pcs = NULL;
198
199 /* Assume all atomic sequences start with a ldrex{,b,h,d} instruction.
200 Note that we do not currently support conditionally executed atomic
201 instructions. */
202 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
203
204 loc += 4;
205 if ((insn & 0xff9000f0) != 0xe1900090)
206 return NULL;
207
208 /* Assume that no atomic sequence is longer than "atomic_sequence_length"
209 instructions. */
210 for (insn_count = 0; insn_count < atomic_sequence_length; ++insn_count)
211 {
212 insn = self->ops->read_mem_uint (loc, 4, byte_order_for_code);
213
214 loc += 4;
215
216 /* Assume that there is at most one conditional branch in the atomic
217 sequence. If a conditional branch is found, put a breakpoint in
218 its destination address. */
219 if (bits (insn, 24, 27) == 0xa)
220 {
221 if (last_breakpoint > 0)
222 return NULL; /* More than one conditional branch found, fallback
223 to the standard single-step code. */
224
225 breaks[1] = BranchDest (loc - 4, insn);
226 last_breakpoint++;
227 }
228
229 /* We do not support atomic sequences that use any *other* instructions
230 but conditional branches to change the PC. Fall back to standard
231 code to avoid losing control of execution. */
232 else if (arm_instruction_changes_pc (insn))
233 return NULL;
234
235 /* If we find a strex{,b,h,d}, we're done. */
236 if ((insn & 0xff9000f0) == 0xe1800090)
237 break;
238 }
239
240 /* If we didn't find the strex{,b,h,d}, we cannot handle the sequence. */
241 if (insn_count == atomic_sequence_length)
242 return NULL;
243
244 /* Insert a breakpoint right after the end of the atomic sequence. */
245 breaks[0] = loc;
246
247 /* Check for duplicated breakpoints. Check also for a breakpoint
248 placed (branch instruction's destination) anywhere in sequence. */
249 if (last_breakpoint
250 && (breaks[1] == breaks[0]
251 || (breaks[1] >= pc && breaks[1] < loc)))
252 last_breakpoint = 0;
253
254 /* Adds the breakpoints to the list to be inserted. */
255 for (index = 0; index <= last_breakpoint; index++)
256 VEC_safe_push (CORE_ADDR, next_pcs, breaks[index]);
257
258 return next_pcs;
259}
260
261/* See arm-get-next-pcs.h. */
262
263VEC (CORE_ADDR) *
264arm_get_next_pcs (struct arm_get_next_pcs *self, CORE_ADDR pc)
265{
266 VEC (CORE_ADDR) *next_pcs = NULL;
267
268 if (self->ops->is_thumb (self))
269 {
270 next_pcs = thumb_deal_with_atomic_sequence_raw (self, pc);
271 if (next_pcs == NULL)
272 next_pcs = thumb_get_next_pcs_raw (self, pc);
273 }
274 else
275 {
276 next_pcs = arm_deal_with_atomic_sequence_raw (self, pc);
277 if (next_pcs == NULL)
278 next_pcs = arm_get_next_pcs_raw (self, pc);
279 }
280
281 return next_pcs;
282}
283
284/* See arm-get-next-pcs.h. */
285
286VEC (CORE_ADDR) *
287thumb_get_next_pcs_raw (struct arm_get_next_pcs *self,
288 CORE_ADDR pc)
289{
290 int byte_order = self->byte_order;
291 int byte_order_for_code = self->byte_order_for_code;
292 unsigned long pc_val = ((unsigned long) pc) + 4; /* PC after prefetch */
293 unsigned short inst1;
294 CORE_ADDR nextpc = pc + 2; /* Default is next instruction. */
295 unsigned long offset;
296 ULONGEST status, itstate;
297 struct regcache *regcache = self->regcache;
298 VEC (CORE_ADDR) * next_pcs = NULL;
299
300 nextpc = MAKE_THUMB_ADDR (nextpc);
301 pc_val = MAKE_THUMB_ADDR (pc_val);
302
303 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
304
305 /* Thumb-2 conditional execution support. There are eight bits in
306 the CPSR which describe conditional execution state. Once
307 reconstructed (they're in a funny order), the low five bits
308 describe the low bit of the condition for each instruction and
309 how many instructions remain. The high three bits describe the
310 base condition. One of the low four bits will be set if an IT
311 block is active. These bits read as zero on earlier
312 processors. */
313 status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
314 itstate = ((status >> 8) & 0xfc) | ((status >> 25) & 0x3);
315
316 /* If-Then handling. On GNU/Linux, where this routine is used, we
317 use an undefined instruction as a breakpoint. Unlike BKPT, IT
318 can disable execution of the undefined instruction. So we might
319 miss the breakpoint if we set it on a skipped conditional
320 instruction. Because conditional instructions can change the
321 flags, affecting the execution of further instructions, we may
322 need to set two breakpoints. */
323
324 if (self->arm_thumb2_breakpoint != NULL)
325 {
326 if ((inst1 & 0xff00) == 0xbf00 && (inst1 & 0x000f) != 0)
327 {
328 /* An IT instruction. Because this instruction does not
329 modify the flags, we can accurately predict the next
330 executed instruction. */
331 itstate = inst1 & 0x00ff;
332 pc += thumb_insn_size (inst1);
333
334 while (itstate != 0 && ! condition_true (itstate >> 4, status))
335 {
336 inst1 = self->ops->read_mem_uint (pc, 2,byte_order_for_code);
337 pc += thumb_insn_size (inst1);
338 itstate = thumb_advance_itstate (itstate);
339 }
340
341 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
342 return next_pcs;
343 }
344 else if (itstate != 0)
345 {
346 /* We are in a conditional block. Check the condition. */
347 if (! condition_true (itstate >> 4, status))
348 {
349 /* Advance to the next executed instruction. */
350 pc += thumb_insn_size (inst1);
351 itstate = thumb_advance_itstate (itstate);
352
353 while (itstate != 0 && ! condition_true (itstate >> 4, status))
354 {
355 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
356
357 pc += thumb_insn_size (inst1);
358 itstate = thumb_advance_itstate (itstate);
359 }
360
361 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
362 return next_pcs;
363 }
364 else if ((itstate & 0x0f) == 0x08)
365 {
366 /* This is the last instruction of the conditional
367 block, and it is executed. We can handle it normally
368 because the following instruction is not conditional,
369 and we must handle it normally because it is
370 permitted to branch. Fall through. */
371 }
372 else
373 {
374 int cond_negated;
375
376 /* There are conditional instructions after this one.
377 If this instruction modifies the flags, then we can
378 not predict what the next executed instruction will
379 be. Fortunately, this instruction is architecturally
380 forbidden to branch; we know it will fall through.
381 Start by skipping past it. */
382 pc += thumb_insn_size (inst1);
383 itstate = thumb_advance_itstate (itstate);
384
385 /* Set a breakpoint on the following instruction. */
386 gdb_assert ((itstate & 0x0f) != 0);
387 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
388
389 cond_negated = (itstate >> 4) & 1;
390
391 /* Skip all following instructions with the same
392 condition. If there is a later instruction in the IT
393 block with the opposite condition, set the other
394 breakpoint there. If not, then set a breakpoint on
395 the instruction after the IT block. */
396 do
397 {
398 inst1 = self->ops->read_mem_uint (pc, 2, byte_order_for_code);
399 pc += thumb_insn_size (inst1);
400 itstate = thumb_advance_itstate (itstate);
401 }
402 while (itstate != 0 && ((itstate >> 4) & 1) == cond_negated);
403
404 VEC_safe_push (CORE_ADDR, next_pcs, MAKE_THUMB_ADDR (pc));
405
406 return next_pcs;
407 }
408 }
409 }
410 else if (itstate & 0x0f)
411 {
412 /* We are in a conditional block. Check the condition. */
413 int cond = itstate >> 4;
414
415 if (! condition_true (cond, status))
416 {
417 /* Advance to the next instruction. All the 32-bit
418 instructions share a common prefix. */
419 VEC_safe_push (CORE_ADDR, next_pcs,
420 MAKE_THUMB_ADDR (pc + thumb_insn_size (inst1)));
421 }
422
423 return next_pcs;
424
425 /* Otherwise, handle the instruction normally. */
426 }
427
428 if ((inst1 & 0xff00) == 0xbd00) /* pop {rlist, pc} */
429 {
430 CORE_ADDR sp;
431
432 /* Fetch the saved PC from the stack. It's stored above
433 all of the other registers. */
434 offset = bitcount (bits (inst1, 0, 7)) * INT_REGISTER_SIZE;
435 sp = regcache_raw_get_unsigned (regcache, ARM_SP_REGNUM);
436 nextpc = self->ops->read_mem_uint (sp + offset, 4, byte_order);
437 }
438 else if ((inst1 & 0xf000) == 0xd000) /* conditional branch */
439 {
440 unsigned long cond = bits (inst1, 8, 11);
441 if (cond == 0x0f) /* 0x0f = SWI */
442 {
443 nextpc = self->ops->syscall_next_pc (self, pc);
444 }
445 else if (cond != 0x0f && condition_true (cond, status))
446 nextpc = pc_val + (sbits (inst1, 0, 7) << 1);
447 }
448 else if ((inst1 & 0xf800) == 0xe000) /* unconditional branch */
449 {
450 nextpc = pc_val + (sbits (inst1, 0, 10) << 1);
451 }
452 else if (thumb_insn_size (inst1) == 4) /* 32-bit instruction */
453 {
454 unsigned short inst2;
455 inst2 = self->ops->read_mem_uint (pc + 2, 2, byte_order_for_code);
456
457 /* Default to the next instruction. */
458 nextpc = pc + 4;
459 nextpc = MAKE_THUMB_ADDR (nextpc);
460
461 if ((inst1 & 0xf800) == 0xf000 && (inst2 & 0x8000) == 0x8000)
462 {
463 /* Branches and miscellaneous control instructions. */
464
465 if ((inst2 & 0x1000) != 0 || (inst2 & 0xd001) == 0xc000)
466 {
467 /* B, BL, BLX. */
468 int j1, j2, imm1, imm2;
469
470 imm1 = sbits (inst1, 0, 10);
471 imm2 = bits (inst2, 0, 10);
472 j1 = bit (inst2, 13);
473 j2 = bit (inst2, 11);
474
475 offset = ((imm1 << 12) + (imm2 << 1));
476 offset ^= ((!j2) << 22) | ((!j1) << 23);
477
478 nextpc = pc_val + offset;
479 /* For BLX make sure to clear the low bits. */
480 if (bit (inst2, 12) == 0)
481 nextpc = nextpc & 0xfffffffc;
482 }
483 else if (inst1 == 0xf3de && (inst2 & 0xff00) == 0x3f00)
484 {
485 /* SUBS PC, LR, #imm8. */
486 nextpc = regcache_raw_get_unsigned (regcache, ARM_LR_REGNUM);
487 nextpc -= inst2 & 0x00ff;
488 }
489 else if ((inst2 & 0xd000) == 0x8000 && (inst1 & 0x0380) != 0x0380)
490 {
491 /* Conditional branch. */
492 if (condition_true (bits (inst1, 6, 9), status))
493 {
494 int sign, j1, j2, imm1, imm2;
495
496 sign = sbits (inst1, 10, 10);
497 imm1 = bits (inst1, 0, 5);
498 imm2 = bits (inst2, 0, 10);
499 j1 = bit (inst2, 13);
500 j2 = bit (inst2, 11);
501
502 offset = (sign << 20) + (j2 << 19) + (j1 << 18);
503 offset += (imm1 << 12) + (imm2 << 1);
504
505 nextpc = pc_val + offset;
506 }
507 }
508 }
509 else if ((inst1 & 0xfe50) == 0xe810)
510 {
511 /* Load multiple or RFE. */
512 int rn, offset, load_pc = 1;
513
514 rn = bits (inst1, 0, 3);
515 if (bit (inst1, 7) && !bit (inst1, 8))
516 {
517 /* LDMIA or POP */
518 if (!bit (inst2, 15))
519 load_pc = 0;
520 offset = bitcount (inst2) * 4 - 4;
521 }
522 else if (!bit (inst1, 7) && bit (inst1, 8))
523 {
524 /* LDMDB */
525 if (!bit (inst2, 15))
526 load_pc = 0;
527 offset = -4;
528 }
529 else if (bit (inst1, 7) && bit (inst1, 8))
530 {
531 /* RFEIA */
532 offset = 0;
533 }
534 else if (!bit (inst1, 7) && !bit (inst1, 8))
535 {
536 /* RFEDB */
537 offset = -8;
538 }
539 else
540 load_pc = 0;
541
542 if (load_pc)
543 {
544 CORE_ADDR addr = regcache_raw_get_unsigned (regcache, rn);
545 nextpc = self->ops->read_mem_uint (addr + offset, 4, byte_order);
546 }
547 }
548 else if ((inst1 & 0xffef) == 0xea4f && (inst2 & 0xfff0) == 0x0f00)
549 {
550 /* MOV PC or MOVS PC. */
551 nextpc = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
552 nextpc = MAKE_THUMB_ADDR (nextpc);
553 }
554 else if ((inst1 & 0xff70) == 0xf850 && (inst2 & 0xf000) == 0xf000)
555 {
556 /* LDR PC. */
557 CORE_ADDR base;
558 int rn, load_pc = 1;
559
560 rn = bits (inst1, 0, 3);
561 base = regcache_raw_get_unsigned (regcache, rn);
562 if (rn == ARM_PC_REGNUM)
563 {
564 base = (base + 4) & ~(CORE_ADDR) 0x3;
565 if (bit (inst1, 7))
566 base += bits (inst2, 0, 11);
567 else
568 base -= bits (inst2, 0, 11);
569 }
570 else if (bit (inst1, 7))
571 base += bits (inst2, 0, 11);
572 else if (bit (inst2, 11))
573 {
574 if (bit (inst2, 10))
575 {
576 if (bit (inst2, 9))
577 base += bits (inst2, 0, 7);
578 else
579 base -= bits (inst2, 0, 7);
580 }
581 }
582 else if ((inst2 & 0x0fc0) == 0x0000)
583 {
584 int shift = bits (inst2, 4, 5), rm = bits (inst2, 0, 3);
585 base += regcache_raw_get_unsigned (regcache, rm) << shift;
586 }
587 else
588 /* Reserved. */
589 load_pc = 0;
590
591 if (load_pc)
592 nextpc
593 = self->ops->read_mem_uint (base, 4, byte_order);
594 }
595 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf000)
596 {
597 /* TBB. */
598 CORE_ADDR tbl_reg, table, offset, length;
599
600 tbl_reg = bits (inst1, 0, 3);
601 if (tbl_reg == 0x0f)
602 table = pc + 4; /* Regcache copy of PC isn't right yet. */
603 else
604 table = regcache_raw_get_unsigned (regcache, tbl_reg);
605
606 offset = regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
607 length = 2 * self->ops->read_mem_uint (table + offset, 1, byte_order);
608 nextpc = pc_val + length;
609 }
610 else if ((inst1 & 0xfff0) == 0xe8d0 && (inst2 & 0xfff0) == 0xf010)
611 {
612 /* TBH. */
613 CORE_ADDR tbl_reg, table, offset, length;
614
615 tbl_reg = bits (inst1, 0, 3);
616 if (tbl_reg == 0x0f)
617 table = pc + 4; /* Regcache copy of PC isn't right yet. */
618 else
619 table = regcache_raw_get_unsigned (regcache, tbl_reg);
620
621 offset = 2 * regcache_raw_get_unsigned (regcache, bits (inst2, 0, 3));
622 length = 2 * self->ops->read_mem_uint (table + offset, 2, byte_order);
623 nextpc = pc_val + length;
624 }
625 }
626 else if ((inst1 & 0xff00) == 0x4700) /* bx REG, blx REG */
627 {
628 if (bits (inst1, 3, 6) == 0x0f)
629 nextpc = UNMAKE_THUMB_ADDR (pc_val);
630 else
631 nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
632 }
633 else if ((inst1 & 0xff87) == 0x4687) /* mov pc, REG */
634 {
635 if (bits (inst1, 3, 6) == 0x0f)
636 nextpc = pc_val;
637 else
638 nextpc = regcache_raw_get_unsigned (regcache, bits (inst1, 3, 6));
639
640 nextpc = MAKE_THUMB_ADDR (nextpc);
641 }
642 else if ((inst1 & 0xf500) == 0xb100)
643 {
644 /* CBNZ or CBZ. */
645 int imm = (bit (inst1, 9) << 6) + (bits (inst1, 3, 7) << 1);
646 ULONGEST reg = regcache_raw_get_unsigned (regcache, bits (inst1, 0, 2));
647
648 if (bit (inst1, 11) && reg != 0)
649 nextpc = pc_val + imm;
650 else if (!bit (inst1, 11) && reg == 0)
651 nextpc = pc_val + imm;
652 }
653
654 VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
655
656 return next_pcs;
657}
658
/* Get the raw next possible addresses.  PC in next_pcs is the current program
   counter, which is assumed to be executing in ARM mode.

   The values returned have the execution state of the next instruction
   encoded in it.  Use IS_THUMB_ADDR () to see whether the instruction is
   in Thumb-State, and gdbarch_addr_bits_remove () to get the plain memory
   address in GDB and arm_addr_bits_remove in GDBServer.  */

VEC (CORE_ADDR) *
arm_get_next_pcs_raw (struct arm_get_next_pcs *self,
		      CORE_ADDR pc)
{
  int byte_order = self->byte_order;
  unsigned long pc_val;
  unsigned long this_instr = 0;
  unsigned long status;
  CORE_ADDR nextpc;
  struct regcache *regcache = self->regcache;
  VEC (CORE_ADDR) *next_pcs = NULL;

  pc_val = (unsigned long) pc;
  this_instr = self->ops->read_mem_uint (pc, 4, byte_order);

  status = regcache_raw_get_unsigned (regcache, ARM_PS_REGNUM);
  nextpc = (CORE_ADDR) (pc_val + 4);	/* Default case */

  /* Instructions whose condition field is NV (0xf) are unconditional
     extension-space encodings; only a few of them can change the PC.  */
  if (bits (this_instr, 28, 31) == INST_NV)
    switch (bits (this_instr, 24, 27))
      {
      case 0xa:
      case 0xb:
	{
	  /* Branch with Link and change to Thumb.  */
	  nextpc = BranchDest (pc, this_instr);
	  nextpc |= bit (this_instr, 24) << 1;
	  nextpc = MAKE_THUMB_ADDR (nextpc);
	  break;
	}
      case 0xc:
      case 0xd:
      case 0xe:
	/* Coprocessor register transfer.  */
	if (bits (this_instr, 12, 15) == 15)
	  error (_("Invalid update to pc in instruction"));
	break;
      }
  else if (condition_true (bits (this_instr, 28, 31), status))
    {
      /* Instruction executes; decode it by its top opcode bits to see
	 where control goes next.  */
      switch (bits (this_instr, 24, 27))
	{
	case 0x0:
	case 0x1:		/* data processing */
	case 0x2:
	case 0x3:
	  {
	    unsigned long operand1, operand2, result = 0;
	    unsigned long rn;
	    int c;

	    /* Only encodings with Rd == 15 (PC) affect control flow.  */
	    if (bits (this_instr, 12, 15) != 15)
	      break;

	    if (bits (this_instr, 22, 25) == 0
		&& bits (this_instr, 4, 7) == 9)	/* multiply */
	      error (_("Invalid update to pc in instruction"));

	    /* BX <reg>, BLX <reg> */
	    if (bits (this_instr, 4, 27) == 0x12fff1
		|| bits (this_instr, 4, 27) == 0x12fff3)
	      {
		/* A PC operand reads as PC_VAL + 8 here.  */
		rn = bits (this_instr, 0, 3);
		nextpc = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
		return next_pcs;
	      }

	    /* Multiply into PC.  */
	    c = (status & FLAG_C) ? 1 : 0;
	    rn = bits (this_instr, 16, 19);
	    operand1 = ((rn == ARM_PC_REGNUM)
			? (pc_val + 8)
			: regcache_raw_get_unsigned (regcache, rn));

	    if (bit (this_instr, 25))
	      {
		/* Operand 2 is a rotated 8-bit immediate.  */
		unsigned long immval = bits (this_instr, 0, 7);
		unsigned long rotate = 2 * bits (this_instr, 8, 11);
		operand2 = ((immval >> rotate) | (immval << (32 - rotate)))
		  & 0xffffffff;
	      }
	    else		/* operand 2 is a shifted register.  */
	      operand2 = shifted_reg_val (regcache, this_instr, c,
					  pc_val, status);

	    /* Emulate the ALU operation to compute the value written
	       into the PC.  */
	    switch (bits (this_instr, 21, 24))
	      {
	      case 0x0:	/*and */
		result = operand1 & operand2;
		break;

	      case 0x1:	/*eor */
		result = operand1 ^ operand2;
		break;

	      case 0x2:	/*sub */
		result = operand1 - operand2;
		break;

	      case 0x3:	/*rsb */
		result = operand2 - operand1;
		break;

	      case 0x4:	/*add */
		result = operand1 + operand2;
		break;

	      case 0x5:	/*adc */
		result = operand1 + operand2 + c;
		break;

	      case 0x6:	/*sbc */
		result = operand1 - operand2 + c;
		break;

	      case 0x7:	/*rsc */
		result = operand2 - operand1 + c;
		break;

	      case 0x8:
	      case 0x9:
	      case 0xa:
	      case 0xb:	/* tst, teq, cmp, cmn */
		/* Comparison ops do not write the PC; keep the
		   default fall-through address.  */
		result = (unsigned long) nextpc;
		break;

	      case 0xc:	/*orr */
		result = operand1 | operand2;
		break;

	      case 0xd:	/*mov */
		/* Always step into a function.  */
		result = operand2;
		break;

	      case 0xe:	/*bic */
		result = operand1 & ~operand2;
		break;

	      case 0xf:	/*mvn */
		result = ~operand2;
		break;
	      }
	    nextpc = self->ops->addr_bits_remove (self, result);
	    break;
	  }

	case 0x4:
	case 0x5:		/* data transfer */
	case 0x6:
	case 0x7:
	  if (bits (this_instr, 25, 27) == 0x3 && bit (this_instr, 4) == 1)
	    {
	      /* Media instructions and architecturally undefined
		 instructions.  */
	      break;
	    }

	  if (bit (this_instr, 20))
	    {
	      /* load */
	      if (bits (this_instr, 12, 15) == 15)
		{
		  /* rd == pc */
		  unsigned long rn;
		  unsigned long base;

		  if (bit (this_instr, 22))
		    error (_("Invalid update to pc in instruction"));

		  /* byte write to PC */
		  rn = bits (this_instr, 16, 19);
		  base = ((rn == ARM_PC_REGNUM)
			  ? (pc_val + 8)
			  : regcache_raw_get_unsigned (regcache, rn));

		  if (bit (this_instr, 24))
		    {
		      /* pre-indexed */
		      int c = (status & FLAG_C) ? 1 : 0;
		      unsigned long offset =
			(bit (this_instr, 25)
			 ? shifted_reg_val (regcache, this_instr, c,
					    pc_val, status)
			 : bits (this_instr, 0, 11));

		      if (bit (this_instr, 23))
			base += offset;
		      else
			base -= offset;
		    }
		  /* The loaded word becomes the next PC.  */
		  nextpc
		    = (CORE_ADDR) self->ops->read_mem_uint ((CORE_ADDR) base,
							    4, byte_order);
		}
	    }
	  break;

	case 0x8:
	case 0x9:		/* block transfer */
	  if (bit (this_instr, 20))
	    {
	      /* LDM */
	      if (bit (this_instr, 15))
		{
		  /* loading pc */
		  int offset = 0;
		  CORE_ADDR rn_val_offset = 0;
		  unsigned long rn_val
		    = regcache_raw_get_unsigned (regcache,
						 bits (this_instr, 16, 19));

		  if (bit (this_instr, 23))
		    {
		      /* up */
		      unsigned long reglist = bits (this_instr, 0, 14);
		      /* PC is stored above all other loaded registers.  */
		      offset = bitcount (reglist) * 4;
		      if (bit (this_instr, 24))		/* pre */
			offset += 4;
		    }
		  else if (bit (this_instr, 24))
		    offset = -4;

		  rn_val_offset = rn_val + offset;
		  nextpc = (CORE_ADDR) self->ops->read_mem_uint (rn_val_offset,
								 4, byte_order);
		}
	    }
	  break;

	case 0xb:		/* branch & link */
	case 0xa:		/* branch */
	  {
	    nextpc = BranchDest (pc, this_instr);
	    break;
	  }

	case 0xc:
	case 0xd:
	case 0xe:		/* coproc ops */
	  break;
	case 0xf:		/* SWI */
	  {
	    nextpc = self->ops->syscall_next_pc (self, pc);
	  }
	  break;

	default:
	  /* All 16 values of bits 24-27 are covered above, so this is
	     defensive only.  NOTE(review): error () appears not to
	     return here (the return below looks unreachable) —
	     confirm against GDB's error () contract.  */
	  error (_("Bad bit-field extraction"));
	  return next_pcs;
	}
    }

  VEC_safe_push (CORE_ADDR, next_pcs, nextpc);
  return next_pcs;
}