return reg->number;
}
-/* Like arm_reg_parse, but allow allow the following extra features:
+/* Like arm_reg_parse, but also allow the following extra features:
- If RTYPE is non-zero, return the (possibly restricted) type of the
register (e.g. Neon double or quad reg when either has been requested).
- If this is a Neon vector type with additional type information, fill
return reg * 16 + atype.index;
}
+/* Types of registers in a list.  Selects the parsing and validation
+   rules applied by parse_reg_list and parse_vfp_reg_list.  */
+
+enum reg_list_els
+{
+ REGLIST_RN,		/* Core registers (rN).  */
+ REGLIST_CLRM,		/* Core registers plus APSR, for CLRM.  */
+ REGLIST_VFP_S,	/* VFP single-precision registers.  */
+ REGLIST_VFP_S_VPR,	/* VFP single-precision registers, VPR last.  */
+ REGLIST_VFP_D,	/* VFP double-precision registers.  */
+ REGLIST_VFP_D_VPR,	/* VFP double-precision registers, VPR last.  */
+ REGLIST_NEON_D	/* Neon double-precision registers (qN aliases).  */
+};
+
/* Parse an ARM register list. Returns the bitmask, or FAIL. */
static long
-parse_reg_list (char ** strp)
+parse_reg_list (char ** strp, enum reg_list_els etype)
{
- char * str = * strp;
- long range = 0;
- int another_range;
+ char *str = *strp;
+ long range = 0;
+ int another_range;
+
+ gas_assert (etype == REGLIST_RN || etype == REGLIST_CLRM);
/* We come back here if we get ranges concatenated by '+' or '|'. */
do
do
{
int reg;
+ const char apsr_str[] = "apsr";
+ int apsr_str_len = strlen (apsr_str);
- if ((reg = arm_reg_parse (&str, REG_TYPE_RN)) == FAIL)
+ reg = arm_reg_parse (&str, REG_TYPE_RN);
+ if (etype == REGLIST_CLRM)
{
- first_error (_(reg_expected_msgs[REG_TYPE_RN]));
- return FAIL;
+ if (reg == REG_SP || reg == REG_PC)
+ reg = FAIL;
+ else if (reg == FAIL
+ && !strncasecmp (str, apsr_str, apsr_str_len)
+ && !ISALPHA (*(str + apsr_str_len)))
+ {
+ reg = 15;
+ str += apsr_str_len;
+ }
+
+ if (reg == FAIL)
+ {
+ first_error (_("r0-r12, lr or APSR expected"));
+ return FAIL;
+ }
+ }
+ else /* etype == REGLIST_RN. */
+ {
+ if (reg == FAIL)
+ {
+ first_error (_(reg_expected_msgs[REG_TYPE_RN]));
+ return FAIL;
+ }
}
if (in_range)
return FAIL;
}
}
- else
+ else if (etype == REGLIST_RN)
{
expressionS exp;
return range;
}
-/* Types of registers in a list. */
-
-enum reg_list_els
-{
- REGLIST_VFP_S,
- REGLIST_VFP_D,
- REGLIST_NEON_D
-};
-
/* Parse a VFP register list. If the string is invalid return FAIL.
Otherwise return the number of registers, and set PBASE to the first
register. Parses registers of type ETYPE.
bug. */
static int
-parse_vfp_reg_list (char **ccp, unsigned int *pbase, enum reg_list_els etype)
+parse_vfp_reg_list (char **ccp, unsigned int *pbase, enum reg_list_els etype,
+ bfd_boolean *partial_match)
{
char *str = *ccp;
int base_reg;
int warned = 0;
unsigned long mask = 0;
int i;
+ bfd_boolean vpr_seen = FALSE;
+ bfd_boolean expect_vpr =
+ (etype == REGLIST_VFP_S_VPR) || (etype == REGLIST_VFP_D_VPR);
if (skip_past_char (&str, '{') == FAIL)
{
switch (etype)
{
case REGLIST_VFP_S:
+ case REGLIST_VFP_S_VPR:
regtype = REG_TYPE_VFS;
max_regs = 32;
break;
case REGLIST_VFP_D:
+ case REGLIST_VFP_D_VPR:
regtype = REG_TYPE_VFD;
break;
case REGLIST_NEON_D:
regtype = REG_TYPE_NDQ;
break;
+
+ default:
+ gas_assert (0);
}
- if (etype != REGLIST_VFP_S)
+ if (etype != REGLIST_VFP_S && etype != REGLIST_VFP_S_VPR)
{
/* VFPv3 allows 32 D registers, except for the VFPv3-D16 variant. */
if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_d32))
}
base_reg = max_regs;
+ *partial_match = FALSE;
do
{
int setmask = 1, addregs = 1;
+ const char vpr_str[] = "vpr";
+ int vpr_str_len = strlen (vpr_str);
new_base = arm_typed_reg_parse (&str, regtype, &regtype, NULL);
- if (new_base == FAIL)
+ if (expect_vpr)
+ {
+ if (new_base == FAIL
+ && !strncasecmp (str, vpr_str, vpr_str_len)
+ && !ISALPHA (*(str + vpr_str_len))
+ && !vpr_seen)
+ {
+ vpr_seen = TRUE;
+ str += vpr_str_len;
+ if (count == 0)
+ base_reg = 0; /* Canonicalize VPR only on d0 with 0 regs. */
+ }
+ else if (vpr_seen)
+ {
+ first_error (_("VPR expected last"));
+ return FAIL;
+ }
+ else if (new_base == FAIL)
+ {
+ if (regtype == REG_TYPE_VFS)
+ first_error (_("VFP single precision register or VPR "
+ "expected"));
+ else /* regtype == REG_TYPE_VFD. */
+ first_error (_("VFP/Neon double precision register or VPR "
+ "expected"));
+ return FAIL;
+ }
+ }
+ else if (new_base == FAIL)
{
first_error (_(reg_expected_msgs[regtype]));
return FAIL;
}
+ *partial_match = TRUE;
+ if (vpr_seen)
+ continue;
+
if (new_base >= max_regs)
{
first_error (_("register out of range in list"));
return FAIL;
}
- if ((mask >> new_base) != 0 && ! warned)
+ if ((mask >> new_base) != 0 && ! warned && !vpr_seen)
{
as_tsktsk (_("register list not in ascending order"));
warned = 1;
str++;
/* Sanity check -- should have raised a parse error above. */
- if (count == 0 || count > max_regs)
+ if ((!vpr_seen && count == 0) || count > max_regs)
abort ();
*pbase = base_reg;
+ if (expect_vpr && !vpr_seen)
+ {
+ first_error (_("VPR expected last"));
+ return FAIL;
+ }
+
/* Final test -- the registers must be consecutive. */
mask >>= base_reg;
for (i = 0; i < count; i++)
long range;
int n;
- range = parse_reg_list (&input_line_pointer);
+ range = parse_reg_list (&input_line_pointer, REGLIST_RN);
if (range == FAIL)
{
as_bad (_("expected register list"));
valueT op;
int num_vfpv3_regs = 0;
int num_regs_below_16;
+ bfd_boolean partial_match;
- count = parse_vfp_reg_list (&input_line_pointer, &start, REGLIST_VFP_D);
+ count = parse_vfp_reg_list (&input_line_pointer, &start, REGLIST_VFP_D,
+ &partial_match);
if (count == FAIL)
{
as_bad (_("expected register list"));
int count;
unsigned int reg;
valueT op;
+ bfd_boolean partial_match;
- count = parse_vfp_reg_list (&input_line_pointer, &reg, REGLIST_VFP_D);
+ count = parse_vfp_reg_list (&input_line_pointer, &reg, REGLIST_VFP_D,
+ &partial_match);
if (count == FAIL)
{
as_bad (_("expected register list"));
OP_RIWG, /* iWMMXt wCG register */
OP_RXA, /* XScale accumulator register */
+ /* New operands for Armv8.1-M Mainline. */
+ OP_LR, /* ARM LR register */
+ OP_RRnpcsp_I32, /* ARM register (no BadReg) or literal 1 .. 32 */
+
OP_REGLST, /* ARM register list */
+ OP_CLRMLST, /* CLRM register list */
OP_VRSLST, /* VFP single-precision register list */
OP_VRDLST, /* VFP double-precision register list */
OP_VRSDLST, /* VFP single or double-precision register list (& quad) */
OP_NRDLST, /* Neon double-precision register list (d0-d31, qN aliases) */
OP_NSTRLST, /* Neon element/structure list */
+ OP_VRSDVLST, /* VFP single or double-precision register list and VPR */
OP_RNDQ_I0, /* Neon D or Q reg, or immediate zero. */
OP_RVSD_I0, /* VFP S or D reg, or immediate zero. */
OP_oI255c, /* curly-brace enclosed, 0 .. 255 */
OP_oRR, /* ARM register */
+ OP_oLR, /* ARM LR register */
OP_oRRnpc, /* ARM register, not the PC */
OP_oRRnpcsp, /* ARM register, neither the PC nor the SP (a.k.a. BadReg) */
OP_oRRw, /* ARM register, not r15, optional trailing ! */
enum arm_reg_type rtype;
parse_operand_result result;
unsigned int op_parse_code;
+ bfd_boolean partial_match;
#define po_char_or_fail(chr) \
do \
case OP_RRnpc:
case OP_RRnpcsp:
case OP_oRR:
+ case OP_LR:
+ case OP_oLR:
case OP_RR: po_reg_or_fail (REG_TYPE_RN); break;
case OP_RCP: po_reg_or_fail (REG_TYPE_CP); break;
case OP_RCN: po_reg_or_fail (REG_TYPE_CN); break;
/* Register lists. */
case OP_REGLST:
- val = parse_reg_list (&str);
+ val = parse_reg_list (&str, REGLIST_RN);
if (*str == '^')
{
inst.operands[i].writeback = 1;
}
break;
+ case OP_CLRMLST:
+ val = parse_reg_list (&str, REGLIST_CLRM);
+ break;
+
case OP_VRSLST:
- val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_S);
+ val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_S,
+ &partial_match);
break;
case OP_VRDLST:
- val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_D);
+ val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_D,
+ &partial_match);
break;
case OP_VRSDLST:
/* Allow Q registers too. */
val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
- REGLIST_NEON_D);
+ REGLIST_NEON_D, &partial_match);
if (val == FAIL)
{
inst.error = NULL;
val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
- REGLIST_VFP_S);
+ REGLIST_VFP_S, &partial_match);
+ inst.operands[i].issingle = 1;
+ }
+ break;
+
+ case OP_VRSDVLST:
+ val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
+ REGLIST_VFP_D_VPR, &partial_match);
+ if (val == FAIL && !partial_match)
+ {
+ inst.error = NULL;
+ val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
+ REGLIST_VFP_S_VPR, &partial_match);
inst.operands[i].issingle = 1;
}
break;
case OP_NRDLST:
val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
- REGLIST_NEON_D);
+ REGLIST_NEON_D, &partial_match);
break;
case OP_NSTRLST:
case OP_COND:
case OP_oBARRIER_I15:
case OP_REGLST:
+ case OP_CLRMLST:
case OP_VRSLST:
case OP_VRDLST:
case OP_VRSDLST:
+ case OP_VRSDVLST:
case OP_NRDLST:
case OP_NSTRLST:
if (val == FAIL)
inst.operands[i].imm = val;
break;
+ case OP_LR:
+ case OP_oLR:
+ if (inst.operands[i].reg != REG_LR)
+ inst.error = _("operand must be LR register");
+ break;
+
default:
break;
}
X(_b, e000, f000b000), \
X(_bcond, d000, f0008000), \
X(_bf, 0000, f040e001), \
+ X(_bfcsel,0000, f000e001), \
X(_bfx, 0000, f060e001), \
+ X(_bfl, 0000, f000c001), \
X(_bflx, 0000, f070e001), \
X(_bic, 4380, ea200000), \
X(_bics, 4380, ea300000), \
X(_cpsid, b670, f3af8600), \
X(_cpy, 4600, ea4f0000), \
X(_dec_sp,80dd, f1ad0d00), \
+ X(_dls, 0000, f040e001), \
X(_eor, 4040, ea800000), \
X(_eors, 4040, ea900000), \
X(_inc_sp,00dd, f10d0d00), \
X(_ldr_pc,4800, f85f0000), \
X(_ldr_pc2,4800, f85f0000), \
X(_ldr_sp,9800, f85d0000), \
+ X(_le, 0000, f00fc001), \
X(_lsl, 0000, fa00f000), \
X(_lsls, 0000, fa10f000), \
X(_lsr, 0800, fa20f000), \
X(_yield, bf10, f3af8001), \
X(_wfe, bf20, f3af8002), \
X(_wfi, bf30, f3af8003), \
+ X(_wls, 0000, f040c001), \
X(_sev, bf40, f3af8004), \
X(_sevl, bf50, f3af8005), \
X(_udf, de00, f7f0a000)
/* Helper function used for both push/pop and ldm/stm. */
static void
-encode_thumb2_ldmstm (int base, unsigned mask, bfd_boolean writeback)
+encode_thumb2_multi (bfd_boolean do_io, int base, unsigned mask,
+ bfd_boolean writeback)
{
- bfd_boolean load;
+ bfd_boolean load, store;
- load = (inst.instruction & (1 << 20)) != 0;
+ gas_assert (base != -1 || !do_io);
+ load = do_io && ((inst.instruction & (1 << 20)) != 0);
+ store = do_io && !load;
if (mask & (1 << 13))
inst.error = _("SP not allowed in register list");
- if ((mask & (1 << base)) != 0
+ if (do_io && (mask & (1 << base)) != 0
&& writeback)
inst.error = _("having the base register in the register list when "
"using write back is UNPREDICTABLE");
set_it_insn_type_last ();
}
}
- else
+ else if (store)
{
if (mask & (1 << 15))
inst.error = _("PC not allowed in register list");
}
- if ((mask & (mask - 1)) == 0)
+ if (do_io && ((mask & (mask - 1)) == 0))
{
/* Single register transfers implemented as str/ldr. */
if (writeback)
inst.instruction |= WRITE_BACK;
inst.instruction |= mask;
- inst.instruction |= base << 16;
+ if (do_io)
+ inst.instruction |= base << 16;
}
static void
if (inst.instruction < 0xffff)
inst.instruction = THUMB_OP32 (inst.instruction);
- encode_thumb2_ldmstm (inst.operands[0].reg, inst.operands[1].imm,
- inst.operands[0].writeback);
+ encode_thumb2_multi (TRUE /* do_io */, inst.operands[0].reg,
+ inst.operands[1].imm,
+ inst.operands[0].writeback);
}
}
else
else if (unified_syntax)
{
inst.instruction = THUMB_OP32 (inst.instruction);
- encode_thumb2_ldmstm (13, mask, TRUE);
+ encode_thumb2_multi (TRUE /* do_io */, 13, mask, TRUE);
}
else
{
}
}
+/* Encode an Armv8.1-M Mainline CLRM instruction.  Operand 0 carries the
+   register-list bitmask (parsed with REGLIST_CLRM).  The encoding is
+   shared with LDM/STM via encode_thumb2_multi, with do_io FALSE since
+   CLRM performs no memory access and has no base register (hence the
+   -1 base argument, which is ignored when do_io is FALSE).  */
+static void
+do_t_clrm (void)
+{
+ if (unified_syntax)
+ encode_thumb2_multi (FALSE /* do_io */, -1, inst.operands[0].imm, FALSE);
+ else
+ {
+ inst.error = _("invalid register list to push/pop instruction");
+ return;
+ }
+}
+
+/* Encode an Armv8.1-M Mainline VSCCLRM instruction.  Operand 0 holds
+   the first register of the list (reg), the register count (imm) and
+   whether the list was written with single-precision registers
+   (issingle); see the OP_VRSDVLST parsing case.  */
+static void
+do_t_vscclrm (void)
+{
+ if (inst.operands[0].issingle)
+ {
+ /* Single precision: low bit of the S register number goes to
+    bit 22; the remaining bits form the register field at bits
+    12-15.  The count occupies the low 8 bits directly.  */
+ inst.instruction |= (inst.operands[0].reg & 0x1) << 22;
+ inst.instruction |= (inst.operands[0].reg & 0x1e) << 11;
+ inst.instruction |= inst.operands[0].imm;
+ }
+ else
+ {
+ /* Double precision: bit 4 of the D register number goes to
+    bit 22; the low 4 bits form the register field at bits 12-15.
+    The count is doubled (two words per D register).  */
+ inst.instruction |= (inst.operands[0].reg & 0x10) << 18;
+ inst.instruction |= (inst.operands[0].reg & 0xf) << 12;
+ inst.instruction |= 1 << 8;
+ inst.instruction |= inst.operands[0].imm << 1;
+ }
+}
+
static void
do_t_rbit (void)
{
}
break;
+ case T_MNEM_bfl:
+ if (inst.operands[1].hasreloc == 0)
+ {
+ int val = inst.operands[1].imm;
+ if (v8_1_branch_value_check (inst.operands[1].imm, 19, TRUE) == FAIL)
+ as_bad (BAD_BRANCH_OFF);
+
+ int immA = (val & 0x0007f000) >> 12;
+ int immB = (val & 0x00000ffc) >> 2;
+ int immC = (val & 0x00000002) >> 1;
+ inst.instruction |= (immA << 16) | (immB << 1) | (immC << 11);
+ }
+ else
+ {
+ inst.relocs[1].type = BFD_RELOC_ARM_THUMB_BF19;
+ inst.relocs[1].pc_rel = 1;
+ }
+ break;
+
+ case T_MNEM_bfcsel:
+ /* Operand 1. */
+ if (inst.operands[1].hasreloc == 0)
+ {
+ int val = inst.operands[1].imm;
+ int immA = (val & 0x00001000) >> 12;
+ int immB = (val & 0x00000ffc) >> 2;
+ int immC = (val & 0x00000002) >> 1;
+ inst.instruction |= (immA << 16) | (immB << 1) | (immC << 11);
+ }
+ else
+ {
+ inst.relocs[1].type = BFD_RELOC_ARM_THUMB_BF13;
+ inst.relocs[1].pc_rel = 1;
+ }
+
+ /* Operand 2. */
+ if (inst.operands[2].hasreloc == 0)
+ {
+ constraint ((inst.operands[0].hasreloc != 0), BAD_ARGS);
+ int val2 = inst.operands[2].imm;
+ int val0 = inst.operands[0].imm & 0x1f;
+ int diff = val2 - val0;
+ if (diff == 4)
+ inst.instruction |= 1 << 17; /* T bit. */
+ else if (diff != 2)
+ as_bad (_("out of range label-relative fixup value"));
+ }
+ else
+ {
+ constraint ((inst.operands[0].hasreloc == 0), BAD_ARGS);
+ inst.relocs[2].type = BFD_RELOC_THUMB_PCREL_BFCSEL;
+ inst.relocs[2].pc_rel = 1;
+ }
+
+ /* Operand 3. */
+ constraint (inst.cond != COND_ALWAYS, BAD_COND);
+ inst.instruction |= (inst.operands[3].imm & 0xf) << 18;
+ break;
+
case T_MNEM_bfx:
case T_MNEM_bflx:
inst.instruction |= inst.operands[1].reg << 16;
}
}
+/* Helper function for do_t_loloop to handle relocations.  IS_LE is
+   nonzero for the LE instruction, whose constant branch offset is
+   negated before range checking and encoding.  Non-constant
+   expressions are deferred to a BFD_RELOC_ARM_THUMB_LOOP12 fixup.  */
+static void
+v8_1_loop_reloc (int is_le)
+{
+ if (inst.relocs[0].exp.X_op == O_constant)
+ {
+ /* Offset known at assembly time: encode it directly.  */
+ int value = inst.relocs[0].exp.X_add_number;
+ value = (is_le) ? -value : value;
+
+ if (v8_1_branch_value_check (value, 12, FALSE) == FAIL)
+ as_bad (BAD_BRANCH_OFF);
+
+ int imml, immh;
+
+ /* Split the offset: bits 2-11 into immh, bit 1 into imml.  */
+ immh = (value & 0x00000ffc) >> 2;
+ imml = (value & 0x00000002) >> 1;
+
+ inst.instruction |= (imml << 11) | (immh << 1);
+ }
+ else
+ {
+ inst.relocs[0].type = BFD_RELOC_ARM_THUMB_LOOP12;
+ inst.relocs[0].pc_rel = 1;
+ }
+}
+
+/* To handle the Scalar Low Overhead Loop instructions
+   in Armv8.1-M Mainline (DLS, WLS, LE).  */
+static void
+do_t_loloop (void)
+{
+ unsigned long insn = inst.instruction;
+
+ /* These instructions may not appear inside an IT block.  */
+ set_it_insn_type (OUTSIDE_IT_INSN);
+ inst.instruction = THUMB_OP32 (inst.instruction);
+
+ switch (insn)
+ {
+ case T_MNEM_le:
+ /* le <label>. */
+ /* The plain form without the loop-count register sets bit 21.  */
+ if (!inst.operands[0].present)
+ inst.instruction |= 1 << 21;
+
+ v8_1_loop_reloc (TRUE);
+ break;
+
+ case T_MNEM_wls:
+ /* WLS additionally encodes the loop-end branch offset.  */
+ v8_1_loop_reloc (FALSE);
+ /* Fall through. */
+ case T_MNEM_dls:
+ /* Both WLS and DLS take the loop-count register in operand 1,
+    encoded at bits 16-19.  */
+ constraint (inst.operands[1].isreg != 1, BAD_ARGS);
+ inst.instruction |= (inst.operands[1].reg << 16);
+ break;
+
+ default: abort();
+ }
+}
+
/* Neon instruction encoder helpers. */
/* Encodings for the different types for various Neon opcodes. */
{ mnem, OPS##nops ops, OT_csuffix, 0x0, T_MNEM##top, 0, THUMB_VARIANT, NULL, \
do_##te }
+/* T_MNEM_xyz enumerator variants of ToU. */
+#define toU(mnem, top, nops, ops, te) \
+ { mnem, OPS##nops ops, OT_unconditional, 0x0, T_MNEM##top, 0, THUMB_VARIANT, \
+ NULL, do_##te }
+
/* Legacy mnemonics that always have conditional infix after the third
character. */
#define CL(mnem, op, nops, ops, ae) \
#undef THUMB_VARIANT
#define THUMB_VARIANT & arm_ext_v8_1m_main
toC("bf", _bf, 2, (EXPs, EXPs), t_branch_future),
+ toU("bfcsel", _bfcsel, 4, (EXPs, EXPs, EXPs, COND), t_branch_future),
toC("bfx", _bfx, 2, (EXPs, RRnpcsp), t_branch_future),
+ toC("bfl", _bfl, 2, (EXPs, EXPs), t_branch_future),
toC("bflx", _bflx, 2, (EXPs, RRnpcsp), t_branch_future),
+
+ toU("dls", _dls, 2, (LR, RRnpcsp), t_loloop),
+ toU("wls", _wls, 3, (LR, RRnpcsp, EXP), t_loloop),
+ toU("le", _le, 2, (oLR, EXP), t_loloop),
+
+ ToC("clrm", e89f0000, 1, (CLRMLST), t_clrm),
+ ToC("vscclrm", ec9f0a00, 1, (VRSDVLST), t_vscclrm)
};
#undef ARM_VARIANT
#undef THUMB_VARIANT
#undef ToC
#undef toC
#undef ToU
+#undef toU
\f
/* MD interface: bits in the object file. */
case BFD_RELOC_THUMB_PCREL_BRANCH12:
case BFD_RELOC_THUMB_PCREL_BRANCH20:
case BFD_RELOC_THUMB_PCREL_BRANCH25:
+ case BFD_RELOC_THUMB_PCREL_BFCSEL:
case BFD_RELOC_ARM_THUMB_BF17:
case BFD_RELOC_ARM_THUMB_BF19:
+ case BFD_RELOC_ARM_THUMB_BF13:
+ case BFD_RELOC_ARM_THUMB_LOOP12:
return base + 4;
case BFD_RELOC_THUMB_PCREL_BRANCH23:
}
break;
+ case BFD_RELOC_THUMB_PCREL_BFCSEL:
+ if (fixP->fx_addsy
+ && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
+ && !S_FORCE_RELOC (fixP->fx_addsy, TRUE)
+ && ARM_IS_FUNC (fixP->fx_addsy)
+ && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
+ {
+ fixP->fx_done = 0;
+ }
+ if ((value & ~0x7f) && ((value & ~0x3f) != ~0x3f))
+ as_bad_where (fixP->fx_file, fixP->fx_line,
+ _("branch out of range"));
+
+ if (fixP->fx_done || !seg->use_rela_p)
+ {
+ newval = md_chars_to_number (buf, THUMB_SIZE);
+
+ addressT boff = ((newval & 0x0780) >> 7) << 1;
+ addressT diff = value - boff;
+
+ if (diff == 4)
+ {
+ newval |= 1 << 1; /* T bit. */
+ }
+ else if (diff != 2)
+ {
+ as_bad_where (fixP->fx_file, fixP->fx_line,
+ _("out of range label-relative fixup value"));
+ }
+ md_number_to_chars (buf, newval, THUMB_SIZE);
+ }
+ break;
+
case BFD_RELOC_ARM_THUMB_BF17:
if (fixP->fx_addsy
&& (S_GET_SEGMENT (fixP->fx_addsy) == seg)
}
break;
+ case BFD_RELOC_ARM_THUMB_BF13:
+ if (fixP->fx_addsy
+ && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
+ && !S_FORCE_RELOC (fixP->fx_addsy, TRUE)
+ && ARM_IS_FUNC (fixP->fx_addsy)
+ && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
+ {
+ /* Force a relocation for a branch 13 bits wide. */
+ fixP->fx_done = 0;
+ }
+
+ if (v8_1_branch_value_check (value, 13, TRUE) == FAIL)
+ as_bad_where (fixP->fx_file, fixP->fx_line,
+ BAD_BRANCH_OFF);
+
+ if (fixP->fx_done || !seg->use_rela_p)
+ {
+ offsetT newval2;
+ addressT immA, immB, immC;
+
+ immA = (value & 0x00001000) >> 12;
+ immB = (value & 0x00000ffc) >> 2;
+ immC = (value & 0x00000002) >> 1;
+
+ newval = md_chars_to_number (buf, THUMB_SIZE);
+ newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
+ newval |= immA;
+ newval2 |= (immC << 11) | (immB << 1);
+ md_number_to_chars (buf, newval, THUMB_SIZE);
+ md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
+ }
+ break;
+
+ case BFD_RELOC_ARM_THUMB_LOOP12:
+ if (fixP->fx_addsy
+ && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
+ && !S_FORCE_RELOC (fixP->fx_addsy, TRUE)
+ && ARM_IS_FUNC (fixP->fx_addsy)
+ && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
+ {
+ /* Force a relocation for a branch 12 bits wide. */
+ fixP->fx_done = 0;
+ }
+
+ bfd_vma insn = get_thumb32_insn (buf);
+ /* le lr, <label> or le <label> */
+ if (((insn & 0xffffffff) == 0xf00fc001)
+ || ((insn & 0xffffffff) == 0xf02fc001))
+ value = -value;
+
+ if (v8_1_branch_value_check (value, 12, FALSE) == FAIL)
+ as_bad_where (fixP->fx_file, fixP->fx_line,
+ BAD_BRANCH_OFF);
+ if (fixP->fx_done || !seg->use_rela_p)
+ {
+ addressT imml, immh;
+
+ immh = (value & 0x00000ffc) >> 2;
+ imml = (value & 0x00000002) >> 1;
+
+ newval = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
+ newval |= (imml << 11) | (immh << 1);
+ md_number_to_chars (buf + THUMB_SIZE, newval, THUMB_SIZE);
+ }
+ break;
+
case BFD_RELOC_ARM_V4BX:
/* This will need to go in the object file. */
fixP->fx_done = 0;
case BFD_RELOC_ARM_FUNCDESC:
case BFD_RELOC_ARM_THUMB_BF17:
case BFD_RELOC_ARM_THUMB_BF19:
+ case BFD_RELOC_ARM_THUMB_BF13:
code = fixp->fx_r_type;
break;
return NULL;
case BFD_RELOC_THUMB_PCREL_BRANCH5:
+ case BFD_RELOC_THUMB_PCREL_BFCSEL:
+ case BFD_RELOC_ARM_THUMB_LOOP12:
as_bad_where (fixp->fx_file, fixp->fx_line,
_("%s used for a symbol not defined in the same file"),
bfd_get_reloc_code_name (fixp->fx_r_type));