From: mengqinggang
Date: Sun, 7 Sep 2025 09:43:56 +0000 (+0800)
Subject: LoongArch: Add processing for LA32/LA32R relocations
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=c9d1a08d68e1cc3ca621a624b4973ff7022556e0;p=thirdparty%2Fbinutils-gdb.git

LoongArch: Add processing for LA32/LA32R relocations

R_LARCH_CALL30:
  pcaddu12i $ra, %call30(func)
  jirl $ra, $ra, 0
Similar to R_LARCH_CALL36, the pcaddu12i and jirl instructions must be adjacent.

R_LARCH_PCADD_HI20, R_LARCH_PCADD_LO12:
  .Lpcadd_hi0:
  pcaddu12i $t0, %pcadd_hi20(sym)
  addi.w $t0, $t0, %pcadd_lo12(.Lpcadd_hi0)
Similar to the RISC-V PCREL_HI20/PCREL_LO12 relocations, R_LARCH_PCADD_LO12 references the symbol of its matching R_LARCH_PCADD_HI20.
---
diff --git a/bfd/elfnn-loongarch.c b/bfd/elfnn-loongarch.c index dd6ac76a042..5e5eb237abd 100644 --- a/bfd/elfnn-loongarch.c +++ b/bfd/elfnn-loongarch.c @@ -228,7 +228,8 @@ loongarch_elf_new_section_hook (bfd *abfd, asection *sec) #define IS_CALL_RELOC(R_TYPE) \ ((R_TYPE) == R_LARCH_B26 \ - ||(R_TYPE) == R_LARCH_CALL36) + ||(R_TYPE) == R_LARCH_CALL36 \ + ||(R_TYPE) == R_LARCH_CALL30) /* If TLS GD/IE need dynamic relocations, INDX will be the dynamic indx, and set NEED_RELOC to true used in allocate_dynrelocs and @@ -885,7 +886,8 @@ bad_static_reloc (struct bfd_link_info *info, bool bad_extern_access = (bfd_link_pde (info) || r_type == R_LARCH_PCREL20_S2 - || r_type == R_LARCH_PCALA_HI20); + || r_type == R_LARCH_PCALA_HI20 + || r_type == R_LARCH_PCADD_HI20); if (h) name = h->root.root.string; @@ -1051,6 +1053,7 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, switch (r_type) { case R_LARCH_GOT_PC_HI20: + case R_LARCH_GOT_PCADD_HI20: case R_LARCH_GOT_HI20: case R_LARCH_SOP_PUSH_GPREL: /* For la.global. */ @@ -1064,8 +1067,10 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, break; case R_LARCH_TLS_LD_PC_HI20: + case R_LARCH_TLS_LD_PCADD_HI20: case R_LARCH_TLS_LD_HI20: case R_LARCH_TLS_GD_PC_HI20: + case R_LARCH_TLS_GD_PCADD_HI20: case R_LARCH_TLS_GD_HI20: case R_LARCH_SOP_PUSH_TLS_GD: if (!loongarch_elf_record_tls_and_got_reference (abfd, info, h, @@ -1076,6 +1081,7 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, break; case R_LARCH_TLS_IE_PC_HI20: + case R_LARCH_TLS_IE_PCADD_HI20: case R_LARCH_TLS_IE_HI20: case R_LARCH_SOP_PUSH_TLS_GOT: if (bfd_link_pic (info)) @@ -1103,6 +1109,7 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, break; case R_LARCH_TLS_DESC_PC_HI20: + case R_LARCH_TLS_DESC_PCADD_HI20: case R_LARCH_TLS_DESC_HI20: if (!loongarch_elf_record_tls_and_got_reference (abfd, info, h, r_symndx, @@ -1132,6 +1139,7 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, should not be used to build shared libraries. In static PIE undefined weak symbols may be allowed by rewriting pcaddi to addi.w if addend is in [-2048, 2048).
*/ + case R_LARCH_PCADD_HI20: case R_LARCH_PCREL20_S2: if (bfd_link_pic (info) && (sec->flags & SEC_ALLOC) != 0 @@ -1179,6 +1187,7 @@ loongarch_elf_check_relocs (bfd *abfd, struct bfd_link_info *info, case R_LARCH_B21: case R_LARCH_B26: case R_LARCH_CALL36: + case R_LARCH_CALL30: if (h != NULL) { h->needs_plt = 1; @@ -3012,6 +3021,19 @@ perform_relocation (const Elf_Internal_Rela *rel, asection *input_section, case R_LARCH_TLS_LD_PCREL20_S2: case R_LARCH_TLS_GD_PCREL20_S2: case R_LARCH_TLS_DESC_PCREL20_S2: + case R_LARCH_CALL30: + case R_LARCH_PCADD_HI20: + case R_LARCH_PCADD_LO12: + case R_LARCH_GOT_PCADD_HI20: + case R_LARCH_GOT_PCADD_LO12: + case R_LARCH_TLS_IE_PCADD_HI20: + case R_LARCH_TLS_IE_PCADD_LO12: + case R_LARCH_TLS_LD_PCADD_HI20: + case R_LARCH_TLS_LD_PCADD_LO12: + case R_LARCH_TLS_GD_PCADD_HI20: + case R_LARCH_TLS_GD_PCADD_LO12: + case R_LARCH_TLS_DESC_PCADD_HI20: + case R_LARCH_TLS_DESC_PCADD_LO12: r = loongarch_check_offset (rel, input_section); if (r != bfd_reloc_ok) break; @@ -3268,6 +3290,153 @@ tlsoff (struct bfd_link_info *info, bfd_vma addr) return addr - elf_hash_table (info)->tls_sec->vma; } +typedef struct +{ + /* PC value. */ + bfd_vma address; + /* Relocation value with addend. */ + bfd_vma value; +} loongarch_pcrel_hi_reloc; + +typedef struct loongarch_pcrel_lo_reloc +{ + /* PC value of pcaddu12i. */ + bfd_vma address; + /* Internal relocation. */ + Elf_Internal_Rela *reloc; + /* loongarch_elf_relocate_section can only handle one input section at a time, + so we can only resolve pcadd_hi20 and pcadd_lo12 in the same section. If + these pcrel relocs are not in the same section we should report dangerous + relocation errors. */ + asection *input_section; + struct bfd_link_info *info; + reloc_howto_type *howto; + bfd_byte *contents; + /* The next loongarch_pcrel_lo_reloc. */ + struct loongarch_pcrel_lo_reloc *next; +} loongarch_pcrel_lo_reloc; + +typedef struct +{ + /* Hash table for loongarch_pcrel_hi_reloc. */ + htab_t hi_relocs; + /* Linked list for loongarch_pcrel_lo_reloc. */ + loongarch_pcrel_lo_reloc *lo_relocs; +} loongarch_pcrel_relocs; + +/* Hash function of the pcrel_hi_reloc hash table. */ +static hashval_t +loongarch_pcrel_reloc_hash (const void *entry) +{ + const loongarch_pcrel_hi_reloc *e = entry; + return (hashval_t)(e->address >> 2); +} + +/* Comparison function of the pcrel_hi_reloc hash table. */ +static int +loongarch_pcrel_reloc_eq (const void *entry1, const void *entry2) +{ + const loongarch_pcrel_hi_reloc *e1 = entry1, *e2 = entry2; + return e1->address == e2->address; +} + +static bool +loongarch_init_pcrel_relocs (loongarch_pcrel_relocs *p) +{ + p->lo_relocs = NULL; + p->hi_relocs = htab_create (1024, loongarch_pcrel_reloc_hash, + loongarch_pcrel_reloc_eq, free); + return p->hi_relocs != NULL; +} + +static void +loongarch_free_pcrel_reloc (loongarch_pcrel_relocs *p) +{ + loongarch_pcrel_lo_reloc *cur = p->lo_relocs; + + while (cur != NULL) + { + loongarch_pcrel_lo_reloc *next = cur->next; + free (cur); + cur = next; + } + htab_delete (p->hi_relocs); +} + +static bool +loongarch_record_pcrel_hi_reloc (loongarch_pcrel_relocs *p, + bfd_vma addr, + bfd_vma *value) +{ + bfd_vma offset = *value - addr; + bfd_vma off_lo = offset & (bfd_vma)0xfff; + /* If the lo12 immediate is > 0x7ff, the hi20 immediate needs to add 0x1 + because of the sign extension done by addi.w/ld.w.
+ See RELOCATE_CALC_PC32_HI20(relocation, pc) */ + if (off_lo > 0x7ff) + offset += 0x1000; + + *value = offset; + + loongarch_pcrel_hi_reloc entry = {addr, offset}; + loongarch_pcrel_hi_reloc **slot = + (loongarch_pcrel_hi_reloc **)htab_find_slot (p->hi_relocs, &entry, INSERT); + + BFD_ASSERT (*slot == NULL); + *slot = (loongarch_pcrel_hi_reloc *) bfd_malloc (sizeof (loongarch_pcrel_hi_reloc)); + if (*slot == NULL) + return false; + **slot = entry; + return true; +} + +static bool +loongarch_record_pcrel_lo_reloc (loongarch_pcrel_relocs *p, + bfd_vma addr, + Elf_Internal_Rela *reloc, + asection *input_section, + struct bfd_link_info *info, + reloc_howto_type *howto, + bfd_byte *contents) +{ + loongarch_pcrel_lo_reloc *entry; + entry = (loongarch_pcrel_lo_reloc *) bfd_malloc (sizeof (loongarch_pcrel_lo_reloc)); + if (entry == NULL) + return false; + *entry = (loongarch_pcrel_lo_reloc) {addr, reloc, input_section, info, + howto, contents, p->lo_relocs}; + p->lo_relocs = entry; + return true; +} + +static bool +loongarch_resolve_pcrel_lo_relocs (loongarch_pcrel_relocs *p) +{ + loongarch_pcrel_lo_reloc *r; + for (r = p->lo_relocs; r != NULL; r = r->next) + { + bfd *input_bfd = r->input_section->owner; + loongarch_pcrel_hi_reloc search = {r->address, 0}; + loongarch_pcrel_hi_reloc *entry = htab_find (p->hi_relocs, &search); + + char *string = NULL; + if (entry == NULL) + string = _("%pcrel_lo missing matching %pcrel_hi"); + + if (string != NULL) + { + ((*r->info->callbacks->reloc_overflow) + (r->info, NULL, NULL, r->howto->name, (bfd_vma) 0, + input_bfd, r->input_section, r->reloc->r_offset)); + return true; + } + + perform_relocation (r->reloc, r->input_section, r->howto, entry->value, + input_bfd, r->contents); + } + return true; +} + static int loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, bfd *input_bfd, asection *input_section, @@ -3277,6 +3446,7 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, { Elf_Internal_Rela *rel; Elf_Internal_Rela *relend; + loongarch_pcrel_relocs pcrel_relocs; bool fatal = false; asection *sreloc = elf_section_data (input_section)->sreloc; struct loongarch_elf_link_hash_table *htab = loongarch_elf_hash_table (info); @@ -3287,6 +3457,10 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, bool is_dyn = elf_hash_table (info)->dynamic_sections_created; asection *plt = htab->elf.splt ? htab->elf.splt : htab->elf.iplt; asection *got = htab->elf.sgot; + uint32_t insn; + + if (!loongarch_init_pcrel_relocs (&pcrel_relocs)) + return false; relend = relocs + input_section->reloc_count; for (rel = relocs; rel < relend; rel++) @@ -4069,6 +4243,7 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, case R_LARCH_B21: case R_LARCH_B26: case R_LARCH_CALL36: + case R_LARCH_CALL30: unresolved_reloc = false; bool via_plt = plt != NULL && h != NULL && h->plt.offset != (bfd_vma) - 1; @@ -4080,9 +4255,12 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, /* A call to an undefined weak symbol is converted to 0.
*/ if (!via_plt && IS_CALL_RELOC (r_type)) { - /* call36 fn1 => pcaddu18i $ra,0+jirl $ra,$zero,0 - tail36 $t0,fn1 => pcaddi18i $t0,0+jirl $zero,$zero,0 */ - if (R_LARCH_CALL36 == r_type) + /* call36 fn1 => pcaddu18i $ra,0 + jirl $ra,$zero,0 + tail36 $t0,fn1 => pcaddi18i $t0,0 + jirl $t0,$zero,0 + call30 fn1 => pcaddu12i $ra,0 + jirl $ra,$zero,0 + tail30 $t0,fn1 => pcaddi12i $t0,0 + jirl $t0,$zero,0 */ + if (r_type == R_LARCH_CALL36 + || r_type == R_LARCH_CALL30) { uint32_t jirl = bfd_get (32, input_bfd, contents + rel->r_offset + 4); @@ -4100,10 +4278,9 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, if (LARCH_INSN_B (b_bl)) bfd_put (32, input_bfd, LARCH_OP_JIRL, contents + rel->r_offset); - else - /* bl %plt(fn1) => jirl $ra,zero,0. */ - bfd_put (32, input_bfd, LARCH_OP_JIRL | 0x1, - contents + rel->r_offset); + else /* bl %plt(fn1) => jirl $ra,zero,0. */ + bfd_put (32, input_bfd, LARCH_OP_JIRL | 0x1, + contents + rel->r_offset); } r = bfd_reloc_continue; break; @@ -4164,8 +4341,8 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, case R_LARCH_PCALA64_LO20: pc -= 8; /* Fall through. */ - case R_LARCH_PCREL20_S2: case R_LARCH_PCALA_HI20: + case R_LARCH_PCREL20_S2: unresolved_reloc = false; /* If sym is undef weak and it's hidden or we are doing a static @@ -4206,7 +4383,7 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, break; } - uint32_t insn = bfd_get (32, input_bfd, + insn = bfd_get (32, input_bfd, contents + rel->r_offset); insn = LARCH_GET_RD (insn) | LARCH_OP_ADDI_W; insn |= (relocation & 0xfff) << 10; @@ -4218,7 +4395,7 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, RELOCATE_CALC_PC32_HI20 (relocation, pc); if (resolve_pcrel_undef_weak) { - uint32_t insn = bfd_get (32, input_bfd, + insn = bfd_get (32, input_bfd, contents + rel->r_offset); insn = LARCH_GET_RD (insn) | LARCH_OP_LU12I_W; bfd_put_32 (input_bfd, insn, contents + rel->r_offset); @@ -4229,12 +4406,6 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, } break; - case R_LARCH_TLS_LE_HI20_R: - relocation += rel->r_addend; - relocation = tlsoff (info, relocation); - RELOCATE_TLS_TP32_HI20 (relocation); - break; - case R_LARCH_PCALA_LO12: /* Not support if sym_addr in 2k page edge. pcalau12i pc_hi20 (sym_addr) @@ -4251,7 +4422,7 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, /* For 2G jump, generate pcalau12i, jirl. */ /* If use jirl, turns to R_LARCH_B16. 
*/ - uint32_t insn = bfd_get (32, input_bfd, contents + rel->r_offset); + insn = bfd_get (32, input_bfd, contents + rel->r_offset); if (LARCH_INSN_JIRL (insn)) { relocation &= 0xfff; @@ -4263,7 +4434,46 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, } break; + case R_LARCH_PCADD_HI20: + resolve_pcrel_undef_weak = + ((info->nointerp + || (h && ELF_ST_VISIBILITY (h->other) != STV_DEFAULT)) + && is_undefweak); + if (resolve_pcrel_undef_weak) + pc = 0; + + if (h && h->plt.offset != MINUS_ONE) + relocation = sec_addr (plt) + h->plt.offset; + else + relocation += rel->r_addend; + + if (!loongarch_record_pcrel_hi_reloc (&pcrel_relocs, pc, &relocation)) + r = bfd_reloc_overflow; + + if (resolve_pcrel_undef_weak) + { + insn = bfd_get (32, input_bfd, + contents + rel->r_offset); + insn = LARCH_GET_RD (insn) | LARCH_OP_LU12I_W; + bfd_put_32 (input_bfd, insn, contents + rel->r_offset); + } + break; + + case R_LARCH_PCADD_LO12: + case R_LARCH_GOT_PCADD_LO12: + case R_LARCH_TLS_IE_PCADD_LO12: + case R_LARCH_TLS_LD_PCADD_LO12: + case R_LARCH_TLS_GD_PCADD_LO12: + case R_LARCH_TLS_DESC_PCADD_LO12: + if (loongarch_record_pcrel_lo_reloc (&pcrel_relocs, relocation, rel, + input_section, info, howto, + contents)) + continue; + r = bfd_reloc_overflow; + break; + case R_LARCH_GOT_PC_HI20: + case R_LARCH_GOT_PCADD_HI20: case R_LARCH_GOT_HI20: /* Calc got offset. */ { @@ -4356,6 +4566,12 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, if (r_type == R_LARCH_GOT_PC_HI20) RELOCATE_CALC_PC32_HI20 (relocation, pc); + if (r_type == R_LARCH_GOT_PCADD_HI20) + { + if (!loongarch_record_pcrel_hi_reloc (&pcrel_relocs, pc, + &relocation)) + r = bfd_reloc_overflow; + } break; case R_LARCH_GOT_PC_LO12: @@ -4396,6 +4612,12 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, break; + case R_LARCH_TLS_LE_HI20_R: + relocation += rel->r_addend; + relocation = tlsoff (info, relocation); + RELOCATE_TLS_TP32_HI20 (relocation); + break; + case R_LARCH_TLS_LE_HI20: case R_LARCH_TLS_LE_LO12: case R_LARCH_TLS_LE_LO12_R: @@ -4418,12 +4640,16 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, Now, LD and GD is both GOT_TLS_GD type, LD seems to can be omitted. 
*/ case R_LARCH_TLS_IE_PC_HI20: + case R_LARCH_TLS_IE_PCADD_HI20: case R_LARCH_TLS_IE_HI20: case R_LARCH_TLS_LD_PC_HI20: + case R_LARCH_TLS_LD_PCADD_HI20: case R_LARCH_TLS_LD_HI20: case R_LARCH_TLS_GD_PC_HI20: + case R_LARCH_TLS_GD_PCADD_HI20: case R_LARCH_TLS_GD_HI20: case R_LARCH_TLS_DESC_PC_HI20: + case R_LARCH_TLS_DESC_PCADD_HI20: case R_LARCH_TLS_DESC_HI20: case R_LARCH_TLS_LD_PCREL20_S2: case R_LARCH_TLS_GD_PCREL20_S2: @@ -4432,10 +4658,12 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, unresolved_reloc = false; if (r_type == R_LARCH_TLS_IE_PC_HI20 + || r_type == R_LARCH_TLS_IE_PCADD_HI20 || r_type == R_LARCH_TLS_IE_HI20) is_ie = true; if (r_type == R_LARCH_TLS_DESC_PC_HI20 + || r_type == R_LARCH_TLS_DESC_PCADD_HI20 || r_type == R_LARCH_TLS_DESC_HI20 || r_type == R_LARCH_TLS_DESC_PCREL20_S2) is_desc = true; @@ -4569,6 +4797,13 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, || r_type == R_LARCH_TLS_GD_PCREL20_S2 || r_type == R_LARCH_TLS_DESC_PCREL20_S2) relocation -= pc; + else if (r_type == R_LARCH_TLS_IE_PCADD_HI20 + || r_type == R_LARCH_TLS_LD_PCADD_HI20 + || r_type == R_LARCH_TLS_GD_PCADD_HI20 + || r_type == R_LARCH_TLS_DESC_PCADD_HI20) + if (!loongarch_record_pcrel_hi_reloc (&pcrel_relocs, pc, + &relocation)) + r = bfd_reloc_overflow; /* else {} ABS relocations. */ break; @@ -4731,6 +4966,10 @@ loongarch_elf_relocate_section (bfd *output_bfd, struct bfd_link_info *info, fatal = true; } + bool ret = loongarch_resolve_pcrel_lo_relocs (&pcrel_relocs); + fatal = !ret; + loongarch_free_pcrel_reloc (&pcrel_relocs); + return !fatal; } diff --git a/bfd/elfxx-loongarch.c b/bfd/elfxx-loongarch.c index b29ca218f24..a5a8dc34e0e 100644 --- a/bfd/elfxx-loongarch.c +++ b/bfd/elfxx-loongarch.c @@ -2281,6 +2281,13 @@ reloc_bits_sanity (bfd *abfd, reloc_howto_type *howto, bfd_vma *fix_val, so the high part need to add 0x8000. */ val = (((val + 0x8000) >> 16) << 5) | (((val & 0xffff) << 10) << 32); break; + case R_LARCH_CALL30: + /* call30 = pcaddu12i+jirl, the jirl immediate field has 16 bits. + Only use 10 bits immediate of jirl, so not need to add 0x8000. + Since there is "val = val >> howto->rightshift" in front, only + the lower 10 bits (0x3ff) need to be saved. */ + val = ((val >> 10) << 5) | (((val & 0x3ff) << 10) << 32); + break; default: val <<= howto->bitpos; break; diff --git a/gas/config/tc-loongarch.c b/gas/config/tc-loongarch.c index d345e941764..fdfe5762184 100644 --- a/gas/config/tc-loongarch.c +++ b/gas/config/tc-loongarch.c @@ -117,7 +117,7 @@ const char md_shortopts[] = "O::g::G:"; static const char default_arch[] = DEFAULT_ARCH; -static bool call36 = 0; +static bool call_reloc = 0; /* The dwarf2 data alignment, adjusted for 32 or 64 bit. */ int loongarch_cie_data_alignment; @@ -865,7 +865,7 @@ loongarch_args_parser_can_match_arg_helper (char esc_ch1, char esc_ch2, esc_ch1, esc_ch2, bit_field, arg); if (ip->reloc_info[0].type >= BFD_RELOC_LARCH_B16 - && ip->reloc_info[0].type <= BFD_RELOC_LARCH_TLS_DESC_PCREL20_S2) + && ip->reloc_info[0].type <= BFD_RELOC_LARCH_TLS_DESC_PCADD_LO12) { /* As we compact stack-relocs, it is no need for pop operation. But break out until here in order to check the imm field. @@ -1170,13 +1170,14 @@ static void append_fixed_insn (struct loongarch_cl_insn *insn) { /* Ensure the jirl is emitted to the same frag as the pcaddu18i. 
*/ - if (BFD_RELOC_LARCH_CALL36 == insn->reloc_info[0].type) + if (insn->reloc_info[0].type == BFD_RELOC_LARCH_CALL36 + || insn->reloc_info[0].type == BFD_RELOC_LARCH_CALL30) frag_grow (8); char *f = frag_more (insn->insn_length); move_insn (insn, frag_now, f - frag_now->fr_literal); - if (call36) + if (call_reloc) { if (strcmp (insn->name, "jirl") == 0) { @@ -1184,11 +1185,12 @@ append_fixed_insn (struct loongarch_cl_insn *insn) frag_wane (frag_now); frag_new (0); } - call36 = 0; + call_reloc = 0; } - if (BFD_RELOC_LARCH_CALL36 == insn->reloc_info[0].type) - call36 = 1; + if (insn->reloc_info[0].type == BFD_RELOC_LARCH_CALL36 + || insn->reloc_info[0].type == BFD_RELOC_LARCH_CALL30) + call_reloc = 1; } /* Add instructions based on the worst-case scenario firstly. */ @@ -1500,6 +1502,8 @@ loongarch_force_relocation (struct fix *fixp) case BFD_RELOC_LARCH_GOT_LO12: case BFD_RELOC_LARCH_GOT64_LO20: case BFD_RELOC_LARCH_GOT64_HI12: + case BFD_RELOC_LARCH_GOT_PCADD_HI20: + case BFD_RELOC_LARCH_GOT_PCADD_LO12: return 1; default: break; @@ -1589,6 +1593,14 @@ md_apply_fix (fixS *fixP, valueT *valP, segT seg ATTRIBUTE_UNUSED) case BFD_RELOC_LARCH_TLS_LE_ADD_R: case BFD_RELOC_LARCH_TLS_LE_HI20_R: case BFD_RELOC_LARCH_TLS_LE_LO12_R: + case BFD_RELOC_LARCH_TLS_IE_PCADD_HI20: + case BFD_RELOC_LARCH_TLS_IE_PCADD_LO12: + case BFD_RELOC_LARCH_TLS_LD_PCADD_HI20: + case BFD_RELOC_LARCH_TLS_LD_PCADD_LO12: + case BFD_RELOC_LARCH_TLS_GD_PCADD_HI20: + case BFD_RELOC_LARCH_TLS_GD_PCADD_LO12: + case BFD_RELOC_LARCH_TLS_DESC_PCADD_HI20: + case BFD_RELOC_LARCH_TLS_DESC_PCADD_LO12: /* Add tls lo (got_lo reloc type). */ if (fixP->fx_addsy == NULL) as_bad_where (fixP->fx_file, fixP->fx_line,