@@ -91,6 +91,7 @@ static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
   case FK_Data_2:
   case FK_Data_4:
   case FK_Data_8:
+  case FK_Data_leb128:
     return Value;
   case LoongArch::fixup_loongarch_b16: {
     if (!isInt<18>(Value))
@@ -128,6 +129,15 @@ static uint64_t adjustFixupValue(const MCFixup &Fixup, uint64_t Value,
   }
 }
 
+static void fixupLeb128(MCContext &Ctx, const MCFixup &Fixup,
+                        MutableArrayRef<char> Data, uint64_t Value) {
+  unsigned I;
+  for (I = 0; I != Data.size() && Value; ++I, Value >>= 7)
+    Data[I] |= uint8_t(Value & 0x7f);
+  if (Value)
+    Ctx.reportError(Fixup.getLoc(), "Invalid uleb128 value!");
+}
+
 void LoongArchAsmBackend::applyFixup(const MCAssembler &Asm,
                                      const MCFixup &Fixup,
                                      const MCValue &Target,
@@ -143,6 +153,10 @@ void LoongArchAsmBackend::applyFixup(const MCAssembler &Asm,
   MCFixupKindInfo Info = getFixupKindInfo(Kind);
   MCContext &Ctx = Asm.getContext();
 
+  // Fixup leb128 separately.
+  if (Fixup.getTargetKind() == FK_Data_leb128)
+    return fixupLeb128(Ctx, Fixup, Data, Value);
+
   // Apply any target-specific value adjustments.
   Value = adjustFixupValue(Fixup, Value, Ctx);
 
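
The patching scheme behind fixupLeb128 can be illustrated outside of LLVM: the assembler has already reserved a fixed-width ULEB128 field whose payload bits are zero (continuation bits set on every byte but the last), so the fixup only needs to OR the 7-bit groups of the final value into those bytes, and any bits left over mean the field was too small. The sketch below is a minimal standalone restatement under that assumption; reserveULEB128, patchULEB128 and decodeULEB128 are invented helpers for the example, not LLVM APIs.

#include <cstdint>
#include <cstdio>
#include <vector>

// Reserve a ULEB128 field of `Width` bytes that encodes 0 in padded form:
// every byte but the last carries the continuation bit.
static std::vector<uint8_t> reserveULEB128(unsigned Width) {
  std::vector<uint8_t> Field(Width, 0x80);
  Field.back() = 0x00;
  return Field;
}

// Mirrors fixupLeb128: OR 7-bit groups of Value into the field, low to high.
// Returns false if Value still has bits left once the field is exhausted.
static bool patchULEB128(std::vector<uint8_t> &Field, uint64_t Value) {
  for (unsigned I = 0; I != Field.size() && Value; ++I, Value >>= 7)
    Field[I] |= uint8_t(Value & 0x7f);
  return Value == 0;
}

// Decode the padded field back, for verification only.
static uint64_t decodeULEB128(const std::vector<uint8_t> &Field) {
  uint64_t Result = 0;
  unsigned Shift = 0;
  for (uint8_t Byte : Field) {
    Result |= uint64_t(Byte & 0x7f) << Shift;
    Shift += 7;
    if (!(Byte & 0x80))
      break;
  }
  return Result;
}

int main() {
  std::vector<uint8_t> Field = reserveULEB128(3); // room for values < 2^21
  bool OK = patchULEB128(Field, 300);             // e.g. a resolved label difference
  std::printf("ok=%d value=%llu\n", OK, (unsigned long long)decodeULEB128(Field));
  return 0;
}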
@@ -173,6 +187,7 @@ bool LoongArchAsmBackend::shouldForceRelocation(const MCAssembler &Asm,
   case FK_Data_2:
   case FK_Data_4:
   case FK_Data_8:
+  case FK_Data_leb128:
     return !Target.isAbsolute();
   }
 }
@@ -202,9 +217,24 @@ getRelocPairForSize(unsigned Size) {
     return std::make_pair(
         MCFixupKind(FirstLiteralRelocationKind + ELF::R_LARCH_ADD64),
         MCFixupKind(FirstLiteralRelocationKind + ELF::R_LARCH_SUB64));
+  case 128:
+    return std::make_pair(
+        MCFixupKind(FirstLiteralRelocationKind + ELF::R_LARCH_ADD_ULEB128),
+        MCFixupKind(FirstLiteralRelocationKind + ELF::R_LARCH_SUB_ULEB128));
   }
 }
 
+std::pair<bool, bool> LoongArchAsmBackend::relaxLEB128(MCLEBFragment &LF,
+                                                       MCAsmLayout &Layout,
+                                                       int64_t &Value) const {
+  const MCExpr &Expr = LF.getValue();
+  if (LF.isSigned() || !Expr.evaluateKnownAbsolute(Value, Layout))
+    return std::make_pair(false, false);
+  LF.getFixups().push_back(
+      MCFixup::create(0, &Expr, FK_Data_leb128, Expr.getLoc()));
+  return std::make_pair(true, true);
+}
+
 bool LoongArchAsmBackend::writeNopData(raw_ostream &OS, uint64_t Count,
                                        const MCSubtargetInfo *STI) const {
   // We mostly follow binutils' convention here: align to 4-byte boundary with a
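
The `case 128` pairing above maps an unfoldable FK_Data_leb128 fixup (e.g. a `.uleb128 A - B` whose value may still change under linker relaxation) onto R_LARCH_ADD_ULEB128 plus R_LARCH_SUB_ULEB128, leaving the final value to the linker. As a rough illustration only, not lld's or the psABI's exact implementation (the helpers and the 3-byte field width are assumptions for the example), the pair expresses this arithmetic on a fixed-width ULEB128 field:

#include <cstdint>
#include <cstdio>

// Read a padded ULEB128 field of exactly `Width` bytes.
static uint64_t readULEB128(const uint8_t *P, unsigned Width) {
  uint64_t Result = 0;
  for (unsigned I = 0; I != Width; ++I)
    Result |= uint64_t(P[I] & 0x7f) << (7 * I);
  return Result;
}

// Re-encode `Value` into the same fixed-width field, keeping its length.
static void writeULEB128(uint8_t *P, unsigned Width, uint64_t Value) {
  for (unsigned I = 0; I != Width; ++I, Value >>= 7)
    P[I] = uint8_t(Value & 0x7f) | (I + 1 != Width ? 0x80 : 0x00);
}

int main() {
  uint8_t Field[3];
  writeULEB128(Field, 3, 0);           // field as reserved by the assembler
  uint64_t A = 0x1234, B = 0x1000;     // final values of the two symbols
  uint64_t V = readULEB128(Field, 3);
  V += A;                              // R_LARCH_ADD_ULEB128
  V -= B;                              // R_LARCH_SUB_ULEB128
  writeULEB128(Field, 3, V);
  std::printf("0x%llx\n", (unsigned long long)readULEB128(Field, 3)); // 0x234
  return 0;
}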
@@ -226,21 +256,27 @@ bool LoongArchAsmBackend::handleAddSubRelocations(const MCAsmLayout &Layout,
                                                   uint64_t &FixedValue) const {
   std::pair<MCFixupKind, MCFixupKind> FK;
   uint64_t FixedValueA, FixedValueB;
-  const MCSection &SecA = Target.getSymA()->getSymbol().getSection();
-  const MCSection &SecB = Target.getSymB()->getSymbol().getSection();
-
-  // We need to record a relocation if SecA != SecB. Usually SecB is the
-  // same as the Fixup's section, and the relocation will then be recorded
-  // as PC-relative; otherwise an error is reported. Just return false and
-  // let handleFixup finish this work.
-  if (&SecA != &SecB)
-    return false;
-
-  // In the SecA == SecB case: if linker relaxation is enabled, we need to
-  // record the ADD and SUB relocations. Otherwise FixedValue has already been
-  // calculated in evaluateFixup; return true to avoid recording relocations.
-  if (!STI.hasFeature(LoongArch::FeatureRelax))
-    return true;
+  const MCSymbol &SA = Target.getSymA()->getSymbol();
+  const MCSymbol &SB = Target.getSymB()->getSymbol();
+
+  bool force = !SA.isInSection() || !SB.isInSection();
+  if (!force) {
+    const MCSection &SecA = SA.getSection();
+    const MCSection &SecB = SB.getSection();
+
+    // We need to record a relocation if SecA != SecB. Usually SecB is the
+    // same as the Fixup's section, and the relocation will then be recorded
+    // as PC-relative; otherwise an error is reported. Just return false and
+    // let handleFixup finish this work.
+    if (&SecA != &SecB)
+      return false;
+
+    // In the SecA == SecB case: if linker relaxation is enabled, we need to
+    // record the ADD and SUB relocations. Otherwise FixedValue has already been
+    // calculated in evaluateFixup; return true to avoid recording relocations.
+    if (!STI.hasFeature(LoongArch::FeatureRelax))
+      return true;
+  }
 
   switch (Fixup.getKind()) {
   case llvm::FK_Data_1:
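
Restated compactly (hypothetical names, not LLVM API): when either symbol has not been assigned a section yet, the change forces the paired ADD/SUB relocations to be emitted; otherwise the pre-existing SecA != SecB and FeatureRelax checks decide, exactly as in the old code.

// Decision made by the new handleAddSubRelocations flow for a fixup over A - B.
enum class AddSubAction { EmitPairedRelocs, DeferToHandleFixup, FoldNow };

static AddSubAction classifyAddSub(bool AInSection, bool BInSection,
                                   bool SameSection, bool RelaxEnabled) {
  if (!AInSection || !BInSection)
    return AddSubAction::EmitPairedRelocs;   // "force": a section is still unknown
  if (!SameSection)
    return AddSubAction::DeferToHandleFixup; // recorded (or rejected) elsewhere
  return RelaxEnabled ? AddSubAction::EmitPairedRelocs : AddSubAction::FoldNow;
}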
@@ -255,6 +291,9 @@ bool LoongArchAsmBackend::handleAddSubRelocations(const MCAsmLayout &Layout,
   case llvm::FK_Data_8:
     FK = getRelocPairForSize(64);
     break;
+  case llvm::FK_Data_leb128:
+    FK = getRelocPairForSize(128);
+    break;
   default:
     llvm_unreachable("unsupported fixup size");
   }