@@ -255,14 +255,14 @@ RelType AArch64::getDynRel(RelType type) const {
 int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
   switch (type) {
   case R_AARCH64_TLSDESC:
-    return read64(buf + 8);
+    return read64(ctx, buf + 8);
   case R_AARCH64_NONE:
   case R_AARCH64_GLOB_DAT:
   case R_AARCH64_JUMP_SLOT:
     return 0;
   case R_AARCH64_ABS16:
   case R_AARCH64_PREL16:
-    return SignExtend64<16>(read16(buf));
+    return SignExtend64<16>(read16(ctx, buf));
   case R_AARCH64_ABS32:
   case R_AARCH64_PREL32:
     return SignExtend64<32>(read32(ctx, buf));
@@ -271,7 +271,7 @@ int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
   case R_AARCH64_RELATIVE:
   case R_AARCH64_IRELATIVE:
   case R_AARCH64_TLS_TPREL64:
-    return read64(buf);
+    return read64(ctx, buf);

   // The following relocation types all point at instructions, and
   // relocate an immediate field in the instruction.
@@ -355,12 +355,12 @@ int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
 }

 void AArch64::writeGotPlt(uint8_t *buf, const Symbol &) const {
-  write64(buf, ctx.in.plt->getVA());
+  write64(ctx, buf, ctx.in.plt->getVA());
 }

 void AArch64::writeIgotPlt(uint8_t *buf, const Symbol &s) const {
   if (ctx.arg.writeAddends)
-    write64(buf, s.getVA());
+    write64(ctx, buf, s.getVA());
 }

 void AArch64::writePltHeader(uint8_t *buf) const {
@@ -485,7 +485,7 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
   case R_AARCH64_ABS16:
   case R_AARCH64_PREL16:
     checkIntUInt(loc, val, 16, rel);
-    write16(loc, val);
+    write16(ctx, loc, val);
     break;
   case R_AARCH64_ABS32:
   case R_AARCH64_PREL32:
@@ -508,12 +508,12 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
     if (rel.sym && rel.sym->isTagged() &&
         (rel.addend < 0 ||
          rel.addend >= static_cast<int64_t>(rel.sym->getSize())))
-      write64(loc, -rel.addend);
+      write64(ctx, loc, -rel.addend);
     else
-      write64(loc, val);
+      write64(ctx, loc, val);
     break;
   case R_AARCH64_PREL64:
-    write64(loc, val);
+    write64(ctx, loc, val);
     break;
   case R_AARCH64_AUTH_ABS64:
     // If val is wider than 32 bits, the relocation must have been moved from
@@ -662,7 +662,7 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
     break;
   case R_AARCH64_TLSDESC:
     // For R_AARCH64_TLSDESC the addend is stored in the second 64-bit word.
-    write64(loc + 8, val);
+    write64(ctx, loc + 8, val);
     break;
   default:
     llvm_unreachable("unknown relocation");
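The pattern in every hunk is the same: the low-level read/write helpers now take the linker context as an explicit first argument instead of consulting global state. A minimal sketch of what the updated helpers presumably look like, assuming (as in lld/ELF/Target.h) that the target endianness is read from ctx.arg rather than a global config object:

// Sketch only, not the verbatim lld source; the real helpers live in
// lld/ELF/Target.h. The key change is that endianness comes from the
// explicit Ctx parameter instead of a global.
inline uint64_t read64(Ctx &ctx, const void *p) {
  return llvm::support::endian::read64(p, ctx.arg.endianness);
}
inline void write64(Ctx &ctx, void *p, uint64_t v) {
  llvm::support::endian::write64(p, v, ctx.arg.endianness);
}

Threading Ctx through call sites like these keeps each target's relocation code free of hidden global dependencies, which is what makes mechanical changes such as read64(buf) -> read64(ctx, buf) above both necessary and safe.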