@@ -255,14 +255,14 @@ RelType AArch64::getDynRel(RelType type) const {
 int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
   switch (type) {
   case R_AARCH64_TLSDESC:
-    return read64(buf + 8);
+    return read64(ctx, buf + 8);
   case R_AARCH64_NONE:
   case R_AARCH64_GLOB_DAT:
   case R_AARCH64_JUMP_SLOT:
     return 0;
   case R_AARCH64_ABS16:
   case R_AARCH64_PREL16:
-    return SignExtend64<16>(read16(buf));
+    return SignExtend64<16>(read16(ctx, buf));
   case R_AARCH64_ABS32:
   case R_AARCH64_PREL32:
     return SignExtend64<32>(read32(ctx, buf));
@@ -271,7 +271,7 @@ int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
   case R_AARCH64_RELATIVE:
   case R_AARCH64_IRELATIVE:
   case R_AARCH64_TLS_TPREL64:
-    return read64(buf);
+    return read64(ctx, buf);
 
   // The following relocation types all point at instructions, and
   // relocate an immediate field in the instruction.
@@ -355,12 +355,12 @@ int64_t AArch64::getImplicitAddend(const uint8_t *buf, RelType type) const {
 }
 
 void AArch64::writeGotPlt(uint8_t *buf, const Symbol &) const {
-  write64(buf, ctx.in.plt->getVA());
+  write64(ctx, buf, ctx.in.plt->getVA());
 }
 
 void AArch64::writeIgotPlt(uint8_t *buf, const Symbol &s) const {
   if (ctx.arg.writeAddends)
-    write64(buf, s.getVA());
+    write64(ctx, buf, s.getVA());
 }
 
 void AArch64::writePltHeader(uint8_t *buf) const {
@@ -485,7 +485,7 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
   case R_AARCH64_ABS16:
   case R_AARCH64_PREL16:
     checkIntUInt(loc, val, 16, rel);
-    write16(loc, val);
+    write16(ctx, loc, val);
     break;
   case R_AARCH64_ABS32:
   case R_AARCH64_PREL32:
@@ -508,12 +508,12 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
     if (rel.sym && rel.sym->isTagged() &&
         (rel.addend < 0 ||
          rel.addend >= static_cast<int64_t>(rel.sym->getSize())))
-      write64(loc, -rel.addend);
+      write64(ctx, loc, -rel.addend);
     else
-      write64(loc, val);
+      write64(ctx, loc, val);
     break;
   case R_AARCH64_PREL64:
-    write64(loc, val);
+    write64(ctx, loc, val);
     break;
   case R_AARCH64_AUTH_ABS64:
     // If val is wider than 32 bits, the relocation must have been moved from
@@ -662,7 +662,7 @@ void AArch64::relocate(uint8_t *loc, const Relocation &rel,
     break;
   case R_AARCH64_TLSDESC:
     // For R_AARCH64_TLSDESC the addend is stored in the second 64-bit word.
-    write64(loc + 8, val);
+    write64(ctx, loc + 8, val);
     break;
   default:
     llvm_unreachable("unknown relocation");
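
For context on what the extra parameter buys: the diff threads the link context through the data read/write helpers so they take their target endianness from `ctx` explicitly rather than from global state. The sketch below is a minimal standalone illustration of that pattern, not lld's actual implementation; the `Ctx` struct, its `isLE` field, and the helper bodies are assumptions made purely for this example.

```cpp
#include <cstdint>

// Illustrative stand-in for lld's link context; the real Ctx carries far
// more state. isLE is an assumed field meaning "target is little-endian".
struct Ctx {
  bool isLE = true;
};

// Read a 64-bit value from buf using the endianness recorded in ctx.
inline uint64_t read64(const Ctx &ctx, const uint8_t *buf) {
  uint64_t v = 0;
  for (int i = 0; i < 8; ++i)
    v |= uint64_t(buf[i]) << (ctx.isLE ? 8 * i : 8 * (7 - i));
  return v;
}

// Write a 64-bit value to buf using the endianness recorded in ctx.
inline void write64(const Ctx &ctx, uint8_t *buf, uint64_t v) {
  for (int i = 0; i < 8; ++i)
    buf[i] = uint8_t(v >> (ctx.isLE ? 8 * i : 8 * (7 - i)));
}

// The narrower variants used in the diff (read16, read32, write16) would
// follow the same shape, differing only in width.
```

Passing `ctx` at every call site, as the diff does in `AArch64::getImplicitAddend`, `writeGotPlt`, `writeIgotPlt`, and `relocate`, keeps these helpers free of hidden global dependencies.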