 #define ADR_IMM (1<<20) // signed imm21
 #define ADRP_IMM (1LL<<32) // signed imm21 * 4096

-static bool aarch64_may_use_b(ir_ctx *ctx, const void *addr)
+static bool aarch64_may_use_b(ir_code_buffer *code_buffer, const void *addr)
 {
-	if (ctx->code_buffer) {
-		if (addr >= ctx->code_buffer->start && (char*)addr < (char*)ctx->code_buffer->end) {
-			return (((char*)ctx->code_buffer->end - (char*)ctx->code_buffer->start) < B_IMM);
-		} else if ((char*)addr >= (char*)ctx->code_buffer->end) {
-			return (((char*)addr - (char*)ctx->code_buffer->start) < B_IMM);
-		} else if (addr < ctx->code_buffer->start) {
-			return (((char*)ctx->code_buffer->end - (char*)addr) < B_IMM);
+	if (code_buffer) {
+		if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
+			return (((char*)code_buffer->end - (char*)code_buffer->start) < B_IMM);
+		} else if ((char*)addr >= (char*)code_buffer->end) {
+			return (((char*)addr - (char*)code_buffer->start) < B_IMM);
+		} else if (addr < code_buffer->start) {
+			return (((char*)code_buffer->end - (char*)addr) < B_IMM);
 		}
 	}
-	return 1; //???
+	return 0;
 }

 #if 0
-static bool aarch64_may_use_adr(ir_ctx *ctx, const void *addr)
+static bool aarch64_may_use_adr(ir_code_buffer *code_buffer, const void *addr)
 {
-	if (ctx->code_buffer) {
-		if (addr >= ctx->code_buffer->start && (char*)addr < (char*)ctx->code_buffer->end) {
-			return (((char*)ctx->code_buffer->end - (char*)ctx->code_buffer->start) < ADR_IMM);
-		} else if ((char*)addr >= (char*)ctx->code_buffer->end) {
-			return (((char*)addr - (char*)ctx->code_buffer->start) < ADR_IMM);
-		} else if (addr < ctx->code_buffer->start) {
-			return (((char*)ctx->code_buffer->end - (char*)addr) < ADR_IMM);
+	if (code_buffer) {
+		if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
+			return (((char*)code_buffer->end - (char*)code_buffer->start) < ADR_IMM);
+		} else if ((char*)addr >= (char*)code_buffer->end) {
+			return (((char*)addr - (char*)code_buffer->start) < ADR_IMM);
+		} else if (addr < code_buffer->start) {
+			return (((char*)code_buffer->end - (char*)addr) < ADR_IMM);
 		}
 	}
 	return 0;
 }

-static bool aarch64_may_use_adrp(ir_ctx *ctx, const void *addr)
+static bool aarch64_may_use_adrp(ir_code_buffer *code_buffer, const void *addr)
 {
-	if (ctx->code_buffer) {
-		if (addr >= ctx->code_buffer->start && (char*)addr < (char*)ctx->code_buffer->end) {
-			return (((char*)ctx->code_buffer->end - (char*)ctx->code_buffer->start) < ADRP_IMM);
-		} else if ((char*)addr >= (char*)ctx->code_buffer->end) {
-			return (((char*)addr - (char*)ctx->code_buffer->start) < ADRP_IMM);
-		} else if (addr < ctx->code_buffer->start) {
-			return (((char*)ctx->code_buffer->end - (char*)addr) < ADRP_IMM);
+	if (code_buffer) {
+		if (addr >= code_buffer->start && (char*)addr < (char*)code_buffer->end) {
+			return (((char*)code_buffer->end - (char*)code_buffer->start) < ADRP_IMM);
+		} else if ((char*)addr >= (char*)code_buffer->end) {
+			return (((char*)addr - (char*)code_buffer->start) < ADRP_IMM);
+		} else if (addr < code_buffer->start) {
+			return (((char*)code_buffer->end - (char*)addr) < ADRP_IMM);
 		}
 	}
 	return 0;
@@ -1176,9 +1176,10 @@ static void ir_emit_load(ir_ctx *ctx, ir_type type, ir_reg reg, ir_ref src)
 	ir_insn *insn = &ctx->ir_base[src];

 	if (insn->op == IR_SYM || insn->op == IR_FUNC) {
+		const char *name = ir_get_str(ctx, insn->val.name);
 		void *addr = (ctx->loader && ctx->loader->resolve_sym_name) ?
-			ctx->loader->resolve_sym_name(ctx->loader, ir_get_str(ctx, insn->val.name)) :
-			ir_resolve_sym_name(ir_get_str(ctx, insn->val.name));
+			ctx->loader->resolve_sym_name(ctx->loader, name, insn->op == IR_FUNC) :
+			ir_resolve_sym_name(name);
 		IR_ASSERT(addr);
 		ir_emit_load_imm_int(ctx, type, reg, (intptr_t)addr);
 	} else if (insn->op == IR_STR) {
@@ -4360,7 +4361,7 @@ static void ir_emit_call_ex(ir_ctx *ctx, ir_ref def, ir_insn *insn, int32_t used
 	if (IR_IS_CONST_REF(insn->op2)) {
 		void *addr = ir_call_addr(ctx, insn, &ctx->ir_base[insn->op2]);

-		if (aarch64_may_use_b(ctx, addr)) {
+		if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 			|	bl &addr
 		} else {
 			ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -4435,7 +4436,7 @@ static void ir_emit_tailcall(ir_ctx *ctx, ir_ref def, ir_insn *insn)
 	if (IR_IS_CONST_REF(insn->op2)) {
 		void *addr = ir_call_addr(ctx, insn, &ctx->ir_base[insn->op2]);

-		if (aarch64_may_use_b(ctx, addr)) {
+		if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 			|	b &addr
 		} else {
 			ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -4468,7 +4469,7 @@ static void ir_emit_ijmp(ir_ctx *ctx, ir_ref def, ir_insn *insn)
 	} else if (IR_IS_CONST_REF(insn->op2)) {
 		void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op2]);

-		if (aarch64_may_use_b(ctx, addr)) {
+		if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 			|	b &addr
 		} else {
 			ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -4494,7 +4495,7 @@ static void ir_emit_guard(ir_ctx *ctx, ir_ref def, ir_insn *insn)
 	if (IR_IS_CONST_REF(insn->op3)) {
 		void *addr = ir_jmp_addr(ctx, insn, &ctx->ir_base[insn->op3]);

-		if (aarch64_may_use_b(ctx, addr)) {
+		if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 			|	b &addr
 		} else {
 			ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -4654,7 +4655,7 @@ static void ir_emit_guard_cmp_int(ir_ctx *ctx, uint32_t b, ir_ref def, ir_insn *
 	if (IR_IS_CONST_REF(op2) && ctx->ir_base[op2].val.u64 == 0) {
 		if (op == IR_ULT) {
 			/* always false */
-			if (aarch64_may_use_b(ctx, addr)) {
+			if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 				|	b &addr
 			} else {
 				ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -4807,7 +4808,7 @@ static void ir_emit_exitcall(ir_ctx *ctx, ir_ref def, ir_insn *insn)
 	if (IR_IS_CONST_REF(insn->op2)) {
 		void *addr = ir_call_addr(ctx, insn, &ctx->ir_base[insn->op2]);

-		if (aarch64_may_use_b(ctx, addr)) {
+		if (aarch64_may_use_b(ctx->code_buffer, addr)) {
 			|	bl &addr
 		} else {
 			ir_emit_load_imm_int(ctx, IR_ADDR, IR_REG_INT_TMP, (intptr_t)addr);
@@ -5835,17 +5836,8 @@ const void *ir_emit_exitgroup(uint32_t first_exit_point, uint32_t exit_points_pe
 	dasm_State **Dst, *dasm_state;
 	int ret;

-	/* IR_ASSERT(aarch64_may_use_b(ctx, exit_addr)) */
 	IR_ASSERT(code_buffer);
-	if ((char*)exit_addr >= (char*)code_buffer->start && (char*)exit_addr < (char*)code_buffer->end) {
-		IR_ASSERT(((char*)code_buffer->end - (char*)code_buffer->end) < B_IMM);
-	} else if ((char*)exit_addr >= (char*)code_buffer->end) {
-		IR_ASSERT(((char*)exit_addr - (char*)code_buffer->start) < B_IMM);
-	} else if ((char*)exit_addr < (char*)code_buffer->start) {
-		IR_ASSERT(((char*)code_buffer->end - (char*)exit_addr) < B_IMM);
-	} else {
-		IR_ASSERT(0);
-	}
+	IR_ASSERT(aarch64_may_use_b(code_buffer, exit_addr));

 	Dst = &dasm_state;
 	dasm_state = NULL;
@@ -6010,3 +6002,73 @@ static int ir_add_veneer(dasm_State *Dst, void *buffer, uint32_t ins, int *b, ui

 	return n;
 }
+
+bool ir_needs_thunk(ir_code_buffer *code_buffer, void *addr)
+{
+	return !aarch64_may_use_b(code_buffer, addr);
+}
+
+void *ir_emit_thunk(ir_code_buffer *code_buffer, void *addr, size_t *size_ptr)
+{
+	void *entry;
+	size_t size;
+	dasm_State **Dst, *dasm_state;
+	int ret;
+
+	Dst = &dasm_state;
+	dasm_state = NULL;
+	dasm_init(&dasm_state, DASM_MAXSECTION);
+	dasm_setupglobal(&dasm_state, dasm_labels, ir_lb_MAX);
+	dasm_setup(&dasm_state, dasm_actions);
+
+	|.code
+	|	movz Rx(IR_REG_INT_TMP), #((uint64_t)(addr) & 0xffff)
+	|	movk Rx(IR_REG_INT_TMP), #(((uint64_t)(addr) >> 16) & 0xffff), lsl #16
+	|	movk Rx(IR_REG_INT_TMP), #(((uint64_t)(addr) >> 32) & 0xffff), lsl #32
+	|	movk Rx(IR_REG_INT_TMP), #(((uint64_t)(addr) >> 48) & 0xffff), lsl #48
+	|	br Rx(IR_REG_INT_TMP)
+
+	ret = dasm_link(&dasm_state, &size);
+	if (ret != DASM_S_OK) {
+		IR_ASSERT(0);
+		dasm_free(&dasm_state);
+		return NULL;
+	}
+
+	if (size > (size_t)((char*)code_buffer->end - (char*)code_buffer->pos)) {
+		dasm_free(&dasm_state);
+		return NULL;
+	}
+
+	entry = code_buffer->pos;
+	ret = dasm_encode(&dasm_state, entry);
+	if (ret != DASM_S_OK) {
+		dasm_free(&dasm_state);
+		return NULL;
+	}
+
+	*size_ptr = size;
+	code_buffer->pos = (char*)code_buffer->pos + size;
+
+	dasm_free(&dasm_state);
+	ir_mem_flush(entry, size);
+
+	return entry;
+}
+
+void ir_fix_thunk(void *thunk_entry, void *addr)
+{
+	uint32_t *code = thunk_entry;
+	IR_ASSERT((code[0] & 0xffe00000) == 0xd2800000
+		&& (code[1] & 0xffe00000) == 0xf2a00000
+		&& (code[2] & 0xffe00000) == 0xf2c00000
+		&& (code[3] & 0xffe00000) == 0xf2e00000
+		&& (code[4] & 0xfffffc1f) == 0xd61f0000);
+
+	code[0] = (code[0] & 0xffe0001f) | (uint32_t)((uint64_t)(addr) & 0xffff) << 5;
+	code[1] = (code[1] & 0xffe0001f) | (uint32_t)(((uint64_t)(addr) >> 16) & 0xffff) << 5;
+	code[2] = (code[2] & 0xffe0001f) | (uint32_t)(((uint64_t)(addr) >> 32) & 0xffff) << 5;
+	code[3] = (code[3] & 0xffe0001f) | (uint32_t)(((uint64_t)(addr) >> 48) & 0xffff) << 5;
+
+	ir_mem_flush(code, sizeof(uint32_t) * 4);
+}
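
A minimal usage sketch (not part of this commit) of the thunk helpers added above; the caller name resolve_call_target() is hypothetical, and it assumes the IR internal headers that declare ir_code_buffer, ir_needs_thunk(), ir_emit_thunk() and ir_fix_thunk():

/* Hypothetical caller: branch directly when the target is reachable by a
 * direct AArch64 b/bl (+/-128 MB), otherwise emit an out-of-line thunk
 * (movz/movk/movk/movk + br) inside the same code buffer and branch to it. */
void *resolve_call_target(ir_code_buffer *code_buffer, void *target)
{
	size_t size;

	if (!ir_needs_thunk(code_buffer, target)) {
		return target; /* a direct b/bl reaches the target */
	}
	/* returns NULL if the code buffer has no room left for the thunk */
	return ir_emit_thunk(code_buffer, target, &size);
}

/* If the target later moves (e.g. it is recompiled elsewhere), the thunk can
 * be re-pointed in place with ir_fix_thunk(thunk, new_target) instead of
 * relinking every call site that branches through it. */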