@@ -273,6 +273,8 @@ HASH_FUNC_IMPL(map_hash, BLOCK_MAP_CAPACITY_BITS, 1 << BLOCK_MAP_CAPACITY_BITS)
 static block_t *block_alloc(riscv_t *rv)
 {
     block_t *block = mpool_alloc(rv->block_mp);
+    if (unlikely(!block))
+        return NULL;
     assert(block);
     block->n_insn = 0;
 #if RV32_HAS(JIT)
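
Note on the hunk above: the explicit NULL check matters because assert() compiles away under NDEBUG, so a failed mpool_alloc() in a release build would otherwise be dereferenced. A minimal standalone sketch of the same guard; the pool, node_t, and node_new are hypothetical names for illustration, not taken from rv32emu:

    #include <assert.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical node type and pool allocator, for illustration only. */
    typedef struct node { int value; } node_t;

    static node_t *pool_alloc(void) { return malloc(sizeof(node_t)); }

    static node_t *node_new(void)
    {
        node_t *n = pool_alloc();
        if (!n)          /* release builds: the assert below is a no-op */
            return NULL; /* propagate failure instead of crashing */
        assert(n);       /* debug builds: still documents the invariant */
        n->value = 0;
        return n;
    }

    int main(void)
    {
        node_t *n = node_new();
        if (!n) {
            fprintf(stderr, "allocation failed\n");
            return 1;
        }
        free(n);
        return 0;
    }
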
@@ -619,13 +621,15 @@ FORCE_INLINE bool insn_is_indirect_branch(uint8_t opcode)
     }
 }
 
-static void block_translate(riscv_t *rv, block_t *block)
+static bool block_translate(riscv_t *rv, block_t *block)
 {
 retranslate:
     block->pc_start = block->pc_end = rv->PC;
 
     rv_insn_t *prev_ir = NULL;
     rv_insn_t *ir = mpool_calloc(rv->block_ir_mp);
+    if (unlikely(!ir))
+        return false;
     block->ir_head = ir;
 
     /* translate the basic block */
@@ -665,6 +669,8 @@ static void block_translate(riscv_t *rv, block_t *block)
         if (insn_is_branch(ir->opcode)) {
             if (insn_is_indirect_branch(ir->opcode)) {
                 ir->branch_table = calloc(1, sizeof(branch_history_table_t));
+                if (unlikely(!ir->branch_table))
+                    return false;
                 assert(ir->branch_table);
                 memset(ir->branch_table->PC, -1,
                        sizeof(uint32_t) * HISTORY_SIZE);
@@ -673,36 +679,44 @@ static void block_translate(riscv_t *rv, block_t *block)
         }
 
         ir = mpool_calloc(rv->block_ir_mp);
+        if (unlikely(!ir))
+            return false;
     }
 
     assert(prev_ir);
     block->ir_tail = prev_ir;
     block->ir_tail->next = NULL;
+    return true;
 }
 
 #if RV32_HAS(MOP_FUSION)
-#define COMBINE_MEM_OPS(RW)                                       \
-    next_ir = ir->next;                                           \
-    count = 1;                                                    \
-    while (1) {                                                   \
-        if (next_ir->opcode != IIF(RW)(rv_insn_lw, rv_insn_sw))   \
-            break;                                                \
-        count++;                                                  \
-        if (!next_ir->next)                                       \
-            break;                                                \
-        next_ir = next_ir->next;                                  \
-    }                                                             \
-    if (count > 1) {                                              \
-        ir->opcode = IIF(RW)(rv_insn_fuse4, rv_insn_fuse3);       \
-        ir->fuse = malloc(count * sizeof(opcode_fuse_t));         \
-        assert(ir->fuse);                                         \
-        ir->imm2 = count;                                         \
-        memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));              \
-        ir->impl = dispatch_table[ir->opcode];                    \
-        next_ir = ir->next;                                       \
-        for (int j = 1; j < count; j++, next_ir = next_ir->next)  \
-            memcpy(ir->fuse + j, next_ir, sizeof(opcode_fuse_t)); \
-        remove_next_nth_ir(rv, ir, block, count - 1);             \
+#define COMBINE_MEM_OPS(RW)                                           \
+    next_ir = ir->next;                                               \
+    count = 1;                                                        \
+    while (1) {                                                       \
+        if (next_ir->opcode != IIF(RW)(rv_insn_lw, rv_insn_sw))       \
+            break;                                                    \
+        count++;                                                      \
+        if (!next_ir->next)                                           \
+            break;                                                    \
+        next_ir = next_ir->next;                                      \
+    }                                                                 \
+    if (count > 1) {                                                  \
+        ir->opcode = IIF(RW)(rv_insn_fuse4, rv_insn_fuse3);           \
+        ir->fuse = malloc(count * sizeof(opcode_fuse_t));             \
+        if (unlikely(!ir->fuse)) {                                    \
+            ir->opcode = IIF(RW)(rv_insn_lw, rv_insn_sw);             \
+            count = 1; /* Degrade to non-fused operation */           \
+        } else {                                                      \
+            assert(ir->fuse);                                         \
+            ir->imm2 = count;                                         \
+            memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));              \
+            ir->impl = dispatch_table[ir->opcode];                    \
+            next_ir = ir->next;                                       \
+            for (int j = 1; j < count; j++, next_ir = next_ir->next)  \
+                memcpy(ir->fuse + j, next_ir, sizeof(opcode_fuse_t)); \
+            remove_next_nth_ir(rv, ir, block, count - 1);             \
+        }                                                             \
     }
 
 static inline void remove_next_nth_ir(const riscv_t *rv,
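
Note: COMBINE_MEM_OPS now rolls back on allocation failure, restoring the original lw/sw opcode and resetting count so the block simply executes unfused; correctness is preserved at the cost of the optimization. The shape of that fallback as a standalone sketch; the types and names here are illustrative, not the emulator's:

    #include <stdlib.h>
    #include <string.h>

    /* Illustrative instruction record; only the fields the pattern needs. */
    typedef struct {
        int opcode;       /* current dispatch opcode */
        int fused_opcode; /* opcode of the fused form */
        void *fuse;       /* side buffer backing the fused form */
        int count;        /* number of ops covered by this record */
    } fake_insn_t;

    /* Try to upgrade to the fused form; on malloc failure, roll back so
     * the instruction still runs correctly, just without the speedup. */
    static void try_fuse(fake_insn_t *insn, int n, size_t elem_size)
    {
        int unfused = insn->opcode;
        insn->opcode = insn->fused_opcode;
        insn->fuse = malloc((size_t) n * elem_size);
        if (!insn->fuse) {
            insn->opcode = unfused; /* degrade to non-fused operation */
            insn->count = 1;
            return;
        }
        memset(insn->fuse, 0, (size_t) n * elem_size);
        insn->count = n;
    }

    int main(void)
    {
        fake_insn_t insn = {.opcode = 1, .fused_opcode = 2};
        try_fuse(&insn, 4, 16);
        free(insn.fuse); /* free(NULL) is safe if fusing was rolled back */
        return 0;
    }
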
@@ -762,16 +776,20 @@ static void match_pattern(riscv_t *rv, block_t *block)
             next_ir = next_ir->next;
         }
         if (count > 1) {
-            ir->opcode = rv_insn_fuse1;
             ir->fuse = malloc(count * sizeof(opcode_fuse_t));
-            assert(ir->fuse);
-            ir->imm2 = count;
-            memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));
-            ir->impl = dispatch_table[ir->opcode];
-            next_ir = ir->next;
-            for (int j = 1; j < count; j++, next_ir = next_ir->next)
-                memcpy(ir->fuse + j, next_ir, sizeof(opcode_fuse_t));
-            remove_next_nth_ir(rv, ir, block, count - 1);
+            if (likely(ir->fuse)) {
+                ir->opcode = rv_insn_fuse1;
+                assert(ir->fuse);
+                ir->imm2 = count;
+                memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));
+                ir->impl = dispatch_table[ir->opcode];
+                next_ir = ir->next;
+                for (int j = 1; j < count; j++, next_ir = next_ir->next)
+                    memcpy(ir->fuse + j, next_ir,
+                           sizeof(opcode_fuse_t));
+                remove_next_nth_ir(rv, ir, block, count - 1);
+            }
+            /* If malloc failed, degrade gracefully to non-fused ops */
         }
         break;
     }
@@ -803,15 +821,18 @@ static void match_pattern(riscv_t *rv, block_t *block)
         }
         if (count > 1) {
             ir->fuse = malloc(count * sizeof(opcode_fuse_t));
-            assert(ir->fuse);
-            memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));
-            ir->opcode = rv_insn_fuse5;
-            ir->imm2 = count;
-            ir->impl = dispatch_table[ir->opcode];
-            next_ir = ir->next;
-            for (int j = 1; j < count; j++, next_ir = next_ir->next)
-                memcpy(ir->fuse + j, next_ir, sizeof(opcode_fuse_t));
-            remove_next_nth_ir(rv, ir, block, count - 1);
+            if (likely(ir->fuse)) {
+                assert(ir->fuse);
+                memcpy(ir->fuse, ir, sizeof(opcode_fuse_t));
+                ir->opcode = rv_insn_fuse5;
+                ir->imm2 = count;
+                ir->impl = dispatch_table[ir->opcode];
+                next_ir = ir->next;
+                for (int j = 1; j < count; j++, next_ir = next_ir->next)
+                    memcpy(ir->fuse + j, next_ir, sizeof(opcode_fuse_t));
+                remove_next_nth_ir(rv, ir, block, count - 1);
+            }
+            /* If malloc failed, degrade gracefully to non-fused ops */
         }
         break;
     }
@@ -881,8 +902,11 @@ static block_t *block_find_or_translate(riscv_t *rv)
 #endif
     /* allocate a new block */
     next_blk = block_alloc(rv);
+    if (unlikely(!next_blk))
+        return NULL;
 
-    block_translate(rv, next_blk);
+    if (unlikely(!block_translate(rv, next_blk)))
+        return NULL;
 
 #if RV32_HAS(JIT) && RV32_HAS(SYSTEM)
     /*
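
Note: with these two checks, every failure inside block_find_or_translate collapses to a single NULL return, so the lone caller in rv_step can react in one place. Roughly, under simplified signatures and illustrative names, not the emulator's real code:

    #include <stdbool.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Sketch of the failure-propagation chain this patch builds:
     * alloc -> translate -> find_or_translate -> caller. */
    typedef struct { int n_insn; } blk_t;

    static blk_t *blk_alloc(void) { return calloc(1, sizeof(blk_t)); }

    static bool blk_translate(blk_t *blk)
    {
        blk->n_insn = 1; /* stand-in for the real translation work */
        return true;     /* would return false on an internal OOM */
    }

    static blk_t *blk_find_or_translate(void)
    {
        blk_t *blk = blk_alloc();
        if (!blk)
            return NULL; /* propagate allocation failure */
        if (!blk_translate(blk)) {
            free(blk);   /* this sketch uses calloc, hence the free */
            return NULL; /* propagate translation failure */
        }
        return blk;
    }

    int main(void)
    {
        blk_t *blk = blk_find_or_translate();
        if (!blk) { /* the single place where the caller reacts */
            fprintf(stderr, "out of memory\n");
            return 1;
        }
        free(blk);
        return 0;
    }
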
@@ -1078,6 +1102,12 @@ void rv_step(void *arg)
      */
     block_t *block = block_find_or_translate(rv);
     /* by now, a block should be available */
+    if (unlikely(!block)) {
+        rv_log_fatal("Failed to allocate or translate block at PC=0x%08x",
+                     rv->PC);
+        rv->halt = true;
+        return;
+    }
     assert(block);
 
 #if RV32_HAS(JIT) && RV32_HAS(SYSTEM)
@@ -1129,6 +1159,11 @@ void rv_step(void *arg)
         else if (!block->compiled && block->n_invoke >= THRESHOLD) {
             block->compiled = true;
             queue_entry_t *entry = malloc(sizeof(queue_entry_t));
+            if (unlikely(!entry)) {
+                /* Malloc failed - reset compiled flag to allow retry later */
+                block->compiled = false;
+                continue;
+            }
             entry->block = block;
             pthread_mutex_lock(&rv->wait_queue_lock);
             list_add(&entry->list, &rv->wait_queue);
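
Note: resetting block->compiled before continue keeps the hot block eligible for JIT compilation on a later invocation, rather than leaving it marked compiled with no queued work. The claim-then-roll-back shape, sketched standalone with hypothetical names:

    #include <stdbool.h>
    #include <stdlib.h>

    /* Illustrative only: a "submitted" flag that is rolled back when the
     * enqueue allocation fails, so the work is retried on a later call. */
    typedef struct {
        bool submitted;
        int hits;
    } hot_item_t;

    typedef struct job { hot_item_t *item; } job_t;

    static bool submit_job(hot_item_t *item)
    {
        item->submitted = true;      /* optimistic: claim the slot */
        job_t *job = malloc(sizeof(job_t));
        if (!job) {
            item->submitted = false; /* roll back; retry next time */
            return false;
        }
        job->item = item;
        /* ... a real queue would hand the job to a worker thread ... */
        free(job);                   /* stand-in for consuming the job */
        return true;
    }

    int main(void)
    {
        hot_item_t item = {0};
        return submit_job(&item) ? 0 : 1;
    }
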
@@ -1378,16 +1413,38 @@ void ecall_handler(riscv_t *rv)
 void memset_handler(riscv_t *rv)
 {
     memory_t *m = PRIV(rv)->mem;
-    memset((char *) m->mem_base + rv->X[rv_reg_a0], rv->X[rv_reg_a1],
-           rv->X[rv_reg_a2]);
+    uint32_t dest = rv->X[rv_reg_a0];
+    uint32_t value = rv->X[rv_reg_a1];
+    uint32_t count = rv->X[rv_reg_a2];
+
+    /* Bounds checking to prevent buffer overflow */
+    if (dest >= m->mem_size || count > m->mem_size - dest) {
+        SET_CAUSE_AND_TVAL_THEN_TRAP(rv, STORE_MISALIGNED, dest);
+        return;
+    }
+
+    memset((char *) m->mem_base + dest, value, count);
     rv->PC = rv->X[rv_reg_ra] & ~1U;
 }
 
 void memcpy_handler(riscv_t *rv)
 {
     memory_t *m = PRIV(rv)->mem;
-    memcpy((char *) m->mem_base + rv->X[rv_reg_a0],
-           (char *) m->mem_base + rv->X[rv_reg_a1], rv->X[rv_reg_a2]);
+    uint32_t dest = rv->X[rv_reg_a0];
+    uint32_t src = rv->X[rv_reg_a1];
+    uint32_t count = rv->X[rv_reg_a2];
+
+    /* Bounds checking to prevent buffer overflow */
+    if (dest >= m->mem_size || count > m->mem_size - dest) {
+        SET_CAUSE_AND_TVAL_THEN_TRAP(rv, STORE_MISALIGNED, dest);
+        return;
+    }
+    if (src >= m->mem_size || count > m->mem_size - src) {
+        SET_CAUSE_AND_TVAL_THEN_TRAP(rv, LOAD_MISALIGNED, src);
+        return;
+    }
+
+    memcpy((char *) m->mem_base + dest, (char *) m->mem_base + src, count);
     rv->PC = rv->X[rv_reg_ra] & ~1U;
 }
 
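
Note on the bounds checks: both handlers test count > m->mem_size - dest rather than the tempting dest + count > m->mem_size, because the sum can wrap in 32-bit arithmetic and slip past the naive form; the first clause guarantees the subtraction cannot underflow. A small self-contained demonstration, where the 64 KiB size is a made-up value:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Overflow-safe range check: is [off, off + len) inside [0, size)? */
    static bool range_ok(uint32_t off, uint32_t len, uint32_t size)
    {
        /* Never computes off + len, so it cannot wrap. */
        return off < size && len <= size - off;
    }

    int main(void)
    {
        const uint32_t size = 0x10000; /* hypothetical 64 KiB memory */

        /* Naive check: 0xFFFFFFF0 + 0x20 wraps to 0x10 and "passes". */
        uint32_t off = 0xFFFFFFF0u, len = 0x20u;
        printf("naive: %s\n",
               (off + len > size) ? "rejected" : "WRONGLY accepted");
        printf("safe:  %s\n",
               range_ok(off, len, size) ? "accepted" : "rejected");
        return 0;
    }
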