@@ -3182,12 +3182,19 @@ static void tcg_gen_req_mo(TCGBar type)
 void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
 {
     TCGMemOp orig_memop;
-    TCGv_i64 load_size, mmu_idx;
+    TCGv_i64 load_size, mmu_idx, addr_loc;
+
+    /* Temporary bugfix:
+     * on some architectures, the addr and val (ret) parameters are, for some
+     * reason, optimized out after the concrete load operation (ret => addr).
+     * This copy ensures that the addr value is preserved. */
+    addr_loc = tcg_temp_new_i64();
+    tcg_gen_mov_i64(addr_loc, addr);
 
     tcg_gen_req_mo(TCG_MO_LD_LD | TCG_MO_ST_LD);
     memop = tcg_canonicalize_memop(memop, 0, 0);
     trace_guest_mem_before_tcg(tcg_ctx->cpu, cpu_env,
-                               addr, trace_mem_get_info(memop, 0));
+                               addr_loc, trace_mem_get_info(memop, 0));
 
     orig_memop = memop;
     if (!TCG_TARGET_HAS_MEMORY_BSWAP && (memop & MO_BSWAP)) {
@@ -3198,17 +3205,19 @@ void tcg_gen_qemu_ld_i32(TCGv_i32 val, TCGv addr, TCGArg idx, TCGMemOp memop)
         }
     }
 
-    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr, memop, idx);
+    gen_ldst_i32(INDEX_op_qemu_ld_i32, val, addr_loc, memop, idx);
 
     /* Perform the symbolic memory access. Doing so _after_ the concrete
      * operation ensures that the target address is in the TLB. */
     load_size = tcg_const_i64(1 << (memop & MO_SIZE));
     mmu_idx = tcg_const_i64(idx);
+
     gen_helper_sym_load_guest_i32(tcgv_i32_expr(val), cpu_env,
-                                  addr, tcgv_i64_expr(addr),
+                                  addr_loc, tcgv_i64_expr(addr_loc),
                                   load_size, mmu_idx);
     tcg_temp_free_i64(load_size);
     tcg_temp_free_i64(mmu_idx);
+    tcg_temp_free_i64(addr_loc);
 
     if ((orig_memop ^ memop) & MO_BSWAP) {
         switch (orig_memop & MO_SIZE) {