12 changes: 12 additions & 0 deletions src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
@@ -1892,3 +1892,15 @@ void InterpreterMacroAssembler::load_resolved_indy_entry(Register cache, Register index)
add(cache, cache, Array<ResolvedIndyEntry>::base_offset_in_bytes());
lea(cache, Address(cache, index));
}

#ifdef ASSERT
void InterpreterMacroAssembler::verify_field_offset(Register reg) {
// Verify the field offset is not in the header, implicitly checks for 0
Label L;
subs(zr, reg, oopDesc::base_offset_in_bytes());
br(Assembler::GE, L);
stop("bad field offset");
bind(L);
}
#endif
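
The new assert checks that the resolved field offset is at least oopDesc::base_offset_in_bytes(); because the object header has a positive size, any offset that passes is also non-zero, which is what "implicitly checks for 0" refers to. A minimal C++ sketch of that invariant (illustrative only, not HotSpot code), using a hypothetical header_size_in_bytes constant in place of oopDesc::base_offset_in_bytes():

// Illustrative sketch only, not HotSpot code: the invariant that
// InterpreterMacroAssembler::verify_field_offset enforces in debug builds.
#include <cassert>
#include <cstddef>

// Hypothetical stand-in for oopDesc::base_offset_in_bytes(): the size of the
// object header, i.e. the first byte at which an instance field can live.
constexpr std::ptrdiff_t header_size_in_bytes = 16;

inline void verify_field_offset_sketch(std::ptrdiff_t offset) {
  // A resolved field offset must point past the header; since the header size
  // is positive, this also rejects an unresolved/bogus offset of 0.
  assert(offset >= header_size_in_bytes && "bad field offset");
}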

2 changes: 2 additions & 0 deletions src/hotspot/cpu/aarch64/interp_masm_aarch64.hpp
@@ -321,6 +321,8 @@ class InterpreterMacroAssembler: public MacroAssembler {
}

void load_resolved_indy_entry(Register cache, Register index);

void verify_field_offset(Register reg) NOT_DEBUG_RETURN;
};

#endif // CPU_AARCH64_INTERP_MASM_AARCH64_HPP
17 changes: 15 additions & 2 deletions src/hotspot/cpu/aarch64/templateTable_aarch64.cpp
@@ -166,6 +166,7 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
Register temp_reg, bool load_bc_into_bc_reg/*=true*/,
int byte_no)
{
assert_different_registers(bc_reg, temp_reg);
if (!RewriteBytecodes) return;
Label L_patch_done;

@@ -223,8 +224,12 @@ void TemplateTable::patch_bytecode(Bytecodes::Code bc, Register bc_reg,
__ bind(L_okay);
#endif

// patch bytecode
__ strb(bc_reg, at_bcp(0));
// Patch bytecode with release store to coordinate with ConstantPoolCacheEntry loads
// in fast bytecode codelets. The fast bytecode codelets have a memory barrier that gains
// the needed ordering, together with control dependency on entering the fast codelet
// itself.
__ lea(temp_reg, at_bcp(0));
__ stlrb(bc_reg, temp_reg);
__ bind(L_patch_done);
}
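
The comment above describes a release/acquire style handshake: the resolution path writes the ConstantPoolCacheEntry first and only then publishes the fast bytecode with stlrb, while the fast codelets load the bytecode, branch into the codelet, and rely on a LoadLoad barrier (see the membar in fast_xaccess below) before reading the cache entry. A minimal C++ sketch of that contract (illustrative only, not HotSpot code), with hypothetical names patched_bytecode and cp_cache_f2, and the LoadLoad barrier modeled as an acquire fence:

// Illustrative sketch only, not HotSpot code: the ordering contract between
// patch_bytecode and a fast bytecode codelet, modeled with std::atomic.
// patched_bytecode and cp_cache_f2 are hypothetical stand-ins.
#include <atomic>
#include <cstdint>

std::atomic<uint8_t> patched_bytecode{0};  // the byte at bcp(0), 0 = not yet patched
uint64_t             cp_cache_f2 = 0;      // resolved field offset in the cp cache entry

// Resolution side: fill in the cache entry first, then publish the fast
// bytecode with a release store (the stlrb above).
void patch_side(uint64_t resolved_offset, uint8_t fast_bc) {
  cp_cache_f2 = resolved_offset;
  patched_bytecode.store(fast_bc, std::memory_order_release);
}

// Dispatch side: load the bytecode, enter the fast codelet only if it has
// been patched, then let the codelet's LoadLoad barrier (modeled here as an
// acquire fence) order the cp cache load after the bytecode load.
uint64_t fast_codelet_side() {
  uint8_t bc = patched_bytecode.load(std::memory_order_relaxed);
  if (bc == 0) {
    return 0;  // not patched yet: the real interpreter takes the slow path
  }
  std::atomic_thread_fence(std::memory_order_acquire);  // ~ __ membar(LoadLoad)
  return cp_cache_f2;  // guaranteed to observe the offset written in patch_side
}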

@@ -2982,6 +2987,7 @@ void TemplateTable::fast_storefield(TosState state)

// replace index with field offset from cache entry
__ ldr(r1, Address(r2, in_bytes(base + ConstantPoolCacheEntry::f2_offset())));
__ verify_field_offset(r1);

{
Label notVolatile;
@@ -3075,6 +3081,8 @@ void TemplateTable::fast_accessfield(TosState state)

__ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
ConstantPoolCacheEntry::f2_offset())));
__ verify_field_offset(r1);

__ ldrw(r3, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
ConstantPoolCacheEntry::flags_offset())));

@@ -3142,8 +3150,13 @@ void TemplateTable::fast_xaccess(TosState state)
__ ldr(r0, aaddress(0));
// access constant pool cache
__ get_cache_and_index_at_bcp(r2, r3, 2);

Reviewer:
I spent a little while looking at the other uses of get_cache_and_index_at_bcp and I couldn't find anything obviously wrong, but that might not mean we're OK.

Author:
Ran this change with our reproducer, and saw no observable failures. I also made verify_field_offset present in the optimized build to be sure, so I think we are good. This is about as approximate a fix as we can get, I think.


// Must prevent reordering of the following cp cache loads with bytecode load
__ membar(MacroAssembler::LoadLoad);

__ ldr(r1, Address(r2, in_bytes(ConstantPoolCache::base_offset() +
ConstantPoolCacheEntry::f2_offset())));
__ verify_field_offset(r1);

// 8179954: We need to make sure that the code generated for
// volatile accesses forms a sequentially-consistent set of