Commit a06b344

Fix PPC64
1 parent e05605e commit a06b344

2 files changed: +14 additions, -13 deletions

src/hotspot/cpu/ppc/gc/shared/barrierSetAssembler_ppc.cpp

Lines changed: 7 additions & 6 deletions
@@ -183,22 +183,23 @@ void BarrierSetAssembler::nmethod_entry_barrier(MacroAssembler* masm, Register t
   BarrierSetNMethod* bs_nm = BarrierSet::barrier_set()->barrier_set_nmethod();
   assert_different_registers(tmp, R0);
 
-  __ block_comment("nmethod_entry_barrier (nmethod_entry_barrier) {");
+  __ align(8); // must align the following block which requires atomic updates
 
-  // Load stub address using toc (fixed instruction size, unlike load_const_optimized)
-  __ calculate_address_from_global_toc(tmp, StubRoutines::method_entry_barrier(),
-                                       true, true, false); // 2 instructions
-  __ mtctr(tmp);
+  __ block_comment("nmethod_entry_barrier (nmethod_entry_barrier) {");
 
   // This is a compound instruction. Patching support is provided by NativeMovRegMem.
   // Actual patching is done in (platform-specific part of) BarrierSetNMethod.
-  __ align(8); // align for atomic update
   __ load_const32(tmp, 0 /* Value is patched */); // 2 instructions
 
   // Low order half of 64 bit value is currently used.
   __ ld(R0, in_bytes(bs_nm->thread_disarmed_guard_value_offset()), R16_thread);
   __ cmpw(CR0, R0, tmp);
 
+  // Load stub address using toc (fixed instruction size, unlike load_const_optimized)
+  __ calculate_address_from_global_toc(tmp, StubRoutines::method_entry_barrier(),
+                                       true, true, false); // 2 instructions
+  __ mtctr(tmp);
+
   __ bnectrl(CR0);
 
   // Oops may have been changed. Make those updates observable.
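
Read together, the reordered emission gives the barrier roughly the shape sketched below. The layout is reconstructed from the diff above; the 2-word sizes come from the "// 2 instructions" comments, while the concrete expansions of load_const32 and calculate_address_from_global_toc (shown as lis/ori and addis/addi) are assumptions, not something this patch spells out.

// Sketch of the barrier as emitted after this change (byte offsets from the
// barrier start, one 4-byte instruction per word; mnemonics for the compound
// helpers are assumed):
//
//   +0   load_const32(tmp, guard)            // patchable pair (e.g. lis/ori),
//   +4                                       // 8-byte aligned for atomic patching
//   +8   ld    R0, disarmed_guard_value(R16_thread)
//   +12  cmpw  CR0, R0, tmp
//   +16  calculate_address_from_global_toc   // e.g. addis/addi, 2 instructions
//   +20
//   +24  mtctr tmp
//   +28  bnectrl CR0                         // call the stub if the guard values differ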

src/hotspot/cpu/ppc/gc/shared/barrierSetNMethod_ppc.cpp

Lines changed: 7 additions & 7 deletions
@@ -38,7 +38,7 @@ class NativeNMethodBarrier: public NativeInstruction {
 
   NativeMovRegMem* get_patchable_instruction_handle() const {
     // Endianness is handled by NativeMovRegMem
-    return reinterpret_cast<NativeMovRegMem*>(get_barrier_start_address() + 3 * 4);
+    return reinterpret_cast<NativeMovRegMem*>(get_barrier_start_address());
   }
 
 public:
@@ -95,12 +95,6 @@ class NativeNMethodBarrier: public NativeInstruction {
 
     uint* current_instruction = reinterpret_cast<uint*>(get_barrier_start_address());
 
-    // calculate_address_from_global_toc (compound instruction)
-    verify_op_code_manually(current_instruction, MacroAssembler::is_addis(*current_instruction));
-    verify_op_code_manually(current_instruction, MacroAssembler::is_addi(*current_instruction));
-
-    verify_op_code_manually(current_instruction, MacroAssembler::is_mtctr(*current_instruction));
-
     get_patchable_instruction_handle()->verify();
     current_instruction += 2;
 
@@ -109,6 +103,12 @@ class NativeNMethodBarrier: public NativeInstruction {
     // cmpw (mnemonic)
     verify_op_code(current_instruction, Assembler::CMP_OPCODE);
 
+    // calculate_address_from_global_toc (compound instruction)
+    verify_op_code_manually(current_instruction, MacroAssembler::is_addis(*current_instruction));
+    verify_op_code_manually(current_instruction, MacroAssembler::is_addi(*current_instruction));
+
+    verify_op_code_manually(current_instruction, MacroAssembler::is_mtctr(*current_instruction));
+
     // bnectrl (mnemonic) (weak check; not checking the exact type)
     verify_op_code(current_instruction, Assembler::BCCTR_OPCODE);
 
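
The pointer arithmetic in this file follows directly from that layout: the patchable load_const32 pair is now the very first thing in the barrier, so get_patchable_instruction_handle() no longer skips the three words that calculate_address_from_global_toc and mtctr used to occupy, and the verification of those instructions moves behind the guard-value compare check. A minimal sketch of that offset reasoning; the helper names below are hypothetical, only the 3 * 4 and += 2 figures come from the diff:

// Hypothetical illustration, not code from the patch: where the patchable
// NativeMovRegMem pair sits relative to the barrier start, before and after.
static NativeMovRegMem* patchable_before_fix(address barrier_start) {
  // Old layout: addis + addi + mtctr (3 words of 4 bytes each) came first,
  // so the patchable pair started 12 bytes into the barrier.
  return reinterpret_cast<NativeMovRegMem*>(barrier_start + 3 * 4);
}

static NativeMovRegMem* patchable_after_fix(address barrier_start) {
  // New layout: the load_const32 pair leads the barrier, so no offset is
  // needed; the verification walk then does current_instruction += 2 to
  // step over exactly these two patched words.
  return reinterpret_cast<NativeMovRegMem*>(barrier_start);
}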