Skip to content

Commit dff5fe7

Browse files
rmacnak-google and Commit Queue
authored and committed
[vm, compiler] Use load-acquire for accessing _HashVMImmutableBase.index.
TEST=tsan
Cq-Include-Trybots: luci.dart.try:vm-tsan-linux-release-x64-try,vm-tsan-linux-release-arm64-try,iso-stress-linux-arm64-try,iso-stress-linux-x64-try
Change-Id: I15f9af02a9e0353bd93b14483f7fd18f25023308
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/442504
Reviewed-by: Alexander Aprelev <[email protected]>
Commit-Queue: Ryan Macnak <[email protected]>
1 parent ad7f786 commit dff5fe7

File tree

8 files changed

+59
-15
lines changed

8 files changed

+59
-15
lines changed

runtime/vm/compiler/assembler/assembler_base.cc

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,8 @@ AssemblerBase::~AssemblerBase() {}
3131

3232
void AssemblerBase::LoadFromSlot(Register dst,
3333
Register base,
34-
const Slot& slot) {
34+
const Slot& slot,
35+
MemoryOrder memory_order) {
3536
if (!slot.is_tagged()) {
3637
// The result cannot be a floating point or SIMD value.
3738
ASSERT(slot.representation() == kUntagged ||
@@ -58,9 +59,17 @@ void AssemblerBase::LoadFromSlot(Register dst,
5859
}
5960
} else {
6061
if (slot.is_compressed()) {
61-
LoadCompressedFieldFromOffset(dst, base, slot.offset_in_bytes());
62+
if (memory_order == kAcquire) {
63+
LoadAcquireCompressedFromOffset(dst, base, slot.offset_in_bytes());
64+
} else {
65+
LoadCompressedFieldFromOffset(dst, base, slot.offset_in_bytes());
66+
}
6267
} else {
63-
LoadFieldFromOffset(dst, base, slot.offset_in_bytes());
68+
if (memory_order == kAcquire) {
69+
LoadAcquire(dst, FieldAddress(base, slot.offset_in_bytes()));
70+
} else {
71+
LoadFieldFromOffset(dst, base, slot.offset_in_bytes());
72+
}
6473
}
6574
}
6675
}

runtime/vm/compiler/assembler/assembler_base.h

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -761,6 +761,7 @@ class AssemblerBase : public StackResource {
761761
// threads. Currently, only used for lazily populating hash indices in
762762
// shared const maps and sets.
763763
kRelease,
764+
kAcquire,
764765

765766
// All other stores.
766767
kRelaxedNonAtomic,
@@ -1074,7 +1075,10 @@ class AssemblerBase : public StackResource {
10741075
// compressed pointers and compressed pointers may require write barriers, so
10751076
// StoreCompressedIntoObject should be used instead.
10761077

1077-
void LoadFromSlot(Register dst, Register base, const Slot& slot);
1078+
void LoadFromSlot(Register dst,
1079+
Register base,
1080+
const Slot& slot,
1081+
MemoryOrder memory_order = kRelaxedNonAtomic);
10781082
void StoreToSlot(Register src,
10791083
Register base,
10801084
const Slot& slot,

runtime/vm/compiler/backend/il.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4685,10 +4685,10 @@ void LoadFieldInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
46854685

46864686
auto const rep = slot().representation();
46874687
if (calls_initializer()) {
4688-
__ LoadFromSlot(locs()->out(0).reg(), instance_reg, slot());
4688+
__ LoadFromSlot(locs()->out(0).reg(), instance_reg, slot(), memory_order_);
46894689
EmitNativeCodeForInitializerCall(compiler);
46904690
} else if (rep == kTagged || rep == kUntagged) {
4691-
__ LoadFromSlot(locs()->out(0).reg(), instance_reg, slot());
4691+
__ LoadFromSlot(locs()->out(0).reg(), instance_reg, slot(), memory_order_);
46924692
} else if (RepresentationUtils::IsUnboxedInteger(rep)) {
46934693
const size_t value_size = RepresentationUtils::ValueSize(rep);
46944694
if (value_size <= compiler::target::kWordSize) {

runtime/vm/compiler/backend/il.h

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8126,14 +8126,17 @@ class LoadFieldInstr : public TemplateLoadField<1> {
81268126
InnerPointerAccess loads_inner_pointer,
81278127
const InstructionSource& source,
81288128
bool calls_initializer = false,
8129-
intptr_t deopt_id = DeoptId::kNone)
8129+
intptr_t deopt_id = DeoptId::kNone,
8130+
compiler::Assembler::MemoryOrder memory_order =
8131+
compiler::Assembler::kRelaxedNonAtomic)
81308132
: TemplateLoadField(source,
81318133
calls_initializer
81328134
? SlowPathOnSentinelValue::kCallInitializer
81338135
: SlowPathOnSentinelValue::kDoNothing,
81348136
deopt_id,
81358137
slot.IsDartField() ? &slot.field() : nullptr),
81368138
slot_(slot),
8139+
memory_order_(memory_order),
81378140
loads_inner_pointer_(loads_inner_pointer) {
81388141
switch (loads_inner_pointer) {
81398142
case InnerPointerAccess::kNotUntagged:
@@ -8240,6 +8243,7 @@ class LoadFieldInstr : public TemplateLoadField<1> {
82408243

82418244
#define FIELD_LIST(F) \
82428245
F(const Slot&, slot_) \
8246+
F(compiler::Assembler::MemoryOrder, memory_order_) \
82438247
F(InnerPointerAccess, loads_inner_pointer_)
82448248

82458249
DECLARE_INSTRUCTION_SERIALIZABLE_FIELDS(LoadFieldInstr,

runtime/vm/compiler/backend/il_printer.cc

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -968,6 +968,9 @@ void StoreFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
968968
if (stores_inner_pointer() == InnerPointerAccess::kMayBeInnerPointer) {
969969
f->AddString(", MayStoreInnerPointer");
970970
}
971+
if (memory_order_ == compiler::Assembler::kRelease) {
972+
f->AddString(", Release");
973+
}
971974
}
972975

973976
void IfThenElseInstr::PrintOperandsTo(BaseTextBuffer* f) const {
@@ -1049,6 +1052,9 @@ void LoadFieldInstr::PrintOperandsTo(BaseTextBuffer* f) const {
10491052
if (loads_inner_pointer() == InnerPointerAccess::kMayBeInnerPointer) {
10501053
f->AddString(", MayLoadInnerPointer");
10511054
}
1055+
if (memory_order_ == compiler::Assembler::kAcquire) {
1056+
f->AddString(", Acquire");
1057+
}
10521058
}
10531059

10541060
void LoadUntaggedInstr::PrintOperandsTo(BaseTextBuffer* f) const {

runtime/vm/compiler/frontend/base_flow_graph_builder.cc

Lines changed: 10 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -516,23 +516,28 @@ Fragment BaseFlowGraphBuilder::LoadField(const Field& field,
516516
Fragment BaseFlowGraphBuilder::LoadNativeField(
517517
const Slot& native_field,
518518
InnerPointerAccess loads_inner_pointer,
519-
bool calls_initializer) {
519+
bool calls_initializer,
520+
compiler::Assembler::MemoryOrder memory_order) {
520521
LoadFieldInstr* load = new (Z) LoadFieldInstr(
521522
Pop(), native_field, loads_inner_pointer, InstructionSource(),
522-
calls_initializer, calls_initializer ? GetNextDeoptId() : DeoptId::kNone);
523+
calls_initializer, calls_initializer ? GetNextDeoptId() : DeoptId::kNone,
524+
memory_order);
523525
Push(load);
524526
return Fragment(load);
525527
}
526528

527-
Fragment BaseFlowGraphBuilder::LoadNativeField(const Slot& native_field,
528-
bool calls_initializer) {
529+
Fragment BaseFlowGraphBuilder::LoadNativeField(
530+
const Slot& native_field,
531+
bool calls_initializer,
532+
compiler::Assembler::MemoryOrder memory_order) {
529533
const InnerPointerAccess loads_inner_pointer =
530534
native_field.representation() == kUntagged
531535
? (native_field.may_contain_inner_pointer()
532536
? InnerPointerAccess::kMayBeInnerPointer
533537
: InnerPointerAccess::kCannotBeInnerPointer)
534538
: InnerPointerAccess::kNotUntagged;
535-
return LoadNativeField(native_field, loads_inner_pointer, calls_initializer);
539+
return LoadNativeField(native_field, loads_inner_pointer, calls_initializer,
540+
memory_order);
536541
}
537542

538543
Fragment BaseFlowGraphBuilder::LoadLocal(LocalVariable* variable) {

runtime/vm/compiler/frontend/base_flow_graph_builder.h

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -185,9 +185,13 @@ class BaseFlowGraphBuilder {
185185
Fragment LoadField(const Field& field, bool calls_initializer);
186186
Fragment LoadNativeField(const Slot& native_field,
187187
InnerPointerAccess loads_inner_pointer,
188-
bool calls_initializer = false);
188+
bool calls_initializer = false,
189+
compiler::Assembler::MemoryOrder memory_order =
190+
compiler::Assembler::kRelaxedNonAtomic);
189191
Fragment LoadNativeField(const Slot& native_field,
190-
bool calls_initializer = false);
192+
bool calls_initializer = false,
193+
compiler::Assembler::MemoryOrder memory_order =
194+
compiler::Assembler::kRelaxedNonAtomic);
191195
// Pass true for index_unboxed if indexing into external typed data.
192196
Fragment LoadIndexed(classid_t class_id,
193197
intptr_t index_scale = compiler::target::kWordSize,

runtime/vm/compiler/frontend/kernel_to_il.cc

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -907,7 +907,6 @@ const Function& TypedListGetNativeFunction(Thread* thread, classid_t cid) {
907907
V(ReceivePort_getSendPort, ReceivePort_send_port) \
908908
V(ReceivePort_getHandler, ReceivePort_handler) \
909909
V(ImmutableLinkedHashBase_getData, ImmutableLinkedHashBase_data) \
910-
V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index) \
911910
V(LinkedHashBase_getData, LinkedHashBase_data) \
912911
V(LinkedHashBase_getDeletedKeys, LinkedHashBase_deleted_keys) \
913912
V(LinkedHashBase_getHashMask, LinkedHashBase_hash_mask) \
@@ -925,6 +924,9 @@ const Function& TypedListGetNativeFunction(Thread* thread, classid_t cid) {
925924
V(WeakProperty_getValue, WeakProperty_value) \
926925
V(WeakReference_getTarget, WeakReference_target)
927926

927+
#define LOAD_ACQUIRE_NATIVE_FIELD(V) \
928+
V(ImmutableLinkedHashBase_getIndex, ImmutableLinkedHashBase_index)
929+
928930
#define STORE_NATIVE_FIELD(V) \
929931
V(Finalizer_setCallback, Finalizer_callback) \
930932
V(FinalizerBase_setAllEntries, FinalizerBase_all_entries) \
@@ -1115,6 +1117,7 @@ bool FlowGraphBuilder::IsRecognizedMethodForFlowGraph(
11151117
case MethodRecognizer::kDouble_hashCode:
11161118
#define CASE(method, slot) case MethodRecognizer::k##method:
11171119
LOAD_NATIVE_FIELD(CASE)
1120+
LOAD_ACQUIRE_NATIVE_FIELD(CASE)
11181121
STORE_NATIVE_FIELD(CASE)
11191122
STORE_NATIVE_FIELD_NO_BARRIER(CASE)
11201123
#undef CASE
@@ -1925,6 +1928,15 @@ FlowGraph* FlowGraphBuilder::BuildGraphOfRecognizedMethod(
19251928
break;
19261929
LOAD_NATIVE_FIELD(IL_BODY)
19271930
#undef IL_BODY
1931+
#define IL_BODY(method, slot) \
1932+
case MethodRecognizer::k##method: \
1933+
ASSERT_EQUAL(function.NumParameters(), 1); \
1934+
body += LoadLocal(parsed_function_->RawParameterVariable(0)); \
1935+
body += \
1936+
LoadNativeField(Slot::slot(), false, compiler::Assembler::kAcquire); \
1937+
break;
1938+
LOAD_ACQUIRE_NATIVE_FIELD(IL_BODY)
1939+
#undef IL_BODY
19281940
#define IL_BODY(method, slot) \
19291941
case MethodRecognizer::k##method: \
19301942
ASSERT_EQUAL(function.NumParameters(), 2); \

0 commit comments

Comments (0)