Skip to content

Commit 285b110

Browse files
rmacnak-google authored and Commit Queue committed
[vm, compiler] Reduce spilling needed to call __tsan_read/write for array access.
TEST=tsan
Bug: #61352
Change-Id: Ia3dc1a057f96e2c9e695a28824882a76e4893c69
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/449143
Reviewed-by: Alexander Aprelev <[email protected]>
Commit-Queue: Ryan Macnak <[email protected]>
1 parent 0e08c8e commit 285b110

File tree

8 files changed

+268
-172
lines changed

8 files changed

+268
-172
lines changed

runtime/vm/compiler/backend/constant_propagator.cc

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -312,6 +312,9 @@ void ConstantPropagator::VisitTsanFuncEntryExit(TsanFuncEntryExitInstr* instr) {
312312

313313
void ConstantPropagator::VisitTsanReadWrite(TsanReadWriteInstr* instr) {}
314314

315+
void ConstantPropagator::VisitTsanReadWriteIndexed(
316+
TsanReadWriteIndexedInstr* instr) {}
317+
315318
Definition* ConstantPropagator::UnwrapPhi(Definition* defn) {
316319
if (defn->IsPhi()) {
317320
JoinEntryInstr* block = defn->AsPhi()->block();

runtime/vm/compiler/backend/flow_graph.cc

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2686,9 +2686,9 @@ void FlowGraph::AddTsanInstrumentation() {
26862686
if (LoadFieldInstr* load = current->AsLoadField()) {
26872687
if (!load->slot().is_no_sanitize_thread() &&
26882688
load->memory_order() == compiler::Assembler::kRelaxedNonAtomic) {
2689-
auto* tsan_read = new (Z) TsanReadWriteInstr(
2690-
TsanReadWriteInstr::Kind::kRead, load->slot(),
2691-
new (Z) Value(load->instance()->definition()), load->source());
2689+
auto* tsan_read = new (Z)
2690+
TsanReadWriteInstr(TsanReadWriteInstr::Kind::kRead, load->slot(),
2691+
load->instance()->Copy(Z), load->source());
26922692
InsertBefore(load, tsan_read, /*env=*/nullptr, kEffect);
26932693
needs_entry_exit = true;
26942694
}
@@ -2697,10 +2697,25 @@ void FlowGraph::AddTsanInstrumentation() {
26972697
store->memory_order() == compiler::Assembler::kRelaxedNonAtomic) {
26982698
auto* tsan_write = new (Z) TsanReadWriteInstr(
26992699
TsanReadWriteInstr::Kind::kWrite, store->slot(),
2700-
new (Z) Value(store->instance()->definition()), store->source());
2700+
store->instance()->Copy(Z), store->source());
27012701
InsertBefore(store, tsan_write, /*env=*/nullptr, kEffect);
27022702
needs_entry_exit = true;
27032703
}
2704+
2705+
} else if (LoadIndexedInstr* load = current->AsLoadIndexed()) {
2706+
auto* tsan_read = new (Z) TsanReadWriteIndexedInstr(
2707+
TsanReadWriteIndexedInstr::Kind::kRead, load->array()->Copy(Z),
2708+
load->index()->Copy(Z), load->index_unboxed(), load->index_scale(),
2709+
load->class_id(), load->source());
2710+
InsertBefore(load, tsan_read, /*env=*/nullptr, kEffect);
2711+
needs_entry_exit = true;
2712+
} else if (StoreIndexedInstr* store = current->AsStoreIndexed()) {
2713+
auto* tsan_write = new (Z) TsanReadWriteIndexedInstr(
2714+
TsanReadWriteIndexedInstr::Kind::kWrite, store->array()->Copy(Z),
2715+
store->index()->Copy(Z), store->index_unboxed(),
2716+
store->index_scale(), store->class_id(), store->source());
2717+
InsertBefore(store, tsan_write, /*env=*/nullptr, kEffect);
2718+
needs_entry_exit = true;
27042719
} else if (current->CanCallDart() || current->MayThrow()) {
27052720
needs_entry_exit = true;
27062721
}

runtime/vm/compiler/backend/il.cc

Lines changed: 159 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -7285,27 +7285,23 @@ Definition* InvokeMathCFunctionInstr::Canonicalize(FlowGraph* flow_graph) {
72857285
return this;
72867286
}
72877287

7288-
LocationSummary* TsanFuncEntryExitInstr::MakeLocationSummary(Zone* zone,
7289-
bool opt) const {
7288+
static LocationSummary* MakeTsanLocationSummary(Zone* zone,
7289+
intptr_t num_inputs) {
72907290
const intptr_t kNumTemps = 1;
72917291
LocationSummary* result = new (zone) LocationSummary(
7292-
zone, InputCount(), kNumTemps, LocationSummary::kNativeLeafCall);
7292+
zone, num_inputs, kNumTemps, LocationSummary::kNativeLeafCall);
7293+
for (intptr_t i = 0; i < num_inputs; i++) {
7294+
result->set_in(i, Location::RequiresRegister());
7295+
}
72937296
result->set_temp(0, Location::RegisterLocation(CALLEE_SAVED_TEMP));
72947297
return result;
72957298
}
72967299

7297-
const RuntimeEntry& TsanFuncEntryExitInstr::TargetFunction() const {
7298-
if (kind_ == kEntry) {
7299-
return kTsanFuncEntryRuntimeEntry;
7300-
} else {
7301-
return kTsanFuncExitRuntimeEntry;
7302-
}
7303-
}
7304-
7305-
void TsanFuncEntryExitInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
7306-
if (!compiler->flow_graph().graph_entry()->NeedsFrame()) return;
7307-
7308-
const Register saved_sp = locs()->temp(0).reg();
7300+
static void EmitTsanNativeCode(
7301+
FlowGraphCompiler* compiler,
7302+
LocationSummary* locs,
7303+
std::function<const RuntimeEntry&()> move_parameters) {
7304+
const Register saved_sp = locs->temp(0).reg();
73097305
ASSERT(IsCalleeSavedRegister(saved_sp));
73107306
ASSERT(IsCalleeSavedRegister(THR));
73117307
ASSERT(IsCalleeSavedRegister(PP));
@@ -7326,13 +7322,8 @@ void TsanFuncEntryExitInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
73267322
#else
73277323
__ ReserveAlignedFrameSpace(0);
73287324
#endif
7329-
if (kind_ == kEntry) {
7330-
__ Load(CallingConventions::ArgumentRegisters[0],
7331-
compiler::Address(
7332-
FPREG, compiler::target::frame_layout.saved_caller_pc_from_fp *
7333-
compiler::target::kWordSize));
7334-
}
7335-
__ Load(TMP, compiler::Address(THR, TargetFunction().OffsetFromThread()));
7325+
auto& entry = move_parameters();
7326+
__ Load(TMP, compiler::Address(THR, entry.OffsetFromThread()));
73367327
__ Store(TMP,
73377328
compiler::Address(THR, compiler::target::Thread::vm_tag_offset()));
73387329
__ CallCFunction(TMP);
@@ -7345,89 +7336,162 @@ void TsanFuncEntryExitInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
73457336
#endif
73467337
}
73477338

7339+
LocationSummary* TsanFuncEntryExitInstr::MakeLocationSummary(Zone* zone,
7340+
bool opt) const {
7341+
return MakeTsanLocationSummary(zone, /*num_inputs=*/0);
7342+
}
7343+
7344+
void TsanFuncEntryExitInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
7345+
if (!compiler->flow_graph().graph_entry()->NeedsFrame()) return;
7346+
7347+
EmitTsanNativeCode(compiler, locs(), [&]() -> const RuntimeEntry& {
7348+
if (kind_ == kEntry) {
7349+
__ Load(
7350+
CallingConventions::ArgumentRegisters[0],
7351+
compiler::Address(
7352+
FPREG, compiler::target::frame_layout.saved_caller_pc_from_fp *
7353+
compiler::target::kWordSize));
7354+
return kTsanFuncEntryRuntimeEntry;
7355+
} else {
7356+
return kTsanFuncExitRuntimeEntry;
7357+
}
7358+
});
7359+
}
7360+
73487361
LocationSummary* TsanReadWriteInstr::MakeLocationSummary(Zone* zone,
73497362
bool opt) const {
7350-
const intptr_t kNumTemps = 1;
7351-
LocationSummary* result = new (zone) LocationSummary(
7352-
zone, InputCount(), kNumTemps, LocationSummary::kNativeLeafCall);
7353-
result->set_temp(0, Location::RegisterLocation(CALLEE_SAVED_TEMP));
7354-
result->set_in(0, Location::RequiresRegister());
7355-
return result;
7363+
return MakeTsanLocationSummary(zone, /*num_inputs=*/1);
73567364
}
73577365

7358-
const RuntimeEntry& TsanReadWriteInstr::TargetFunction() const {
7359-
intptr_t size = RepresentationUtils::ValueSize(slot().representation());
7360-
if (kind_ == Kind::kRead) {
7361-
switch (size) {
7362-
case 1:
7363-
return kTsanRead1RuntimeEntry;
7364-
case 2:
7365-
return kTsanRead2RuntimeEntry;
7366-
case 4:
7367-
return kTsanRead4RuntimeEntry;
7368-
case 8:
7369-
return kTsanRead8RuntimeEntry;
7370-
case 16:
7371-
return kTsanRead16RuntimeEntry;
7372-
default:
7373-
UNREACHABLE();
7374-
}
7375-
} else if (kind_ == kWrite) {
7376-
switch (size) {
7377-
case 1:
7378-
return kTsanWrite1RuntimeEntry;
7379-
case 2:
7380-
return kTsanWrite2RuntimeEntry;
7381-
case 4:
7382-
return kTsanWrite4RuntimeEntry;
7383-
case 8:
7384-
return kTsanWrite8RuntimeEntry;
7385-
case 16:
7386-
return kTsanWrite16RuntimeEntry;
7387-
default:
7388-
UNREACHABLE();
7366+
void TsanReadWriteInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
7367+
EmitTsanNativeCode(compiler, locs(), [&]() -> const RuntimeEntry& {
7368+
const Register instance_reg = locs()->in(0).reg();
7369+
intptr_t tag = slot().has_untagged_instance() ? 0 : kHeapObjectTag;
7370+
__ AddImmediate(CallingConventions::ArgumentRegisters[0], instance_reg,
7371+
slot().offset_in_bytes() - tag);
7372+
7373+
intptr_t size = RepresentationUtils::ValueSize(slot().representation());
7374+
if (kind_ == Kind::kRead) {
7375+
switch (size) {
7376+
case 1:
7377+
return kTsanRead1RuntimeEntry;
7378+
case 2:
7379+
return kTsanRead2RuntimeEntry;
7380+
case 4:
7381+
return kTsanRead4RuntimeEntry;
7382+
case 8:
7383+
return kTsanRead8RuntimeEntry;
7384+
case 16:
7385+
return kTsanRead16RuntimeEntry;
7386+
default:
7387+
UNREACHABLE();
7388+
}
7389+
} else if (kind_ == kWrite) {
7390+
switch (size) {
7391+
case 1:
7392+
return kTsanWrite1RuntimeEntry;
7393+
case 2:
7394+
return kTsanWrite2RuntimeEntry;
7395+
case 4:
7396+
return kTsanWrite4RuntimeEntry;
7397+
case 8:
7398+
return kTsanWrite8RuntimeEntry;
7399+
case 16:
7400+
return kTsanWrite16RuntimeEntry;
7401+
default:
7402+
UNREACHABLE();
7403+
}
73897404
}
7390-
}
7391-
UNREACHABLE();
7405+
UNREACHABLE();
7406+
});
73927407
}
73937408

7394-
void TsanReadWriteInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
7395-
const Register saved_sp = locs()->temp(0).reg();
7396-
ASSERT(IsCalleeSavedRegister(saved_sp));
7397-
ASSERT(IsCalleeSavedRegister(THR));
7398-
ASSERT(IsCalleeSavedRegister(PP));
7399-
ASSERT(IsCalleeSavedRegister(CODE_REG));
7400-
#if defined(TARGET_ARCH_ARM64)
7401-
ASSERT(IsCalleeSavedRegister(NULL_REG));
7402-
ASSERT(IsCalleeSavedRegister(HEAP_BITS));
7403-
ASSERT(IsCalleeSavedRegister(DISPATCH_TABLE_REG));
7404-
#elif defined(TARGET_ARCH_RISCV64)
7405-
ASSERT(IsCalleeSavedRegister(NULL_REG));
7406-
ASSERT(IsCalleeSavedRegister(WRITE_BARRIER_STATE));
7407-
ASSERT(IsCalleeSavedRegister(DISPATCH_TABLE_REG));
7408-
#endif
7409+
LocationSummary* TsanReadWriteIndexedInstr::MakeLocationSummary(
7410+
Zone* zone,
7411+
bool opt) const {
7412+
return MakeTsanLocationSummary(zone, /*num_inputs=*/2);
7413+
}
74097414

7410-
__ MoveRegister(saved_sp, SPREG);
7411-
#if defined(TARGET_ARCH_ARM64)
7412-
__ AndImmediate(CSP, SP, ~(OS::ActivationFrameAlignment() - 1));
7415+
void TsanReadWriteIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
7416+
EmitTsanNativeCode(compiler, locs(), [&]() -> const RuntimeEntry& {
7417+
const Register array_reg = locs()->in(kArrayPos).reg();
7418+
Register index_reg = locs()->in(kIndexPos).reg();
7419+
7420+
bool is_untagged = array()->definition()->representation() == kUntagged;
7421+
7422+
#if defined(TARGET_ARCH_X64)
7423+
bool index_unboxed = index_unboxed_;
7424+
if (index_scale_ == 1 && !index_unboxed) {
7425+
__ movq(TMP, index_reg);
7426+
__ SmiUntag(TMP);
7427+
index_unboxed = true;
7428+
index_reg = TMP;
7429+
} else if (index_scale_ == 16 && index_unboxed) {
7430+
// X64 does not support addressing mode using TIMES_16.
7431+
__ movq(TMP, index_reg);
7432+
__ SmiUntag(TMP);
7433+
index_unboxed = false;
7434+
index_reg = TMP;
7435+
} else if (!index_unboxed) {
7436+
// Note: we don't bother to ensure index is a writable input because any
7437+
// other instructions using it must also not rely on the upper bits
7438+
// when compressed.
7439+
__ ExtendNonNegativeSmi(index_reg);
7440+
}
7441+
__ leaq(CallingConventions::ArgumentRegisters[0],
7442+
compiler::Assembler::ElementAddressForRegIndex(
7443+
is_untagged, class_id(), index_scale_, index_unboxed, array_reg,
7444+
index_reg));
7445+
#elif defined(TARGET_ARCH_ARM64)
7446+
__ ComputeElementAddressForRegIndex(R0, is_untagged, class_id(),
7447+
index_scale(), index_unboxed_,
7448+
array_reg, index_reg);
7449+
#elif defined(TARGET_ARCH_RISCV64)
7450+
__ ComputeElementAddressForRegIndex(A0, is_untagged, class_id(),
7451+
index_scale(), index_unboxed_,
7452+
array_reg, index_reg);
74137453
#else
7414-
__ ReserveAlignedFrameSpace(0);
7415-
#endif
7416-
const Register instance_reg = locs()->in(0).reg();
7417-
intptr_t tag = slot().has_untagged_instance() ? 0 : kHeapObjectTag;
7418-
__ AddImmediate(CallingConventions::ArgumentRegisters[0], instance_reg,
7419-
slot().offset_in_bytes() - tag);
7420-
__ Load(TMP, compiler::Address(THR, TargetFunction().OffsetFromThread()));
7421-
__ Store(TMP,
7422-
compiler::Address(THR, compiler::target::Thread::vm_tag_offset()));
7423-
__ CallCFunction(TMP);
7424-
__ LoadImmediate(TMP, VMTag::kDartTagId);
7425-
__ Store(TMP,
7426-
compiler::Address(THR, compiler::target::Thread::vm_tag_offset()));
7427-
__ MoveRegister(SPREG, saved_sp);
7428-
#if defined(TARGET_ARCH_ARM64)
7429-
__ SetupCSPFromThread(THR);
7454+
UNIMPLEMENTED();
7455+
USE(array_reg);
7456+
USE(index_reg);
7457+
USE(is_untagged);
74307458
#endif
7459+
7460+
intptr_t size = RepresentationUtils::ValueSize(
7461+
RepresentationUtils::RepresentationOfArrayElement(class_id()));
7462+
if (kind_ == Kind::kRead) {
7463+
switch (size) {
7464+
case 1:
7465+
return kTsanRead1RuntimeEntry;
7466+
case 2:
7467+
return kTsanRead2RuntimeEntry;
7468+
case 4:
7469+
return kTsanRead4RuntimeEntry;
7470+
case 8:
7471+
return kTsanRead8RuntimeEntry;
7472+
case 16:
7473+
return kTsanRead16RuntimeEntry;
7474+
default:
7475+
UNREACHABLE();
7476+
}
7477+
} else if (kind_ == kWrite) {
7478+
switch (size) {
7479+
case 1:
7480+
return kTsanWrite1RuntimeEntry;
7481+
case 2:
7482+
return kTsanWrite2RuntimeEntry;
7483+
case 4:
7484+
return kTsanWrite4RuntimeEntry;
7485+
case 8:
7486+
return kTsanWrite8RuntimeEntry;
7487+
case 16:
7488+
return kTsanWrite16RuntimeEntry;
7489+
default:
7490+
UNREACHABLE();
7491+
}
7492+
}
7493+
UNREACHABLE();
7494+
});
74317495
}
74327496

74337497
bool DoubleToIntegerInstr::SupportsFloorAndCeil() {

0 commit comments

Comments
 (0)