Skip to content

Commit e6b28e0

Browse files
committed
Merge commit e6b28e0 (2 parents: 4ba980b + d3408a4)

File tree

27 files changed

+329
-72
lines changed

27 files changed

+329
-72
lines changed

src/hotspot/share/c1/c1_IR.cpp

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -191,7 +191,8 @@ CodeEmitInfo::CodeEmitInfo(ValueStack* stack, XHandlers* exception_handlers, boo
191191
, _oop_map(NULL)
192192
, _stack(stack)
193193
, _is_method_handle_invoke(false)
194-
, _deoptimize_on_exception(deoptimize_on_exception) {
194+
, _deoptimize_on_exception(deoptimize_on_exception)
195+
, _force_reexecute(false) {
195196
assert(_stack != NULL, "must be non null");
196197
}
197198

@@ -203,7 +204,8 @@ CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack)
203204
, _oop_map(NULL)
204205
, _stack(stack == NULL ? info->_stack : stack)
205206
, _is_method_handle_invoke(info->_is_method_handle_invoke)
206-
, _deoptimize_on_exception(info->_deoptimize_on_exception) {
207+
, _deoptimize_on_exception(info->_deoptimize_on_exception)
208+
, _force_reexecute(info->_force_reexecute) {
207209

208210
// deep copy of exception handlers
209211
if (info->_exception_handlers != NULL) {
@@ -215,7 +217,8 @@ CodeEmitInfo::CodeEmitInfo(CodeEmitInfo* info, ValueStack* stack)
215217
void CodeEmitInfo::record_debug_info(DebugInformationRecorder* recorder, int pc_offset) {
216218
// record the safepoint before recording the debug info for enclosing scopes
217219
recorder->add_safepoint(pc_offset, _oop_map->deep_copy());
218-
_scope_debug_info->record_debug_info(recorder, pc_offset, true/*topmost*/, _is_method_handle_invoke);
220+
bool reexecute = _force_reexecute || _scope_debug_info->should_reexecute();
221+
_scope_debug_info->record_debug_info(recorder, pc_offset, reexecute, _is_method_handle_invoke);
219222
recorder->end_safepoint(pc_offset);
220223
}
221224

src/hotspot/share/c1/c1_IR.hpp

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -232,16 +232,15 @@ class IRScopeDebugInfo: public CompilationResourceObj {
232232
//Whether we should reexecute this bytecode for deopt
233233
bool should_reexecute();
234234

235-
void record_debug_info(DebugInformationRecorder* recorder, int pc_offset, bool topmost, bool is_method_handle_invoke = false) {
235+
void record_debug_info(DebugInformationRecorder* recorder, int pc_offset, bool reexecute, bool is_method_handle_invoke = false) {
236236
if (caller() != NULL) {
237237
// Order is significant: Must record caller first.
238-
caller()->record_debug_info(recorder, pc_offset, false/*topmost*/);
238+
caller()->record_debug_info(recorder, pc_offset, false/*reexecute*/);
239239
}
240240
DebugToken* locvals = recorder->create_scope_values(locals());
241241
DebugToken* expvals = recorder->create_scope_values(expressions());
242242
DebugToken* monvals = recorder->create_monitor_values(monitors());
243243
// reexecute allowed only for the topmost frame
244-
bool reexecute = topmost ? should_reexecute() : false;
245244
bool return_oop = false; // This flag will be ignored since it used only for C2 with escape analysis.
246245
bool rethrow_exception = false;
247246
bool is_opt_native = false;
@@ -264,6 +263,7 @@ class CodeEmitInfo: public CompilationResourceObj {
264263
ValueStack* _stack; // used by deoptimization (contains also monitors
265264
bool _is_method_handle_invoke; // true if the associated call site is a MethodHandle call site.
266265
bool _deoptimize_on_exception;
266+
bool _force_reexecute; // force the reexecute flag on, used for patching stub
267267

268268
FrameMap* frame_map() const { return scope()->compilation()->frame_map(); }
269269
Compilation* compilation() const { return scope()->compilation(); }
@@ -290,7 +290,11 @@ class CodeEmitInfo: public CompilationResourceObj {
290290
bool is_method_handle_invoke() const { return _is_method_handle_invoke; }
291291
void set_is_method_handle_invoke(bool x) { _is_method_handle_invoke = x; }
292292

293+
bool force_reexecute() const { return _force_reexecute; }
294+
void set_force_reexecute() { _force_reexecute = true; }
295+
293296
int interpreter_frame_size() const;
297+
294298
};
295299

296300

src/hotspot/share/c1/c1_LIRAssembler.cpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ void LIR_Assembler::patching_epilog(PatchingStub* patch, LIR_PatchCode patch_cod
4343
while ((intx) _masm->pc() - (intx) patch->pc_start() < NativeGeneralJump::instruction_size) {
4444
_masm->nop();
4545
}
46+
info->set_force_reexecute();
4647
patch->install(_masm, patch_code, obj, info);
4748
append_code_stub(patch);
4849

src/hotspot/share/ci/ciMethod.cpp

Lines changed: 17 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -117,9 +117,11 @@ ciMethod::ciMethod(const methodHandle& h_m, ciInstanceKlass* holder) :
117117

118118
if (h_m->method_holder()->is_linked()) {
119119
_can_be_statically_bound = h_m->can_be_statically_bound();
120+
_can_omit_stack_trace = h_m->can_omit_stack_trace();
120121
} else {
121122
// Have to use a conservative value in this case.
122123
_can_be_statically_bound = false;
124+
_can_omit_stack_trace = true;
123125
}
124126

125127
// Adjust the definition of this condition to be more useful:
@@ -176,6 +178,7 @@ ciMethod::ciMethod(ciInstanceKlass* holder,
176178
_intrinsic_id( vmIntrinsics::_none),
177179
_instructions_size(-1),
178180
_can_be_statically_bound(false),
181+
_can_omit_stack_trace(true),
179182
_liveness( NULL)
180183
#if defined(COMPILER2)
181184
,
@@ -766,6 +769,20 @@ bool ciMethod::can_be_statically_bound(ciInstanceKlass* context) const {
766769
return (holder() == context) && can_be_statically_bound();
767770
}
768771

772+
// ------------------------------------------------------------------
773+
// ciMethod::can_omit_stack_trace
774+
//
775+
// Tries to determine whether a method can omit stack trace in throw in compiled code.
776+
bool ciMethod::can_omit_stack_trace() const {
777+
if (!StackTraceInThrowable) {
778+
return true; // stack trace is switched off.
779+
}
780+
if (!OmitStackTraceInFastThrow) {
781+
return false; // Have to provide stack trace.
782+
}
783+
return _can_omit_stack_trace;
784+
}
785+
769786
// ------------------------------------------------------------------
770787
// ciMethod::resolve_invoke
771788
//

src/hotspot/share/ci/ciMethod.hpp

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -92,6 +92,7 @@ class ciMethod : public ciMetadata {
9292
bool _is_c2_compilable;
9393
bool _can_be_parsed;
9494
bool _can_be_statically_bound;
95+
bool _can_omit_stack_trace;
9596
bool _has_reserved_stack_access;
9697
bool _is_overpass;
9798

@@ -364,6 +365,8 @@ class ciMethod : public ciMetadata {
364365

365366
bool can_be_statically_bound(ciInstanceKlass* context) const;
366367

368+
bool can_omit_stack_trace() const;
369+
367370
// Replay data methods
368371
static void dump_name_as_ascii(outputStream* st, Method* method);
369372
void dump_name_as_ascii(outputStream* st);

src/hotspot/share/classfile/vmSymbols.hpp

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,6 +142,7 @@
142142
template(java_util_Iterator, "java/util/Iterator") \
143143
template(java_lang_Record, "java/lang/Record") \
144144
template(sun_instrument_InstrumentationImpl, "sun/instrument/InstrumentationImpl") \
145+
template(sun_invoke_util_ValueConversions, "sun/invoke/util/ValueConversions") \
145146
\
146147
template(jdk_internal_loader_NativeLibraries, "jdk/internal/loader/NativeLibraries") \
147148
template(jdk_internal_loader_BuiltinClassLoader, "jdk/internal/loader/BuiltinClassLoader") \

src/hotspot/share/jfr/leakprofiler/checkpoint/objectSampleCheckpoint.cpp

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -202,7 +202,7 @@ static void prepare_for_resolution() {
202202

203203
static bool stack_trace_precondition(const ObjectSample* sample) {
204204
assert(sample != NULL, "invariant");
205-
return sample->has_stack_trace_id() && !sample->is_dead();
205+
return sample->has_stack_trace_id() && !sample->is_dead() && !sample->stacktrace().valid();
206206
}
207207

208208
class StackTraceBlobInstaller {
@@ -249,7 +249,7 @@ void StackTraceBlobInstaller::install(ObjectSample* sample) {
249249
writer.write_type(TYPE_STACKTRACE);
250250
writer.write_count(1);
251251
ObjectSampleCheckpoint::write_stacktrace(stack_trace, writer);
252-
blob = writer.copy();
252+
blob = writer.move();
253253
_cache.put(sample, blob);
254254
sample->set_stacktrace(blob);
255255
}
@@ -278,7 +278,7 @@ void ObjectSampleCheckpoint::on_rotation(const ObjectSampler* sampler) {
278278
}
279279

280280
static bool is_klass_unloaded(traceid klass_id) {
281-
assert(ClassLoaderDataGraph_lock->owned_by_self(), "invariant");
281+
assert_locked_or_safepoint(ClassLoaderDataGraph_lock);
282282
return JfrKlassUnloading::is_unloaded(klass_id);
283283
}
284284

@@ -381,6 +381,12 @@ void ObjectSampleCheckpoint::write(const ObjectSampler* sampler, EdgeStore* edge
381381
assert(sampler != NULL, "invariant");
382382
assert(edge_store != NULL, "invariant");
383383
assert(thread != NULL, "invariant");
384+
{
385+
// First install stacktrace blobs for the most recently added candidates.
386+
MutexLocker lock(SafepointSynchronize::is_at_safepoint() ? nullptr : ClassLoaderDataGraph_lock);
387+
// the lock is needed to ensure the unload lists do not grow in the middle of inspection.
388+
install_stack_traces(sampler);
389+
}
384390
write_sample_blobs(sampler, emit_all, thread);
385391
// write reference chains
386392
if (!edge_store->is_empty()) {

src/hotspot/share/jfr/recorder/checkpoint/types/jfrTypeSet.cpp

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -232,6 +232,7 @@ int write__klass(JfrCheckpointWriter* writer, const void* k) {
232232
int write__klass__leakp(JfrCheckpointWriter* writer, const void* k) {
233233
assert(k != NULL, "invariant");
234234
KlassPtr klass = (KlassPtr)k;
235+
CLEAR_LEAKP(klass);
235236
return write_klass(writer, klass, true);
236237
}
237238

@@ -835,7 +836,7 @@ class MethodIteratorHost {
835836
private:
836837
MethodCallback _method_cb;
837838
KlassCallback _klass_cb;
838-
MethodUsedPredicate<leakp> _method_used_predicate;
839+
MethodUsedPredicate _method_used_predicate;
839840
MethodFlagPredicate<leakp> _method_flag_predicate;
840841
public:
841842
MethodIteratorHost(JfrCheckpointWriter* writer,

src/hotspot/share/jfr/recorder/checkpoint/types/jfrTypeSetUtils.hpp

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -146,16 +146,12 @@ class SymbolPredicate {
146146
}
147147
};
148148

149-
template <bool leakp>
150149
class MethodUsedPredicate {
151150
bool _current_epoch;
152151
public:
153152
MethodUsedPredicate(bool current_epoch) : _current_epoch(current_epoch) {}
154153
bool operator()(const Klass* klass) {
155-
if (_current_epoch) {
156-
return leakp ? IS_LEAKP(klass) : METHOD_USED_THIS_EPOCH(klass);
157-
}
158-
return leakp ? IS_LEAKP(klass) : METHOD_USED_PREVIOUS_EPOCH(klass);
154+
return _current_epoch ? METHOD_USED_THIS_EPOCH(klass) : METHOD_USED_PREVIOUS_EPOCH(klass);
159155
}
160156
};
161157

src/hotspot/share/oops/method.cpp

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -818,6 +818,18 @@ bool Method::can_be_statically_bound(InstanceKlass* context) const {
818818
return (method_holder() == context) && can_be_statically_bound();
819819
}
820820

821+
/**
822+
* Returns false if this is one of specially treated methods for
823+
* which we have to provide stack trace in throw in compiled code.
824+
* Returns true otherwise.
825+
*/
826+
bool Method::can_omit_stack_trace() {
827+
if (klass_name() == vmSymbols::sun_invoke_util_ValueConversions()) {
828+
return false; // All methods in sun.invoke.util.ValueConversions
829+
}
830+
return true;
831+
}
832+
821833
bool Method::is_accessor() const {
822834
return is_getter() || is_setter();
823835
}

0 commit comments

Comments (0)