@@ -1542,15 +1542,14 @@ JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method_ic_miss(JavaThread*
 #endif /* ASSERT */
 
   methodHandle callee_method;
-  const bool is_optimized = false;
   bool caller_does_not_scalarize = false;
   JRT_BLOCK
     callee_method = SharedRuntime::handle_ic_miss_helper(caller_does_not_scalarize, CHECK_NULL);
     // Return Method* through TLS
     current->set_vm_result_metadata(callee_method());
   JRT_BLOCK_END
   // return compiled code entry point after potential safepoints
-  return get_resolved_entry(current, callee_method, false, is_optimized, caller_does_not_scalarize);
+  return get_resolved_entry(current, callee_method, false, false, caller_does_not_scalarize);
 JRT_END
15551554
15561555
@@ -1606,11 +1605,11 @@ JRT_BLOCK_ENTRY(address, SharedRuntime::handle_wrong_method(JavaThread* current)
   bool caller_does_not_scalarize = false;
   JRT_BLOCK
     // Force resolving of caller (if we called from compiled frame)
-    callee_method = SharedRuntime::reresolve_call_site(is_static_call, is_optimized, caller_does_not_scalarize, CHECK_NULL);
+    callee_method = SharedRuntime::reresolve_call_site(is_optimized, caller_does_not_scalarize, CHECK_NULL);
     current->set_vm_result_metadata(callee_method());
   JRT_BLOCK_END
   // return compiled code entry point after potential safepoints
-  return get_resolved_entry(current, callee_method, is_static_call, is_optimized, caller_does_not_scalarize);
+  return get_resolved_entry(current, callee_method, callee_method->is_static(), is_optimized, caller_does_not_scalarize);
 JRT_END
 
 // Handle abstract method call
@@ -1778,7 +1777,7 @@ methodHandle SharedRuntime::handle_ic_miss_helper(bool& caller_does_not_scalariz
 // sites, and static call sites. Typically used to change a call sites
 // destination from compiled to interpreted.
 //
-methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_optimized, bool& caller_does_not_scalarize, TRAPS) {
+methodHandle SharedRuntime::reresolve_call_site(bool& is_optimized, bool& caller_does_not_scalarize, TRAPS) {
   JavaThread* current = THREAD;
   ResourceMark rm(current);
   RegisterMap reg_map(current,
@@ -1791,14 +1790,24 @@ methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_o
   if (caller.is_compiled_frame()) {
     caller_does_not_scalarize = caller.cb()->as_nmethod()->is_compiled_by_c1();
   }
+  assert(!caller.is_interpreted_frame(), "must be compiled");
 
-  // Do nothing if the frame isn't a live compiled frame.
-  // nmethod could be deoptimized by the time we get here
-  // so no update to the caller is needed.
+  // If the frame isn't a live compiled frame (i.e. deoptimized by the time we get here), no IC clearing must be done
+  // for the caller. However, when the caller is C2 compiled and the callee a C1 or C2 compiled method, then we still
+  // need to figure out whether it was an optimized virtual call with an inline type receiver. Otherwise, we end up
+  // using the wrong method entry point and accidentally skip the buffering of the receiver.
+  methodHandle callee_method = find_callee_method(caller_does_not_scalarize, CHECK_(methodHandle()));
+  const bool caller_is_compiled_and_not_deoptimized = caller.is_compiled_frame() && !caller.is_deoptimized_frame();
+  const bool caller_is_continuation_enter_intrinsic =
+      caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic();
+  const bool do_IC_clearing = caller_is_compiled_and_not_deoptimized || caller_is_continuation_enter_intrinsic;
 
-  if ((caller.is_compiled_frame() && !caller.is_deoptimized_frame()) ||
-      (caller.is_native_frame() && caller.cb()->as_nmethod()->method()->is_continuation_enter_intrinsic())) {
+  const bool callee_compiled_with_scalarized_receiver = callee_method->has_compiled_code() &&
+                                                        !callee_method()->is_static() &&
+                                                        callee_method()->is_scalarized_arg(0);
+  const bool compute_is_optimized = !caller_does_not_scalarize && callee_compiled_with_scalarized_receiver;
 
+  if (do_IC_clearing || compute_is_optimized) {
     address pc = caller.pc();
 
     nmethod* caller_nm = CodeCache::find_nmethod(pc);
@@ -1831,21 +1840,24 @@ methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_o
       RelocIterator iter(caller_nm, call_addr, call_addr+1);
       bool ret = iter.next(); // Get item
       if (ret) {
-        is_static_call = false;
         is_optimized = false;
         switch (iter.type()) {
           case relocInfo::static_call_type:
-            is_static_call = true;
+            assert(callee_method->is_static(), "must be");
           case relocInfo::opt_virtual_call_type: {
             is_optimized = (iter.type() == relocInfo::opt_virtual_call_type);
-            CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
-            cdc->set_to_clean();
+            if (do_IC_clearing) {
+              CompiledDirectCall* cdc = CompiledDirectCall::at(call_addr);
+              cdc->set_to_clean();
+            }
             break;
           }
           case relocInfo::virtual_call_type: {
-            // compiled, dispatched call (which used to call an interpreted method)
-            CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
-            inline_cache->set_to_clean();
+            if (do_IC_clearing) {
+              // compiled, dispatched call (which used to call an interpreted method)
+              CompiledIC* inline_cache = CompiledIC_at(caller_nm, call_addr);
+              inline_cache->set_to_clean();
+            }
             break;
           }
           default:
@@ -1855,8 +1867,6 @@ methodHandle SharedRuntime::reresolve_call_site(bool& is_static_call, bool& is_o
     }
   }
 
-  methodHandle callee_method = find_callee_method(caller_does_not_scalarize, CHECK_(methodHandle()));
-
 #ifndef PRODUCT
   AtomicAccess::inc(&_wrong_method_ctr);
 
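Taken together, the hunks above decouple two concerns in `reresolve_call_site`: clearing the caller's inline cache (only done for a live, non-deoptimized compiled caller or the continuation-enter intrinsic) and computing `is_optimized` (also needed when a scalarizing C2 caller reaches a callee compiled with a scalarized receiver, so the correct entry point is chosen and the receiver gets buffered). Below is a minimal standalone sketch of that decision logic; the types are simplified stand-ins, not HotSpot classes, and only the overall structure mirrors the patch.

```cpp
// Standalone model of the reworked decision logic in reresolve_call_site().
// All names and types here are illustrative stand-ins, not HotSpot code.
#include <cassert>
#include <cstdio>

enum class RelocType { static_call, opt_virtual_call, virtual_call };

struct CallSiteInfo {
  bool caller_is_live_compiled;        // compiled frame, not deoptimized
  bool caller_is_cont_enter_intrinsic; // native frame of the continuation-enter intrinsic
  bool caller_does_not_scalarize;      // caller was compiled by C1
  bool callee_scalarizes_receiver;     // callee has compiled code with a scalarized receiver (arg 0)
  RelocType reloc;
};

struct Resolution {
  bool is_optimized = false;
  bool cleared_ic = false;
};

Resolution reresolve(const CallSiteInfo& cs) {
  Resolution r;
  // IC clearing is only needed for a live compiled caller (or the
  // continuation-enter intrinsic); a deoptimized caller needs no update.
  const bool do_ic_clearing = cs.caller_is_live_compiled || cs.caller_is_cont_enter_intrinsic;
  // Even without IC clearing, a scalarizing caller calling a callee with a
  // scalarized receiver must learn whether the call was optimized, so the
  // right entry point is picked and the receiver is buffered.
  const bool compute_is_optimized = !cs.caller_does_not_scalarize && cs.callee_scalarizes_receiver;
  if (!(do_ic_clearing || compute_is_optimized)) return r;

  switch (cs.reloc) {
    case RelocType::static_call:      // falls through, like the relocInfo switch
    case RelocType::opt_virtual_call:
      r.is_optimized = (cs.reloc == RelocType::opt_virtual_call);
      r.cleared_ic = do_ic_clearing;  // CompiledDirectCall::set_to_clean() in the real code
      break;
    case RelocType::virtual_call:
      r.cleared_ic = do_ic_clearing;  // CompiledIC::set_to_clean() in the real code
      break;
  }
  return r;
}

int main() {
  // Deoptimized C2 caller, callee with scalarized receiver, optimized virtual call:
  // no IC is cleared, but is_optimized must still come out true.
  Resolution r = reresolve({false, false, false, true, RelocType::opt_virtual_call});
  assert(r.is_optimized && !r.cleared_ic);
  printf("is_optimized=%d cleared_ic=%d\n", r.is_optimized, r.cleared_ic);
  return 0;
}
```

The case exercised in `main()` is the behavioral point of the patch: `is_optimized` is now computed even when the caller has already been deoptimized and no IC clearing takes place.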