@@ -5,7 +5,6 @@
 package org.hibernate.event.internal;

 import java.lang.invoke.MethodHandles;
-import java.util.Map;

 import org.hibernate.HibernateException;
 import org.hibernate.Interceptor;
@@ -16,7 +15,6 @@
 import org.hibernate.collection.spi.PersistentCollection;
 import org.hibernate.engine.internal.Cascade;
 import org.hibernate.engine.internal.CascadePoint;
-import org.hibernate.engine.internal.Collections;
 import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
 import org.hibernate.engine.spi.ActionQueue;
 import org.hibernate.engine.spi.CascadingActions;
@@ -39,6 +37,7 @@

 import org.jboss.logging.Logger;

+import static org.hibernate.engine.internal.Collections.processUnreachableCollection;
 import static org.hibernate.engine.internal.Collections.skipRemoval;

 /**
@@ -126,11 +125,12 @@ protected void logFlushResults(FlushEvent event) {
 	 * any newly referenced entity that must be passed to saveOrUpdate(),
 	 * and also apply orphan delete
 	 */
-	private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext) throws HibernateException {
+	private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext)
+			throws HibernateException {
 		LOG.debug( "Processing flush-time cascades" );
 		final PersistContext context = PersistContext.create();
 		// safe from concurrent modification because of how concurrentEntries() is implemented on IdentityMap
-		for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
+		for ( var me : persistenceContext.reentrantSafeEntityEntries() ) {
 //		for ( Map.Entry me : IdentityMap.concurrentEntries( persistenceContext.getEntityEntries() ) ) {
 			final EntityEntry entry = me.getValue();
 			if ( flushable( entry ) ) {
@@ -145,7 +145,7 @@ void checkForTransientReferences(EventSource session, PersistenceContext persist
 		// processed, so that all entities which will be persisted are
 		// persistent when we do the check (I wonder if we could move this
 		// into Nullability, instead of abusing the Cascade infrastructure)
-		for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
+		for ( var me : persistenceContext.reentrantSafeEntityEntries() ) {
 			final EntityEntry entry = me.getValue();
 			if ( checkable( entry ) ) {
 				Cascade.cascade(
@@ -192,11 +192,10 @@ private void prepareCollectionFlushes(PersistenceContext persistenceContext) thr
 		// Initialize dirty flags for arrays + collections with composite elements
 		// and reset reached, doupdate, etc.
 		LOG.debug( "Dirty checking collections" );
-		final Map<PersistentCollection<?>, CollectionEntry> collectionEntries =
-				persistenceContext.getCollectionEntries();
+		final var collectionEntries = persistenceContext.getCollectionEntries();
 		if ( collectionEntries != null ) {
-			for ( Map.Entry<PersistentCollection<?>, CollectionEntry> entry :
-					( (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries ).toArray() ) {
+			final var identityMap = (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries;
+			for ( var entry : identityMap.toArray() ) {
 				entry.getValue().preFlush( entry.getKey() );
 			}
 		}
@@ -221,12 +220,12 @@ private int flushEntities(final FlushEvent event, final PersistenceContext persi
 		// collections that are changing roles. This might cause entities
 		// to be loaded.
 		// So this needs to be safe from concurrent modification problems.
-		final Map.Entry<Object, EntityEntry>[] entityEntries = persistenceContext.reentrantSafeEntityEntries();
+		final var entityEntries = persistenceContext.reentrantSafeEntityEntries();
 		final int count = entityEntries.length;

 		FlushEntityEvent entityEvent = null; //allow reuse of the event as it's heavily allocated in certain use cases
 		int eventGenerationId = 0; //Used to double-check the instance reuse won't cause problems
-		for ( Map.Entry<Object, EntityEntry> me : entityEntries ) {
+		for ( var me : entityEntries ) {
 			// Update the status of the object and if necessary, schedule an update
 			final EntityEntry entry = me.getValue();
 			final Status status = entry.getStatus();
@@ -270,17 +269,18 @@ private FlushEntityEvent createOrReuseEventInstance(
 	private int flushCollections(final EventSource session, final PersistenceContext persistenceContext)
 			throws HibernateException {
 		LOG.trace( "Processing unreferenced collections" );
-		final Map<PersistentCollection<?>, CollectionEntry> collectionEntries = persistenceContext.getCollectionEntries();
+		final var collectionEntries = persistenceContext.getCollectionEntries();
 		final int count;
 		if ( collectionEntries == null ) {
 			count = 0;
 		}
 		else {
 			count = collectionEntries.size();
-			for ( Map.Entry<PersistentCollection<?>, CollectionEntry> me : ( (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries ).toArray() ) {
+			final var identityMap = (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries;
+			for ( var me : identityMap.toArray() ) {
 				final CollectionEntry ce = me.getValue();
 				if ( !ce.isReached() && !ce.isIgnore() ) {
-					Collections.processUnreachableCollection( me.getKey(), session );
+					processUnreachableCollection( me.getKey(), session );
 				}
 			}
 		}
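
For reference, a minimal standalone sketch of the two patterns this commit applies throughout the class: replacing spelled-out Map.Entry element types with 'var' on enhanced-for headers, and calling a static helper through a static import instead of through its owning class. It uses only JDK types; IdentityHashMap and Objects stand in for Hibernate's InstanceIdentityMap and org.hibernate.engine.internal.Collections, and the class and variable names are illustrative, not part of the Hibernate codebase.

import java.util.IdentityHashMap;
import java.util.Map;
import java.util.Objects;

import static java.util.Objects.requireNonNull;

// Illustrative only: JDK stand-ins for the Hibernate internals touched by this commit.
public class FlushLoopSketch {

	public static void main(String[] args) {
		// Identity-based map: keys are compared by reference, the way collection
		// instances are tracked during a flush.
		Map<Object, String> collectionEntries = new IdentityHashMap<>();
		collectionEntries.put( new Object(), "reached" );
		collectionEntries.put( new Object(), "unreachable" );

		// Before: the entry type is spelled out on the loop header and the
		// helper is called through its owning class.
		for ( Map.Entry<Object, String> me : collectionEntries.entrySet() ) {
			System.out.println( Objects.requireNonNull( me.getValue() ) );
		}

		// After: 'var' infers Map.Entry<Object, String> from entrySet(), and the
		// statically imported helper is called unqualified, mirroring the switch
		// from Collections.processUnreachableCollection(...) to a static import.
		for ( var me : collectionEntries.entrySet() ) {
			System.out.println( requireNonNull( me.getValue() ) );
		}
	}
}

Both loops compile to the same bytecode shape: 'var' keeps the compile-time type Map.Entry<Object, String>, so getKey() and getValue() resolve exactly as before; only the source-level verbosity changes.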