package org.hibernate.event.internal;

import java.lang.invoke.MethodHandles;
-import java.util.Map;

import org.hibernate.HibernateException;
import org.hibernate.Interceptor;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.Cascade;
import org.hibernate.engine.internal.CascadePoint;
-import org.hibernate.engine.internal.Collections;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.spi.ActionQueue;
import org.hibernate.engine.spi.CascadingActions;

import org.jboss.logging.Logger;

+import static org.hibernate.engine.internal.Collections.processUnreachableCollection;
import static org.hibernate.engine.internal.Collections.skipRemoval;

/**
@@ -126,11 +125,12 @@ protected void logFlushResults(FlushEvent event) {
	 * any newly referenced entity that must be passed to saveOrUpdate(),
	 * and also apply orphan delete
	 */
-	private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext) throws HibernateException {
+	private void prepareEntityFlushes(EventSource session, PersistenceContext persistenceContext)
+			throws HibernateException {
		LOG.debug( "Processing flush-time cascades" );
		final PersistContext context = PersistContext.create();
		// safe from concurrent modification because of how concurrentEntries() is implemented on IdentityMap
-		for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
+		for ( var me : persistenceContext.reentrantSafeEntityEntries() ) {
//		for ( Map.Entry me : IdentityMap.concurrentEntries( persistenceContext.getEntityEntries() ) ) {
			final EntityEntry entry = me.getValue();
			if ( flushable( entry ) ) {
@@ -145,7 +145,7 @@ void checkForTransientReferences(EventSource session, PersistenceContext persist
		// processed, so that all entities which will be persisted are
		// persistent when we do the check (I wonder if we could move this
		// into Nullability, instead of abusing the Cascade infrastructure)
-		for ( Map.Entry<Object, EntityEntry> me : persistenceContext.reentrantSafeEntityEntries() ) {
+		for ( var me : persistenceContext.reentrantSafeEntityEntries() ) {
			final EntityEntry entry = me.getValue();
			if ( checkable( entry ) ) {
				Cascade.cascade(
@@ -192,11 +192,10 @@ private void prepareCollectionFlushes(PersistenceContext persistenceContext) thr
		// Initialize dirty flags for arrays + collections with composite elements
		// and reset reached, doupdate, etc.
		LOG.debug( "Dirty checking collections" );
-		final Map<PersistentCollection<?>, CollectionEntry> collectionEntries =
-				persistenceContext.getCollectionEntries();
+		final var collectionEntries = persistenceContext.getCollectionEntries();
		if ( collectionEntries != null ) {
-			for ( Map.Entry<PersistentCollection<?>, CollectionEntry> entry :
-					( (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries ).toArray() ) {
+			final var identityMap = (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries;
+			for ( var entry : identityMap.toArray() ) {
				entry.getValue().preFlush( entry.getKey() );
			}
		}
@@ -221,12 +220,12 @@ private int flushEntities(final FlushEvent event, final PersistenceContext persi
		// collections that are changing roles. This might cause entities
		// to be loaded.
		// So this needs to be safe from concurrent modification problems.
-		final Map.Entry<Object, EntityEntry>[] entityEntries = persistenceContext.reentrantSafeEntityEntries();
+		final var entityEntries = persistenceContext.reentrantSafeEntityEntries();
		final int count = entityEntries.length;

		FlushEntityEvent entityEvent = null; //allow reuse of the event as it's heavily allocated in certain use cases
		int eventGenerationId = 0; //Used to double-check the instance reuse won't cause problems
-		for ( Map.Entry<Object, EntityEntry> me : entityEntries ) {
+		for ( var me : entityEntries ) {
			// Update the status of the object and if necessary, schedule an update
			final EntityEntry entry = me.getValue();
			final Status status = entry.getStatus();
@@ -270,17 +269,18 @@ private FlushEntityEvent createOrReuseEventInstance(
	private int flushCollections(final EventSource session, final PersistenceContext persistenceContext)
			throws HibernateException {
		LOG.trace( "Processing unreferenced collections" );
-		final Map<PersistentCollection<?>, CollectionEntry> collectionEntries = persistenceContext.getCollectionEntries();
+		final var collectionEntries = persistenceContext.getCollectionEntries();
		final int count;
		if ( collectionEntries == null ) {
			count = 0;
		}
		else {
			count = collectionEntries.size();
-			for ( Map.Entry<PersistentCollection<?>, CollectionEntry> me : ( (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries ).toArray() ) {
+			final var identityMap = (InstanceIdentityMap<PersistentCollection<?>, CollectionEntry>) collectionEntries;
+			for ( var me : identityMap.toArray() ) {
				final CollectionEntry ce = me.getValue();
				if ( !ce.isReached() && !ce.isIgnore() ) {
-					Collections.processUnreachableCollection( me.getKey(), session );
+					processUnreachableCollection( me.getKey(), session );
				}
			}
		}
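The hunks above repeat two mechanical refactorings: spelled-out Map.Entry<...> element types are replaced by local-variable type inference (var), and Collections.processUnreachableCollection is statically imported so call sites drop the class qualifier. The following is a minimal, self-contained sketch of that pattern against a plain IdentityHashMap; the class name and map contents are illustrative stand-ins, not part of the Hibernate codebase.

import java.util.IdentityHashMap;
import java.util.Map;

import static java.util.Map.entry; // static import: call sites drop the "Map." qualifier

public class VarIterationSketch {
	public static void main(String[] args) {
		// Illustrative stand-in for an identity-keyed entry map such as InstanceIdentityMap.
		Map<String, Integer> collectionEntries = new IdentityHashMap<>();
		collectionEntries.put( "roleA", 1 );
		collectionEntries.put( "roleB", 2 );

		// Before: the element type is spelled out in full.
		for ( Map.Entry<String, Integer> me : collectionEntries.entrySet() ) {
			System.out.println( me.getKey() + " -> " + me.getValue() );
		}

		// After: 'var' infers Map.Entry<String, Integer>; behavior is identical.
		for ( var me : collectionEntries.entrySet() ) {
			System.out.println( me.getKey() + " -> " + me.getValue() );
		}

		// A static import plays the same role for methods: entry(k, v) instead of Map.entry(k, v).
		var extra = entry( "roleC", 3 );
		System.out.println( extra );
	}
}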