diff --git a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyServerConfiguration.java b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyServerConfiguration.java index d0cb0156cf45..9aa1e7c3f5a6 100644 --- a/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyServerConfiguration.java +++ b/hibernate-community-dialects/src/main/java/org/hibernate/community/dialect/HANALegacyServerConfiguration.java @@ -17,9 +17,9 @@ import org.hibernate.engine.jdbc.dialect.spi.DialectResolutionInfo; import org.hibernate.internal.util.StringHelper; import org.hibernate.internal.util.config.ConfigurationHelper; +import org.jboss.logging.Logger; import static org.hibernate.cfg.DialectSpecificSettings.HANA_MAX_LOB_PREFETCH_SIZE; -import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER; /** * Utility class that extracts some initial configuration from the database for {@link HANALegacyDialect}. @@ -60,9 +60,8 @@ public static HANALegacyServerConfiguration fromDialectResolutionInfo(DialectRes } catch (SQLException e) { // Ignore - CORE_LOGGER.debug( - "An error occurred while trying to determine the database version.", - e ); + Logger.getLogger( HANALegacyServerConfiguration.class ) + .debug( "An error occurred while trying to determine the database version.", e ); } if (databaseMajorVersion > 0 && databaseMajorVersion < 4) { @@ -77,9 +76,8 @@ public static HANALegacyServerConfiguration fromDialectResolutionInfo(DialectRes } catch (SQLException e) { // Ignore - CORE_LOGGER.debug( - "An error occurred while trying to determine the value of the HANA parameter indexserver.ini / session / max_lob_prefetch_size.", - e ); + Logger.getLogger( HANALegacyServerConfiguration.class ) + .debug( "An error occurred while trying to determine the value of the HANA parameter indexserver.ini / session / max_lob_prefetch_size.", e ); } } else { @@ -115,7 +113,8 @@ public static DatabaseVersion determineDatabaseVersion(DialectResolutionInfo inf } catch (SQLException e) { // Ignore - CORE_LOGGER.debug( "An error occurred while trying to determine the HANA Cloud version.", e ); + Logger.getLogger( HANALegacyServerConfiguration.class ) + .debug( "An error occurred while trying to determine the HANA Cloud version.", e ); } } return databaseVersion == null diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/ColumnOrderingStrategyStandard.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/ColumnOrderingStrategyStandard.java index 1e8632ebcfd2..7513bbb5ca88 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/ColumnOrderingStrategyStandard.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/ColumnOrderingStrategyStandard.java @@ -10,14 +10,12 @@ import java.util.List; import org.hibernate.boot.Metadata; -import org.hibernate.dialect.Dialect; import org.hibernate.dialect.temptable.TemporaryTableColumn; import org.hibernate.engine.jdbc.Size; import org.hibernate.mapping.Column; import org.hibernate.mapping.Constraint; import org.hibernate.mapping.PrimaryKey; import org.hibernate.mapping.Table; -import org.hibernate.mapping.UniqueKey; import org.hibernate.mapping.UserDefinedObjectType; import static java.lang.Math.log; @@ -48,7 +46,7 @@ public List orderConstraintColumns(Constraint constraint, Metadata metad // We try to find uniqueKey constraint containing only primary key. // This uniqueKey then orders primaryKey columns. 
Otherwise, order as usual. if ( constraint instanceof PrimaryKey primaryKey ) { - final UniqueKey uniqueKey = primaryKey.getOrderingUniqueKey(); + final var uniqueKey = primaryKey.getOrderingUniqueKey(); if ( uniqueKey != null ) { return uniqueKey.getColumns(); } @@ -77,7 +75,7 @@ protected ColumnComparator(Metadata metadata) { @Override public int compare(Column o1, Column o2) { - final Dialect dialect = metadata.getDatabase().getDialect(); + final var dialect = metadata.getDatabase().getDialect(); final int physicalSizeInBytes1 = physicalSizeInBytes( o1.getSqlTypeCode( metadata ), o1.getColumnSize( dialect, metadata ), diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Database.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Database.java index 1ea1fd57bd25..23ba3cf59a64 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Database.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Database.java @@ -138,8 +138,8 @@ public Namespace.Name getPhysicalImplicitNamespaceName() { } public Namespace locateNamespace(Identifier catalogName, Identifier schemaName) { - final Namespace.Name name = new Namespace.Name( catalogName, schemaName ); - final Namespace namespace = namespaceMap.get( name ); + final var name = new Namespace.Name( catalogName, schemaName ); + final var namespace = namespaceMap.get( name ); return namespace == null ? makeNamespace( name ) : namespace; } diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/QualifiedNameParser.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/QualifiedNameParser.java index 2031523a93f6..2ef9b7b1bb13 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/QualifiedNameParser.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/QualifiedNameParser.java @@ -38,7 +38,7 @@ public NameParts(Identifier catalogName, Identifier schemaName, Identifier objec } private static String toQualifiedText(Identifier catalogName, Identifier schemaName, Identifier objectName) { - final StringBuilder qualified = new StringBuilder(); + final var qualified = new StringBuilder(); if ( catalogName != null ) { qualified.append( catalogName ).append( '.' 
); } @@ -106,15 +106,9 @@ public NameParts parse(String text, Identifier defaultCatalog, Identifier defaul throw new IllegalIdentifierException( "Object name to parse must be specified, but found null" ); } - final int quoteCharCount = StringHelper.count( text, "`" ); - final boolean wasQuotedInEntirety = quoteCharCount == 2 && text.startsWith( "`" ) && text.endsWith( "`" ); - - if ( wasQuotedInEntirety ) { - return new NameParts( - defaultCatalog, - defaultSchema, - Identifier.toIdentifier( unquote( text ), true ) - ); + if ( isQuotedInEntirety( text ) ) { + return new NameParts( defaultCatalog, defaultSchema, + Identifier.toIdentifier( unquote( text ), true ) ); } String catalogName = null; @@ -171,12 +165,18 @@ else if ( defaultCatalog != null ) { } return new NameParts( - Identifier.toIdentifier( catalogName, wasQuotedInEntirety||catalogWasQuoted ), - Identifier.toIdentifier( schemaName, wasQuotedInEntirety||schemaWasQuoted ), - Identifier.toIdentifier( name, wasQuotedInEntirety||nameWasQuoted ) + Identifier.toIdentifier( catalogName, catalogWasQuoted ), + Identifier.toIdentifier( schemaName, schemaWasQuoted ), + Identifier.toIdentifier( name, nameWasQuoted ) ); } + private static boolean isQuotedInEntirety(String text) { + return StringHelper.count( text, "`" ) == 2 + && text.startsWith( "`" ) + && text.endsWith( "`" ); + } + private static String unquote(String text) { return text.substring( 1, text.length() - 1 ); } diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Sequence.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Sequence.java index 5b1ad14d82c0..8d7d176250fc 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Sequence.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/Sequence.java @@ -56,11 +56,7 @@ public Sequence( int incrementSize, String options) { this.contributor = contributor; - this.name = new QualifiedSequenceName( - catalogName, - schemaName, - sequenceName - ); + this.name = new QualifiedSequenceName( catalogName, schemaName, sequenceName ); this.exportIdentifier = name.render(); this.initialValue = initialValue; this.incrementSize = incrementSize; diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/SimpleAuxiliaryDatabaseObject.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/SimpleAuxiliaryDatabaseObject.java index 1d5a49c2fe44..479ac8dc0b38 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/SimpleAuxiliaryDatabaseObject.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/SimpleAuxiliaryDatabaseObject.java @@ -7,7 +7,8 @@ import java.util.Set; import org.hibernate.boot.model.naming.Identifier; -import org.hibernate.internal.util.StringHelper; + +import static org.hibernate.internal.util.StringHelper.replace; /** * A simple implementation of {@link AbstractAuxiliaryDatabaseObject} in which the @@ -144,10 +145,20 @@ protected String getSchemaName() { } private String injectCatalogAndSchema(String ddlString, SqlStringGenerationContext context) { - Identifier defaultedCatalogName = context.catalogWithDefault( catalogName == null ? null : context.toIdentifier( catalogName ) ); - Identifier defaultedSchemaName = context.schemaWithDefault( schemaName == null ? null : context.toIdentifier( schemaName ) ); - String rtn = StringHelper.replace( ddlString, CATALOG_NAME_PLACEHOLDER, defaultedCatalogName == null ? 
"" : defaultedCatalogName.getText() ); - rtn = StringHelper.replace( rtn, SCHEMA_NAME_PLACEHOLDER, defaultedSchemaName == null ? "" : defaultedSchemaName.getText() ); - return rtn; + final Identifier defaultedCatalogName = + context.catalogWithDefault( catalogName == null ? null + : context.toIdentifier( catalogName ) ); + final Identifier defaultedSchemaName = + context.schemaWithDefault( schemaName == null ? null + : context.toIdentifier( schemaName ) ); + String result = + replace( ddlString, + CATALOG_NAME_PLACEHOLDER, + defaultedCatalogName == null ? "" : defaultedCatalogName.getText() ); + result = + replace( result, + SCHEMA_NAME_PLACEHOLDER, + defaultedSchemaName == null ? "" : defaultedSchemaName.getText() ); + return result; } } diff --git a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/internal/SqlStringGenerationContextImpl.java b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/internal/SqlStringGenerationContextImpl.java index f2e6125dd8ec..ee737e7cca68 100644 --- a/hibernate-core/src/main/java/org/hibernate/boot/model/relational/internal/SqlStringGenerationContextImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/boot/model/relational/internal/SqlStringGenerationContextImpl.java @@ -14,6 +14,7 @@ import org.hibernate.boot.model.relational.QualifiedTableName; import org.hibernate.boot.model.relational.SqlStringGenerationContext; import org.hibernate.cfg.AvailableSettings; +import org.hibernate.cfg.MappingSettings; import org.hibernate.dialect.Dialect; import org.hibernate.engine.jdbc.env.spi.IdentifierHelper; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; @@ -33,8 +34,8 @@ public static SqlStringGenerationContext fromConfigurationMap( JdbcEnvironment jdbcEnvironment, Database database, Map configurationMap) { - String defaultCatalog = (String) configurationMap.get( AvailableSettings.DEFAULT_CATALOG ); - String defaultSchema = (String) configurationMap.get( AvailableSettings.DEFAULT_SCHEMA ); + final String defaultCatalog = (String) configurationMap.get( MappingSettings.DEFAULT_CATALOG ); + final String defaultSchema = (String) configurationMap.get( MappingSettings.DEFAULT_SCHEMA ); return create( jdbcEnvironment, database, defaultCatalog, defaultSchema, false ); } @@ -48,8 +49,8 @@ public static SqlStringGenerationContext fromConfigurationMapForMigration( JdbcEnvironment jdbcEnvironment, Database database, Map configurationMap) { - String defaultCatalog = (String) configurationMap.get( AvailableSettings.DEFAULT_CATALOG ); - String defaultSchema = (String) configurationMap.get( AvailableSettings.DEFAULT_SCHEMA ); + final String defaultCatalog = (String) configurationMap.get( MappingSettings.DEFAULT_CATALOG ); + final String defaultSchema = (String) configurationMap.get( MappingSettings.DEFAULT_SCHEMA ); return create( jdbcEnvironment, database, defaultCatalog, defaultSchema, true ); } @@ -74,27 +75,34 @@ private static SqlStringGenerationContext create( String defaultCatalog, String defaultSchema, boolean forMigration) { - final Namespace.Name implicitNamespaceName = database.getPhysicalImplicitNamespaceName(); - final IdentifierHelper identifierHelper = jdbcEnvironment.getIdentifierHelper(); - final NameQualifierSupport nameQualifierSupport = jdbcEnvironment.getNameQualifierSupport(); - - Identifier actualDefaultCatalog = null; - if ( nameQualifierSupport.supportsCatalogs() ) { - actualDefaultCatalog = identifierHelper.toIdentifier( defaultCatalog ); - if ( actualDefaultCatalog == null ) { - actualDefaultCatalog = 
implicitNamespaceName.catalog(); - } - } + final var implicitNamespaceName = database.getPhysicalImplicitNamespaceName(); + final var identifierHelper = jdbcEnvironment.getIdentifierHelper(); + final var nameQualifierSupport = jdbcEnvironment.getNameQualifierSupport(); + final Identifier actualDefaultCatalog = + actualDefaultCatalog( defaultCatalog, nameQualifierSupport, identifierHelper, implicitNamespaceName ); + final Identifier actualDefaultSchema = + actualDefaultSchema( defaultSchema, nameQualifierSupport, identifierHelper, implicitNamespaceName ); + return new SqlStringGenerationContextImpl( jdbcEnvironment, actualDefaultCatalog, actualDefaultSchema, forMigration ); + } - Identifier actualDefaultSchema = null; + private static Identifier actualDefaultSchema(String defaultSchema, NameQualifierSupport nameQualifierSupport, IdentifierHelper identifierHelper, Namespace.Name implicitNamespaceName) { if ( nameQualifierSupport.supportsSchemas() ) { - actualDefaultSchema = identifierHelper.toIdentifier( defaultSchema ); - if ( defaultSchema == null ) { - actualDefaultSchema = implicitNamespaceName.schema(); - } + Identifier actualDefaultSchema = identifierHelper.toIdentifier( defaultSchema ); + return actualDefaultSchema == null ? implicitNamespaceName.schema() : actualDefaultSchema; + } + else { + return null; } + } - return new SqlStringGenerationContextImpl( jdbcEnvironment, actualDefaultCatalog, actualDefaultSchema, forMigration ); + private static Identifier actualDefaultCatalog(String defaultCatalog, NameQualifierSupport nameQualifierSupport, IdentifierHelper identifierHelper, Namespace.Name implicitNamespaceName) { + if ( nameQualifierSupport.supportsCatalogs() ) { + final Identifier actualDefaultCatalog = identifierHelper.toIdentifier( defaultCatalog ); + return actualDefaultCatalog == null ? implicitNamespaceName.catalog() : actualDefaultCatalog; + } + else { + return null; + } } public static SqlStringGenerationContext forTests(JdbcEnvironment jdbcEnvironment) { @@ -123,12 +131,12 @@ private SqlStringGenerationContextImpl( Identifier defaultCatalog, Identifier defaultSchema, boolean migration) { - this.dialect = jdbcEnvironment.getDialect(); - this.identifierHelper = jdbcEnvironment.getIdentifierHelper(); - this.qualifiedObjectNameFormatter = jdbcEnvironment.getQualifiedObjectNameFormatter(); this.defaultCatalog = defaultCatalog; this.defaultSchema = defaultSchema; this.migration = migration; + dialect = jdbcEnvironment.getDialect(); + identifierHelper = jdbcEnvironment.getIdentifierHelper(); + qualifiedObjectNameFormatter = jdbcEnvironment.getQualifiedObjectNameFormatter(); } @Override @@ -138,7 +146,9 @@ public Dialect getDialect() { @Override public Identifier toIdentifier(String text) { - return identifierHelper != null ? identifierHelper.toIdentifier( text ) : Identifier.toIdentifier( text ); + return identifierHelper != null + ? 
identifierHelper.toIdentifier( text ) + : Identifier.toIdentifier( text ); } @Override @@ -168,16 +178,21 @@ public String format(QualifiedName qualifiedName) { @Override public String formatWithoutCatalog(QualifiedSequenceName qualifiedName) { - QualifiedSequenceName nameToFormat; + return qualifiedObjectNameFormatter.format( nameToFormat( qualifiedName ), dialect ); + } + + private QualifiedSequenceName nameToFormat(QualifiedSequenceName qualifiedName) { if ( qualifiedName.getCatalogName() != null || qualifiedName.getSchemaName() == null && defaultSchema != null ) { - nameToFormat = new QualifiedSequenceName( null, - schemaWithDefault( qualifiedName.getSchemaName() ), qualifiedName.getSequenceName() ); + return new QualifiedSequenceName( + null, + schemaWithDefault( qualifiedName.getSchemaName() ), + qualifiedName.getSequenceName() + ); } else { - nameToFormat = qualifiedName; + return qualifiedName; } - return qualifiedObjectNameFormatter.format( nameToFormat, dialect ); } @Override diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/BytecodeEnhancementLogging.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/BytecodeEnhancementLogging.java index 991d8ef41bf9..5247e46e47b1 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/BytecodeEnhancementLogging.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/BytecodeEnhancementLogging.java @@ -32,7 +32,7 @@ @Internal public interface BytecodeEnhancementLogging extends BasicLogger { String LOGGER_NAME = SubSystemLogging.BASE + ".bytecode.enhancement"; - BytecodeEnhancementLogging LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), BytecodeEnhancementLogging.class, LOGGER_NAME ); + BytecodeEnhancementLogging ENHANCEMENT_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), BytecodeEnhancementLogging.class, LOGGER_NAME ); // ---- trace messages ---- @LogMessage(level = TRACE) diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/BiDirectionalAssociationHandler.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/BiDirectionalAssociationHandler.java index 5c3bb89d6bf9..a0452a453170 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/BiDirectionalAssociationHandler.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/BiDirectionalAssociationHandler.java @@ -20,7 +20,6 @@ import org.hibernate.bytecode.enhance.internal.bytebuddy.EnhancerImpl.AnnotatedFieldDescription; import org.hibernate.bytecode.enhance.spi.EnhancementException; import org.hibernate.bytecode.enhance.spi.EnhancerConstants; -import org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging; import net.bytebuddy.asm.Advice; import net.bytebuddy.description.annotation.AnnotationDescription; @@ -37,6 +36,8 @@ import net.bytebuddy.jar.asm.Opcodes; import net.bytebuddy.jar.asm.Type; +import static org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging.ENHANCEMENT_LOGGER; + final class BiDirectionalAssociationHandler implements Implementation { static Implementation wrap( @@ -61,8 +62,8 @@ static Implementation wrap( bidirectionalAttributeName = mappedBy; } if ( bidirectionalAttributeName == null || bidirectionalAttributeName.isEmpty() ) { - if ( BytecodeEnhancementLogging.LOGGER.isInfoEnabled() ) { - BytecodeEnhancementLogging.LOGGER.bidirectionalNotManagedCouldNotFindTargetField( + if ( 
ENHANCEMENT_LOGGER.isInfoEnabled() ) { + ENHANCEMENT_LOGGER.bidirectionalNotManagedCouldNotFindTargetField( managedCtClass.getName(), persistentField.getName(), targetEntity.getCanonicalName() @@ -116,8 +117,8 @@ static Implementation wrap( if ( persistentField.hasAnnotation( ManyToMany.class ) ) { if ( persistentField.getType().asErasure().isAssignableTo( Map.class ) || targetType.isAssignableTo( Map.class ) ) { - if ( BytecodeEnhancementLogging.LOGGER.isInfoEnabled() ) { - BytecodeEnhancementLogging.LOGGER.manyToManyInMapNotSupported( + if ( ENHANCEMENT_LOGGER.isInfoEnabled() ) { + ENHANCEMENT_LOGGER.manyToManyInMapNotSupported( managedCtClass.getName(), persistentField.getName() ); @@ -157,8 +158,8 @@ else if ( mtm != null ) { targetClass = mtm.getValue( new MethodDescription.ForLoadedMethod( ManyToMany.class.getDeclaredMethod( "targetEntity" ) ) ); } else { - if ( BytecodeEnhancementLogging.LOGGER.isInfoEnabled() ) { - BytecodeEnhancementLogging.LOGGER.bidirectionalNotManagedCouldNotFindTargetType( + if ( ENHANCEMENT_LOGGER.isInfoEnabled() ) { + ENHANCEMENT_LOGGER.bidirectionalNotManagedCouldNotFindTargetType( managedCtClass.getName(), persistentField.getName() ); @@ -242,8 +243,8 @@ private static String getMappedByManyToMany(AnnotatedFieldDescription target, Ty if ( context.isPersistentField( annotatedF ) && target.getName().equals( getMappedBy( annotatedF, entityType( annotatedF.getType() ), context ) ) && target.getDeclaringType().asErasure().isAssignableTo( entityType( annotatedF.getType() ) ) ) { - if ( BytecodeEnhancementLogging.LOGGER.isTraceEnabled() ) { - BytecodeEnhancementLogging.LOGGER.tracef( + if ( ENHANCEMENT_LOGGER.isTraceEnabled() ) { + ENHANCEMENT_LOGGER.tracef( "mappedBy association for field [%s#%s] is [%s#%s]", target.getDeclaringType().asErasure().getName(), target.getName(), diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerImpl.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerImpl.java index 680c93943e84..e19bf8af977c 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/EnhancerImpl.java @@ -45,7 +45,6 @@ import org.hibernate.engine.spi.CompositeOwner; import org.hibernate.engine.spi.ExtendedSelfDirtinessTracker; import org.hibernate.engine.spi.Managed; -import org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging; import java.lang.annotation.Annotation; import java.lang.reflect.Modifier; @@ -64,6 +63,7 @@ import static net.bytebuddy.matcher.ElementMatchers.isStatic; import static net.bytebuddy.matcher.ElementMatchers.named; import static net.bytebuddy.matcher.ElementMatchers.not; +import static org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging.ENHANCEMENT_LOGGER; import static org.hibernate.bytecode.enhance.internal.bytebuddy.FeatureMismatchException.Feature.ASSOCIATION_MANAGEMENT; import static org.hibernate.bytecode.enhance.internal.bytebuddy.FeatureMismatchException.Feature.DIRTY_CHECK; @@ -170,19 +170,19 @@ private DynamicType.Builder doEnhance(Supplier> builde verifyReEnhancement( managedCtClass, infoAnnotation.load(), enhancementContext ); } // verification succeeded (or not done) - we can simply skip the enhancement - BytecodeEnhancementLogging.LOGGER.skippingAlreadyAnnotated( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.skippingAlreadyAnnotated( managedCtClass.getName() ); return null; 
} // can't effectively enhance interfaces if ( managedCtClass.isInterface() ) { - BytecodeEnhancementLogging.LOGGER.skippingInterface( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.skippingInterface( managedCtClass.getName() ); return null; } // can't effectively enhance records if ( managedCtClass.isRecord() ) { - BytecodeEnhancementLogging.LOGGER.skippingRecord( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.skippingRecord( managedCtClass.getName() ); return null; } @@ -192,7 +192,7 @@ private DynamicType.Builder doEnhance(Supplier> builde return null; } - BytecodeEnhancementLogging.LOGGER.enhancingAsEntity( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.enhancingAsEntity( managedCtClass.getName() ); DynamicType.Builder builder = builderSupplier.get(); builder = builder .implement( constants.INTERFACES_for_ManagedEntity ) @@ -372,7 +372,7 @@ else if ( enhancementContext.isCompositeClass( managedCtClass ) ) { return null; } - BytecodeEnhancementLogging.LOGGER.enhancingAsComposite( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.enhancingAsComposite( managedCtClass.getName() ); DynamicType.Builder builder = builderSupplier.get(); builder = builder.implement( constants.INTERFACES_for_ManagedComposite ); @@ -409,19 +409,19 @@ else if ( enhancementContext.isMappedSuperclassClass( managedCtClass ) ) { if ( checkUnsupportedAttributeNaming( managedCtClass, enhancementContext ) ) { return null; } - - BytecodeEnhancementLogging.LOGGER.enhancingAsMappedSuperclass( managedCtClass.getName() ); - - DynamicType.Builder builder = builderSupplier.get(); - builder = builder.implement( constants.INTERFACES_for_ManagedMappedSuperclass ); - return createTransformer( managedCtClass ).applyTo( builder ); + else { + ENHANCEMENT_LOGGER.enhancingAsMappedSuperclass( managedCtClass.getName() ); + DynamicType.Builder builder = builderSupplier.get(); + builder.implement( constants.INTERFACES_for_ManagedMappedSuperclass ); + return createTransformer( managedCtClass ).applyTo( builder ); + } } else if ( enhancementContext.doExtendedEnhancement() ) { - BytecodeEnhancementLogging.LOGGER.extendedEnhancement( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.extendedEnhancement( managedCtClass.getName() ); return createTransformer( managedCtClass ).applyExtended( builderSupplier.get() ); } else { - BytecodeEnhancementLogging.LOGGER.skippingNotEntityOrComposite( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.skippingNotEntityOrComposite( managedCtClass.getName() ); return null; } } @@ -434,7 +434,7 @@ private void verifyReEnhancement( final String enhancementVersion = existingInfo.version(); if ( "ignore".equals( enhancementVersion ) ) { // for testing - BytecodeEnhancementLogging.LOGGER.skippingReEnhancementVersionCheck( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.skippingReEnhancementVersionCheck( managedCtClass.getName() ); } else if ( !Version.getVersionString().equals( enhancementVersion ) ) { throw new VersionMismatchException( managedCtClass, enhancementVersion, Version.getVersionString() ); @@ -605,7 +605,7 @@ private static boolean checkUnsupportedAttributeNaming(TypeDescription managedCt // We shouldn't even be in this method if using LEGACY, see top of this method. 
return switch ( strategy ) { case SKIP -> { - BytecodeEnhancementLogging.LOGGER.propertyAccessorNoFieldSkip( + ENHANCEMENT_LOGGER.propertyAccessorNoFieldSkip( managedCtClass.getName(), fieldName, methodDescription.getName() @@ -662,7 +662,7 @@ private boolean alreadyEnhanced(TypeDescription managedCtClass) { private DynamicType.Builder addInterceptorHandling(DynamicType.Builder builder, TypeDescription managedCtClass) { // interceptor handling is only needed if class has lazy-loadable attributes if ( enhancementContext.hasLazyLoadableAttributes( managedCtClass ) ) { - BytecodeEnhancementLogging.LOGGER.weavingPersistentAttributeInterceptable( managedCtClass.getName() ); + ENHANCEMENT_LOGGER.weavingPersistentAttributeInterceptable( managedCtClass.getName() ); builder = builder.implement( constants.INTERFACES_for_PersistentAttributeInterceptable ); diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/FieldAccessEnhancer.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/FieldAccessEnhancer.java index fb9d6a79cccc..c5dbac7a840c 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/FieldAccessEnhancer.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/FieldAccessEnhancer.java @@ -6,6 +6,7 @@ import static net.bytebuddy.matcher.ElementMatchers.hasDescriptor; import static net.bytebuddy.matcher.ElementMatchers.named; +import static org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging.ENHANCEMENT_LOGGER; import jakarta.persistence.Id; @@ -16,7 +17,6 @@ import org.hibernate.bytecode.enhance.internal.bytebuddy.EnhancerImpl.AnnotatedFieldDescription; import org.hibernate.bytecode.enhance.spi.EnhancementException; import org.hibernate.bytecode.enhance.spi.EnhancerConstants; -import org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging; import net.bytebuddy.asm.AsmVisitorWrapper; import net.bytebuddy.description.field.FieldList; @@ -73,7 +73,7 @@ public void visitFieldInsn(int opcode, String owner, String name, String desc) { && !field.hasAnnotation( Id.class ) && !field.getName().equals( "this$0" ) ) { - BytecodeEnhancementLogging.LOGGER.extendedTransformingFieldAccess( + ENHANCEMENT_LOGGER.extendedTransformingFieldAccess( declaredOwnerType.getName(), field.getName(), instrumentedType.getName(), diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/PersistentAttributeTransformer.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/PersistentAttributeTransformer.java index fc69a1c3e250..eaa5eb739662 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/PersistentAttributeTransformer.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/internal/bytebuddy/PersistentAttributeTransformer.java @@ -7,6 +7,7 @@ import static net.bytebuddy.matcher.ElementMatchers.anyOf; import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith; import static net.bytebuddy.matcher.ElementMatchers.not; +import static org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging.ENHANCEMENT_LOGGER; import java.util.ArrayList; import java.util.Arrays; @@ -17,7 +18,6 @@ import org.hibernate.bytecode.enhance.internal.bytebuddy.EnhancerImpl.AnnotatedFieldDescription; import org.hibernate.bytecode.enhance.spi.EnhancerConstants; -import org.hibernate.bytecode.enhance.internal.BytecodeEnhancementLogging; import 
net.bytebuddy.asm.Advice; import net.bytebuddy.asm.AsmVisitorWrapper; @@ -127,8 +127,8 @@ public static PersistentAttributeTransformer collectPersistentFields( } AnnotatedFieldDescription[] orderedFields = enhancementContext.order( persistentFieldList.toArray( new AnnotatedFieldDescription[0] ) ); - if ( BytecodeEnhancementLogging.LOGGER.isTraceEnabled() ) { - BytecodeEnhancementLogging.LOGGER.persistentFieldsForEntity( + if ( ENHANCEMENT_LOGGER.isTraceEnabled() ) { + ENHANCEMENT_LOGGER.persistentFieldsForEntity( managedCtClass.getName(), Arrays.toString( orderedFields ) ); @@ -154,7 +154,7 @@ else if ( !enhancementContext.isMappedSuperclassClass( managedCtSuperclass.asEra return collectInheritPersistentFields( managedCtSuperclass, enhancementContext ); } - BytecodeEnhancementLogging.LOGGER.foundMappedSuperclass( String.valueOf( managedCtSuperclass ) ); + ENHANCEMENT_LOGGER.foundMappedSuperclass( String.valueOf( managedCtSuperclass ) ); List persistentFieldList = new ArrayList<>(); diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/BytecodeInterceptorLogging.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/BytecodeInterceptorLogging.java index 44f8d72799eb..849ad0d467b9 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/BytecodeInterceptorLogging.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/BytecodeInterceptorLogging.java @@ -33,7 +33,7 @@ public interface BytecodeInterceptorLogging extends BasicLogger { String LOGGER_NAME = SubSystemLogging.BASE + ".bytecode.interceptor"; Logger LOGGER = Logger.getLogger( LOGGER_NAME ); - BytecodeInterceptorLogging MESSAGE_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), BytecodeInterceptorLogging.class, LOGGER_NAME ); + BytecodeInterceptorLogging BYTECODE_INTERCEPTOR_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), BytecodeInterceptorLogging.class, LOGGER_NAME ); @LogMessage(level = WARN) @Message( diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementAsProxyLazinessInterceptor.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementAsProxyLazinessInterceptor.java index ef2821ae7d08..3c9745754cf8 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementAsProxyLazinessInterceptor.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementAsProxyLazinessInterceptor.java @@ -18,6 +18,7 @@ import org.hibernate.type.Type; import static java.util.Collections.unmodifiableSet; +import static org.hibernate.bytecode.enhance.spi.interceptor.BytecodeInterceptorLogging.BYTECODE_INTERCEPTOR_LOGGER; import static org.hibernate.engine.internal.ManagedTypeHelper.asPersistentAttributeInterceptable; import static org.hibernate.engine.internal.ManagedTypeHelper.asSelfDirtinessTracker; import static org.hibernate.engine.internal.ManagedTypeHelper.isSelfDirtinessTrackerType; @@ -143,8 +144,8 @@ private Object extractIdValue(Object target, String attributeName) { } public Object forceInitialize(Object target, String attributeName) { - if ( BytecodeInterceptorLogging.MESSAGE_LOGGER.isTraceEnabled() ) { - BytecodeInterceptorLogging.MESSAGE_LOGGER.enhancementAsProxyLazinessForceInitialize( + if ( BYTECODE_INTERCEPTOR_LOGGER.isTraceEnabled() ) { + BYTECODE_INTERCEPTOR_LOGGER.enhancementAsProxyLazinessForceInitialize( entityKey.getEntityName(), 
entityKey.getIdentifier(), attributeName @@ -164,8 +165,8 @@ public Object forceInitialize( String attributeName, SharedSessionContractImplementor session, boolean isTemporarySession) { - if ( BytecodeInterceptorLogging.MESSAGE_LOGGER.isTraceEnabled() ) { - BytecodeInterceptorLogging.MESSAGE_LOGGER.enhancementAsProxyLazinessForceInitialize( + if ( BYTECODE_INTERCEPTOR_LOGGER.isTraceEnabled() ) { + BYTECODE_INTERCEPTOR_LOGGER.enhancementAsProxyLazinessForceInitialize( entityKey.getEntityName(), entityKey.getIdentifier(), attributeName diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementHelper.java b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementHelper.java index 4cb4bede5908..9391bd4f6d0f 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/enhance/spi/interceptor/EnhancementHelper.java @@ -19,6 +19,8 @@ import org.hibernate.mapping.ToOne; import org.hibernate.mapping.Value; +import static org.hibernate.bytecode.enhance.spi.interceptor.BytecodeInterceptorLogging.BYTECODE_INTERCEPTOR_LOGGER; + /** * @author Steve Ebersole */ @@ -42,7 +44,7 @@ public static boolean includeInBaseFetchGroup( if ( ! isEnhanced ) { if ( value instanceof ToOne toOne ) { if ( toOne.isUnwrapProxy() ) { - BytecodeInterceptorLogging.MESSAGE_LOGGER.debugf( + BYTECODE_INTERCEPTOR_LOGGER.debugf( "To-one property `%s#%s` was mapped with LAZY + NO_PROXY but the class was not enhanced", bootMapping.getPersistentClass().getEntityName(), bootMapping.getName() @@ -73,7 +75,7 @@ public static boolean includeInBaseFetchGroup( // however, at the time being that leads to inefficient SQL - so for now // we simply log a message that we are ignoring the `@LazyGroup` for to-ones - BytecodeInterceptorLogging.MESSAGE_LOGGER.lazyGroupIgnoredForToOne( + BYTECODE_INTERCEPTOR_LOGGER.lazyGroupIgnoredForToOne( bootMapping.getPersistentClass().getEntityName(), bootMapping.getName(), bootMapping.getLazyGroup() diff --git a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/bytebuddy/ByteBuddyState.java b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/bytebuddy/ByteBuddyState.java index 45e60a50b4ee..e79942865e70 100644 --- a/hibernate-core/src/main/java/org/hibernate/bytecode/internal/bytebuddy/ByteBuddyState.java +++ b/hibernate-core/src/main/java/org/hibernate/bytecode/internal/bytebuddy/ByteBuddyState.java @@ -4,10 +4,7 @@ */ package org.hibernate.bytecode.internal.bytebuddy; -import java.io.File; -import java.io.IOException; import java.lang.invoke.MethodHandles; -import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import java.util.function.BiFunction; @@ -17,7 +14,6 @@ import net.bytebuddy.description.type.TypeDescription; import org.hibernate.HibernateException; import org.hibernate.bytecode.enhance.internal.bytebuddy.EnhancerImplConstants; -import org.hibernate.bytecode.enhance.spi.EnhancerConstants; import org.hibernate.bytecode.spi.BasicProxyFactory; import org.hibernate.engine.spi.PrimeAmongSecondarySupertypes; import org.hibernate.proxy.ProxyConfiguration; @@ -46,7 +42,8 @@ import static net.bytebuddy.matcher.ElementMatchers.not; import static net.bytebuddy.matcher.ElementMatchers.returns; import static net.bytebuddy.matcher.ElementMatchers.takesNoArguments; -import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER; +import static 
org.hibernate.bytecode.enhance.spi.EnhancerConstants.PERSISTENT_FIELD_READER_PREFIX; +import static org.hibernate.bytecode.enhance.spi.EnhancerConstants.PERSISTENT_FIELD_WRITER_PREFIX; /** * A utility to hold all ByteBuddy related state, as in the current version of @@ -57,8 +54,6 @@ public final class ByteBuddyState { private static final MethodHandles.Lookup LOOKUP = MethodHandles.lookup(); - private static final boolean DEBUG = false; - private final ByteBuddy byteBuddy; private final ProxyDefinitionHelpers proxyDefinitionHelpers = new ProxyDefinitionHelpers(); @@ -79,9 +74,9 @@ public ByteBuddyState() { } ByteBuddyState(ClassFileVersion classFileVersion) { - this.byteBuddy = new ByteBuddy( classFileVersion ).with( TypeValidation.DISABLED ); - this.proxyCache = new TypeCache( TypeCache.Sort.WEAK ); - this.basicProxyCache = new TypeCache( TypeCache.Sort.WEAK ); + byteBuddy = new ByteBuddy( classFileVersion ).with( TypeValidation.DISABLED ); + proxyCache = new TypeCache<>( TypeCache.Sort.WEAK ); + basicProxyCache = new TypeCache<>( TypeCache.Sort.WEAK ); } /** @@ -166,12 +161,9 @@ public Class load(Class referenceClass, Function> rewriteClassFunction) { - DynamicType.Builder builder = rewriteClassFunction.apply( byteBuddy ); - if ( builder == null ) { - return null; - } + var builder = rewriteClassFunction.apply( byteBuddy ); + return builder == null ? null : make( typePool, builder ).getBytes(); - return make( typePool, builder ).getBytes(); } /** @@ -208,7 +200,7 @@ void clearState() { */ public Class load(Class referenceClass, String className, BiFunction> makeClassFunction) { try { - Class result = referenceClass.getClassLoader().loadClass(className); + final var result = referenceClass.getClassLoader().loadClass( className ); if ( result.getClassLoader() == referenceClass.getClassLoader() ) { return result; } @@ -218,10 +210,8 @@ public Class load(Class referenceClass, String className, BiFunction load(Class referenceClass, TypeCache ca referenceClass.getClassLoader(), cacheKey, () -> make( makeProxyFunction.apply( byteBuddy ) ) - .load( - referenceClass.getClassLoader(), - resolveClassLoadingStrategy( referenceClass ) - ) + .load( referenceClass.getClassLoader(), + resolveClassLoadingStrategy( referenceClass ) ) .getLoaded(), cache ); @@ -262,23 +250,7 @@ private Unloaded make(DynamicType.Builder builder) { } private Unloaded make(TypePool typePool, DynamicType.Builder builder) { - Unloaded unloadedClass; - if ( typePool != null ) { - unloadedClass = builder.make( typePool ); - } - else { - unloadedClass = builder.make(); - } - - if ( DEBUG ) { - try { - unloadedClass.saveIn( new File( System.getProperty( "java.io.tmpdir" ) + "/bytebuddy/" ) ); - } - catch (IOException e) { - CORE_LOGGER.warn( "Unable to save generated class %1$s", unloadedClass.getTypeDescription().getName(), e ); - } - } - return unloadedClass; + return typePool == null ? 
builder.make() : builder.make( typePool ); } public EnhancerImplConstants getEnhancerConstants() { @@ -298,27 +270,38 @@ public static class ProxyDefinitionHelpers { private final FieldAccessor.PropertyConfigurable interceptorFieldAccessor; private ProxyDefinitionHelpers() { - this.groovyGetMetaClassFilter = isSynthetic().and( named( "getMetaClass" ) - .and( returns( td -> "groovy.lang.MetaClass".equals( td.getName() ) ) ) ); - this.virtualNotFinalizerFilter = isVirtual().and( not( isFinalizer() ) ); - this.proxyNonInterceptedMethodFilter = nameStartsWith( "$$_hibernate_" ).and( isVirtual() ) - // HHH-15090: Don't apply extended enhancement reader/writer methods to the proxy; - // those need to be executed on the actual entity. - .and( not( nameStartsWith( EnhancerConstants.PERSISTENT_FIELD_READER_PREFIX ) ) ) - .and( not( nameStartsWith( EnhancerConstants.PERSISTENT_FIELD_WRITER_PREFIX ) ) ); + groovyGetMetaClassFilter = + isSynthetic().and( named( "getMetaClass" ) + .and( returns( td -> "groovy.lang.MetaClass".equals( td.getName() ) ) ) ); + virtualNotFinalizerFilter = + isVirtual().and( not( isFinalizer() ) ); + proxyNonInterceptedMethodFilter = + nameStartsWith( "$$_hibernate_" ).and( isVirtual() ) + // HHH-15090: Don't apply extended enhancement reader/writer methods to the proxy; + // those need to be executed on the actual entity. + .and( not( nameStartsWith( PERSISTENT_FIELD_READER_PREFIX ) ) ) + .and( not( nameStartsWith( PERSISTENT_FIELD_WRITER_PREFIX ) ) ); // Populate the toFullyIgnore list - for ( Method m : PrimeAmongSecondarySupertypes.class.getMethods() ) { + for ( var method : PrimeAmongSecondarySupertypes.class.getMethods() ) { //We need to ignore both the match of each default method on PrimeAmongSecondarySupertypes - toFullyIgnore.add( isDeclaredBy( PrimeAmongSecondarySupertypes.class ).and( named( m.getName() ) ).and( takesNoArguments() ) ); + toFullyIgnore.add( + isDeclaredBy( PrimeAmongSecondarySupertypes.class ) + .and( named( method.getName() ) ) + .and( takesNoArguments() ) ); //And the override in the interface it belongs to - which we happen to have in the return type - toFullyIgnore.add( isDeclaredBy( m.getReturnType() ).and( named( m.getName() ) ).and( takesNoArguments() ) ); + toFullyIgnore.add( + isDeclaredBy( method.getReturnType() ) + .and( named( method.getName() ) ) + .and( takesNoArguments() ) ); } - this.delegateToInterceptorDispatcherMethodDelegation = MethodDelegation.to( ProxyConfiguration.InterceptorDispatcher.class ); + delegateToInterceptorDispatcherMethodDelegation = + MethodDelegation.to( ProxyConfiguration.InterceptorDispatcher.class ); - this.interceptorFieldAccessor = FieldAccessor.ofField( ProxyConfiguration.INTERCEPTOR_FIELD_NAME ) - .withAssigner( Assigner.DEFAULT, Assigner.Typing.DYNAMIC ); + interceptorFieldAccessor = + FieldAccessor.ofField( ProxyConfiguration.INTERCEPTOR_FIELD_NAME ) + .withAssigner( Assigner.DEFAULT, Assigner.Typing.DYNAMIC ); } public ElementMatcher getGroovyGetMetaClassFilter() { @@ -342,8 +325,8 @@ public FieldAccessor.PropertyConfigurable getInterceptorFieldAccessor() { } public DynamicType.Builder appendIgnoreAlsoAtEnd(DynamicType.Builder builder) { - for ( ElementMatcher m : toFullyIgnore ) { - builder = builder.ignoreAlso( m ); + for ( var elementMatcher : toFullyIgnore ) { + builder = builder.ignoreAlso( elementMatcher ); } return builder; } diff --git a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java index 
8ca6bd057ecf..f57f61df443d 100644 --- a/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java +++ b/hibernate-core/src/main/java/org/hibernate/cfg/Configuration.java @@ -200,10 +200,8 @@ public Configuration(BootstrapServiceRegistry serviceRegistry) { } private XmlMappingBinderAccess createMappingBinderAccess(BootstrapServiceRegistry serviceRegistry) { - return new XmlMappingBinderAccess( - serviceRegistry, - (settingName) -> properties == null ? null : properties.get( settingName ) - ); + return new XmlMappingBinderAccess( serviceRegistry, + settingName -> properties == null ? null : properties.get( settingName ) ); } /** @@ -578,9 +576,7 @@ public Configuration registerTypeOverride(UserType type, String[] keys) { if ( userTypeRegistrations == null ) { userTypeRegistrations = new ArrayList<>(); } - userTypeRegistrations.add( - metadataBuilder -> metadataBuilder.applyBasicType( type, keys ) - ); + userTypeRegistrations.add( builder -> builder.applyBasicType( type, keys ) ); return this; } @@ -759,7 +755,12 @@ public Configuration addClass(Class entityClass) throws MappingException { if ( entityClass == null ) { throw new IllegalArgumentException( "The specified class cannot be null" ); } - return addResource( entityClass.getName().replace( '.', '/' ) + ".hbm.xml" ); + return addResource( hbmFileName( entityClass ) ); + } + + private static String hbmFileName(Class entityClass) { + return entityClass.getName().replace( '.', '/' ) + + ".hbm.xml"; } /** @@ -1023,8 +1024,8 @@ public Configuration setColumnOrderingStrategy(ColumnOrderingStrategy columnOrde * @throws HibernateException usually indicates an invalid configuration or invalid mapping information */ public SessionFactory buildSessionFactory(ServiceRegistry serviceRegistry) throws HibernateException { - CORE_LOGGER.trace( "Building session factory using provided StandardServiceRegistry" ); - final MetadataBuilder metadataBuilder = + CORE_LOGGER.buildingFactoryWithProvidedRegistry(); + final var metadataBuilder = metadataSources.getMetadataBuilder( (StandardServiceRegistry) serviceRegistry ); if ( implicitNamingStrategy != null ) { @@ -1043,11 +1044,11 @@ public SessionFactory buildSessionFactory(ServiceRegistry serviceRegistry) throw metadataBuilder.applySharedCacheMode( sharedCacheMode ); } - for ( TypeContributor typeContributor : typeContributorRegistrations ) { + for ( var typeContributor : typeContributorRegistrations ) { metadataBuilder.applyTypes( typeContributor ); } - for ( FunctionContributor functionContributor : functionContributorRegistrations ) { + for ( var functionContributor : functionContributorRegistrations ) { metadataBuilder.applyFunctions( functionContributor ); } @@ -1126,7 +1127,7 @@ public SessionFactory buildSessionFactory(ServiceRegistry serviceRegistry) throw * @throws HibernateException usually indicates an invalid configuration or invalid mapping information */ public SessionFactory buildSessionFactory() throws HibernateException { - CORE_LOGGER.trace( "Building session factory using internal StandardServiceRegistryBuilder" ); + CORE_LOGGER.buildingFactoryWithInternalRegistryBuilder(); standardServiceRegistryBuilder.applySettings( properties ); var serviceRegistry = standardServiceRegistryBuilder.build(); try { diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java index 36820b621d0a..083168527b00 100644 --- 
a/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/Collections.java @@ -46,8 +46,8 @@ private static void processDereferencedCollection(PersistentCollection collec final var loadedPersister = entry.getLoadedPersister(); if ( loadedPersister != null && CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.trace( "Collection dereferenced: " - + collectionInfoString( loadedPersister, collection, entry.getLoadedKey(), session ) ); + CORE_LOGGER.collectionDereferenced( + collectionInfoString( loadedPersister, collection, entry.getLoadedKey(), session ) ); } // do a check @@ -112,8 +112,8 @@ private static void processNeverReferencedCollection(PersistentCollection col final Object loadedKey = entry.getLoadedKey(); if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.trace( "Found collection with unloaded owner: " - + collectionInfoString( loadedPersister, collection, loadedKey, session ) ); + CORE_LOGGER.collectionWithUnloadedOwner( + collectionInfoString( loadedPersister, collection, loadedKey, session ) ); } entry.setCurrentPersister( loadedPersister ); @@ -161,8 +161,8 @@ public static void processReachableCollection( // the class of the collection owner is enhanced for lazy loading, // and we found an un-initialized PersistentCollection, so skip it if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.trace( "Skipping uninitialized bytecode-lazy collection: " - + collectionInfoString( persister, collection, collectionEntry.getCurrentKey(), session ) ); + CORE_LOGGER.skippingUninitializedBytecodeLazyCollection( + collectionInfoString( persister, collection, collectionEntry.getCurrentKey(), session ) ); } collectionEntry.setReached( true ); collectionEntry.setProcessed( true ); @@ -187,8 +187,7 @@ private static void logReachedCollection( CollectionEntry collectionEntry) { if ( CORE_LOGGER.isTraceEnabled() ) { if ( collection.wasInitialized() ) { - CORE_LOGGER.tracef( - "Collection found: %s, was: %s (initialized)", + CORE_LOGGER.collectionFoundInitialized( collectionInfoString( persister, collection, @@ -204,8 +203,7 @@ private static void logReachedCollection( ); } else { - CORE_LOGGER.tracef( - "Collection found: %s, was: %s (uninitialized)", + CORE_LOGGER.collectionFoundUninitialized( collectionInfoString( persister, collection, @@ -268,7 +266,7 @@ private static void prepareCollectionForUpdate( // we will need to remove the old entries collectionEntry.setDoremove( true ); if ( collectionEntry.isDorecreate() ) { - CORE_LOGGER.trace( "Forcing collection initialization" ); + CORE_LOGGER.forcingCollectionInitialization(); collection.forceInitialization(); } } diff --git a/hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java b/hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java index 6ceed033992a..cf38cd9b7d62 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/internal/EntityEntryContext.java @@ -18,7 +18,6 @@ import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; import java.util.IdentityHashMap; import java.util.Map; import java.util.function.Consumer; @@ -209,22 +208,21 @@ private void putImmutableManagedEntity(ManagedEntity managed, int instanceId, Im private void checkNotAssociatedWithOtherPersistenceContextIfMutable(ManagedEntity managedEntity) { // 
we only have to check mutable managedEntity final var entityEntry = (EntityEntryImpl) managedEntity.$$_hibernate_getEntityEntry(); - if ( entityEntry == null || - !entityEntry.getPersister().isMutable() || - entityEntry.getPersistenceContext() == null || - entityEntry.getPersistenceContext() == persistenceContext ) { - return; - } - if ( entityEntry.getPersistenceContext().getSession().isOpen() ) { - // NOTE: otherPersistenceContext may be operating on the entityEntry in a different thread. - // it is not safe to associate entityEntry with this EntityEntryContext. - throw new HibernateException( - "Illegal attempt to associate a ManagedEntity with two open persistence contexts: " + entityEntry - ); - } - else { - // otherPersistenceContext is associated with a closed PersistenceContext - CORE_LOGGER.stalePersistenceContextInEntityEntry( entityEntry.toString() ); + if ( entityEntry != null + && entityEntry.getPersister().isMutable() + && entityEntry.getPersistenceContext() != null + && entityEntry.getPersistenceContext() != persistenceContext ) { + if ( entityEntry.getPersistenceContext().getSession().isOpen() ) { + // NOTE: otherPersistenceContext may be operating on the entityEntry in a different thread. + // it is not safe to associate entityEntry with this EntityEntryContext. + throw new HibernateException( + "Illegal attempt to associate a ManagedEntity with two open persistence contexts: " + entityEntry + ); + } + else { + // otherPersistenceContext is associated with a closed PersistenceContext + CORE_LOGGER.stalePersistenceContextInEntityEntry( entityEntry.toString() ); + } } } @@ -273,18 +271,7 @@ public EntityEntry removeEntityEntry(Object entity) { dirty = true; - if ( managedEntity instanceof ImmutableManagedEntityHolder holder ) { - assert entity == holder.managedEntity; - if ( !isReferenceCachingEnabled( holder.$$_hibernate_getEntityEntry().getPersister() ) ) { - immutableManagedEntityXref.remove( managedEntity.$$_hibernate_getInstanceId(), entity ); - } - else { - nonEnhancedEntityXref.remove( entity ); - } - } - else if ( !isManagedEntity( entity ) ) { - nonEnhancedEntityXref.remove( entity ); - } + removeXref( entity, managedEntity ); // re-link count--; @@ -293,7 +280,6 @@ else if ( !isManagedEntity( entity ) ) { // handle as a special case... head = null; tail = null; - assert managedEntity.$$_hibernate_getPreviousManagedEntity() == null; assert managedEntity.$$_hibernate_getNextManagedEntity() == null; } @@ -324,6 +310,21 @@ else if ( !isManagedEntity( entity ) ) { return clearManagedEntity( managedEntity ); } + private void removeXref(Object entity, ManagedEntity managedEntity) { + if ( managedEntity instanceof ImmutableManagedEntityHolder holder ) { + assert entity == holder.managedEntity; + if ( !isReferenceCachingEnabled( holder.$$_hibernate_getEntityEntry().getPersister() ) ) { + immutableManagedEntityXref.remove( managedEntity.$$_hibernate_getInstanceId(), entity ); + } + else { + nonEnhancedEntityXref.remove( entity ); + } + } + else if ( !isManagedEntity( entity ) ) { + nonEnhancedEntityXref.remove( entity ); + } + } + /** * The main bugaboo with {@code IdentityMap} that warranted this class in the * first place. 
@@ -439,7 +440,7 @@ private static void downgradeLockOnManagedEntity(final ManagedEntity node) { * @throws IOException Indicates an IO exception accessing the given stream */ public void serialize(ObjectOutputStream oos) throws IOException { - CORE_LOGGER.tracef( "Starting serialization of [%s] EntityEntry entries", count ); + CORE_LOGGER.startingEntityEntrySerialization( count ); oos.writeInt( count ); if ( count == 0 ) { return; @@ -454,7 +455,6 @@ public void serialize(ObjectOutputStream oos) throws IOException { oos.writeInt( managedEntity.$$_hibernate_getEntityEntry().getClass().getName().length() ); oos.writeChars( managedEntity.$$_hibernate_getEntityEntry().getClass().getName() ); managedEntity.$$_hibernate_getEntityEntry().serialize( oos ); - managedEntity = managedEntity.$$_hibernate_getNextManagedEntity(); } } @@ -473,7 +473,7 @@ public void serialize(ObjectOutputStream oos) throws IOException { public static EntityEntryContext deserialize(ObjectInputStream ois, StatefulPersistenceContext rtn) throws IOException, ClassNotFoundException { final int count = ois.readInt(); - CORE_LOGGER.tracef( "Starting deserialization of [%s] EntityEntry entries", count ); + CORE_LOGGER.startingEntityEntryDeserialization( count ); final var context = new EntityEntryContext( rtn ); context.count = count; @@ -540,24 +540,20 @@ public static EntityEntryContext deserialize(ObjectInputStream ois, StatefulPers private static EntityEntry deserializeEntityEntry( char[] entityEntryClassNames, ObjectInputStream ois, StatefulPersistenceContext persistenceContext){ - EntityEntry entry = null; - final String entityEntryClassName = new String( entityEntryClassNames ); - final Class entityEntryClass = + final var entityEntryClass = persistenceContext.getSession().getFactory().getClassLoaderService() .classForName( entityEntryClassName ); - try { - final Method deserializeMethod = - entityEntryClass.getDeclaredMethod( "deserialize", ObjectInputStream.class, PersistenceContext.class ); - entry = (EntityEntry) deserializeMethod.invoke( null, ois, persistenceContext ); + final var deserializeMethod = + entityEntryClass.getDeclaredMethod( "deserialize", + ObjectInputStream.class, PersistenceContext.class ); + return (EntityEntry) deserializeMethod.invoke( null, ois, persistenceContext ); } catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e) { - CORE_LOGGER.errorf( "Enable to deserialize [%s]", entityEntryClassName ); + CORE_LOGGER.unableToDeserialize( entityEntryClassName ); + return null; } - - return entry; - } public int getNumberOfManagedEntities() { diff --git a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/batch/JdbcBatchLogging.java b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/batch/JdbcBatchLogging.java index 34d61d5f2993..1a693908d5fb 100644 --- a/hibernate-core/src/main/java/org/hibernate/engine/jdbc/batch/JdbcBatchLogging.java +++ b/hibernate-core/src/main/java/org/hibernate/engine/jdbc/batch/JdbcBatchLogging.java @@ -81,4 +81,8 @@ public interface JdbcBatchLogging extends BasicLogger { @LogMessage(level = TRACE) @Message("PreparedStatementDetails did not contain PreparedStatement on releaseStatements: %s") void noPreparedStatements(String sqlString); + + @LogMessage(level = TRACE) + @Message( "Success of batch update unknown: %s") + void batchSuccessUnknown(int batchPosition); } diff --git a/hibernate-core/src/main/java/org/hibernate/generator/values/internal/GeneratedValuesHelper.java 
b/hibernate-core/src/main/java/org/hibernate/generator/values/internal/GeneratedValuesHelper.java index 924259592153..3bf005cbc69f 100644 --- a/hibernate-core/src/main/java/org/hibernate/generator/values/internal/GeneratedValuesHelper.java +++ b/hibernate-core/src/main/java/org/hibernate/generator/values/internal/GeneratedValuesHelper.java @@ -96,8 +96,10 @@ public static GeneratedValues getGeneratedValues( final var results = readGeneratedValues( resultSet, statement, persister, mappingProducer, session ); if ( CORE_LOGGER.isDebugEnabled() ) { - CORE_LOGGER.debug( "Extracted generated values for entity " - + infoString( persister ) + ": " + ArrayHelper.toString( results ) ); + CORE_LOGGER.extractedGeneratedValues( + infoString( persister ), + ArrayHelper.toString( results ) + ); } for ( int i = 0; i < results.length; i++ ) { diff --git a/hibernate-core/src/main/java/org/hibernate/internal/CoreMessageLogger.java b/hibernate-core/src/main/java/org/hibernate/internal/CoreMessageLogger.java index abdef1e79889..261a907db62d 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/CoreMessageLogger.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/CoreMessageLogger.java @@ -13,11 +13,14 @@ import org.hibernate.HibernateException; import org.hibernate.Internal; +import org.hibernate.boot.registry.classloading.spi.ClassLoadingException; import org.hibernate.cache.CacheException; import org.hibernate.engine.spi.CascadingAction; import org.hibernate.engine.spi.CollectionEntry; import org.hibernate.internal.log.SubSystemLogging; +import org.hibernate.tool.schema.spi.CommandAcceptanceException; +import org.hibernate.tool.schema.spi.GenerationTarget; import org.jboss.logging.BasicLogger; import org.jboss.logging.Logger; import org.jboss.logging.annotations.Cause; @@ -310,6 +313,10 @@ void missingArguments( @Message(value = "Unsuccessful: %s", id = 478) void unsuccessfulSchemaManagementCommand(String command); + @LogMessage(level = DEBUG) + @Message( value = "Error performing delayed DROP command [%s]", id = 479 ) + void unsuccessfulDelayedDropCommand(CommandAcceptanceException e); + @LogMessage(level = WARN) @Message( value = """ @@ -469,4 +476,110 @@ void unableToLocateStaticMetamodelField( @LogMessage(level = TRACE) @Message( id = 6022, value = "Reset storedSnapshot to %s for %s" ) void resetStoredSnapshot(Serializable storedSnapshot, CollectionEntry collectionEntry); + + @LogMessage(level = TRACE) + @Message( id = 6041, value = "Building session factory using provided StandardServiceRegistry" ) + void buildingFactoryWithProvidedRegistry(); + + @LogMessage(level = TRACE) + @Message( id = 6042, value = "Building session factory using internal StandardServiceRegistryBuilder" ) + void buildingFactoryWithInternalRegistryBuilder(); + + @LogMessage(level = TRACE) + @Message( id = 6043, value = "Found collection with unloaded owner: %s" ) + void collectionWithUnloadedOwner(String info); + + @LogMessage(level = TRACE) + @Message( id = 6044, value = "Forcing collection initialization" ) + void forcingCollectionInitialization(); + + @LogMessage(level = TRACE) + @Message( id = 6045, value = "Collection dereferenced: %s" ) + void collectionDereferenced(String info); + + @LogMessage(level = TRACE) + @Message( id = 6046, value = "Skipping uninitialized bytecode-lazy collection: %s" ) + void skippingUninitializedBytecodeLazyCollection(String info); + + @LogMessage(level = TRACE) + @Message( id = 6047, value = "Collection found: %s, was: %s (initialized)" ) + void 
collectionFoundInitialized(String is, String was); + + @LogMessage(level = TRACE) + @Message( id = 6048, value = "Collection found: %s, was: %s (uninitialized)" ) + void collectionFoundUninitialized(String is, String was); + + @LogMessage(level = TRACE) + @Message( id = 6049, value = "Created collection wrapper for: %s" ) + void createdCollectionWrapper(String s); + + @LogMessage(level = TRACE) + @Message( id = 6051, value = "Starting serialization of [%s] EntityEntry entries" ) + void startingEntityEntrySerialization(int count); + + @LogMessage(level = TRACE) + @Message( id = 6052, value = "Starting deserialization of [%s] EntityEntry entries" ) + void startingEntityEntryDeserialization(int count); + + @LogMessage(level = ERROR) + @Message( id = 6053, value = "Unable to deserialize [%s]" ) + void unableToDeserialize(String entityEntryClassName); + + @LogMessage(level = TRACE) + @Message( id = 6061, value = "Extracted generated values for entity %s - %s" ) + void extractedGeneratedValues(String info, String results); + + @LogMessage(level = WARN) + @Message( id = 6062, value = "Could not resolve type name [%s] as Java type" ) + void couldNotResolveTypeName(String typeName, @Cause ClassLoadingException exception); + + @LogMessage(level = DEBUG) + @Message( id = 6063, value = "Problem releasing GenerationTarget [%s]" ) + void problemReleasingGenerationTarget(GenerationTarget target, @Cause Exception e); + + @LogMessage(level = WARN) + @Message( id = 6064, value = "Unable to close temp session" ) + void unableToCloseTempSession(); + + // AbstractEntityPersister + + @LogMessage(level = TRACE) + @Message( id = 6565, value = "Initializing lazy properties from datastore (triggered for '%s')" ) + void initializingLazyPropertiesFromDatastore(String fieldName); + + @LogMessage(level = TRACE) + @Message( id = 6566, value = "Initializing lazy properties from second-level cache" ) + void initializingLazyPropertiesFromSecondLevelCache(); + + @LogMessage(level = TRACE) + @Message( id = 6567, value = "Done initializing lazy properties" ) + void doneInitializingLazyProperties(); + + @LogMessage(level = TRACE) + @Message( id = 6568, value = "Resolving unique key [%s] to identifier for entity [%s]" ) + void resolvingUniqueKeyToIdentifier(Object key, String entityName); + + @LogMessage(level = TRACE) + @Message( id = 6569, value = "Reading entity version: %s" ) + void readingEntityVersion(String info); + + @LogMessage(level = TRACE) + @Message( id = 6570, value = "Fetching entity: %s" ) + void fetchingEntity(String info); + + @LogMessage(level = TRACE) + @Message( id = 6571, value = "%s is dirty" ) + void propertyIsDirty(String qualifiedProperty); + + @LogMessage(level = TRACE) + @Message( id = 6572, value = "Forcing version increment [%s]" ) + void forcingVersionIncrement(String info); + + @LogMessage(level = TRACE) + @Message( id = 6573, value = "Getting current natural-id snapshot state for %s#%s" ) + void gettingCurrentNaturalIdSnapshot(String entityName, Object id); + + @LogMessage(level = TRACE) + @Message( id = 6574, value = "Initializing lazy properties of: %s, field access: %s" ) + void initializingLazyPropertiesOf(String info, String fieldName); } diff --git a/hibernate-core/src/main/java/org/hibernate/internal/log/IncubationLogger.java b/hibernate-core/src/main/java/org/hibernate/internal/log/IncubationLogger.java index da9f85388ab0..ddcff5ae6a81 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/log/IncubationLogger.java +++ 
b/hibernate-core/src/main/java/org/hibernate/internal/log/IncubationLogger.java @@ -13,7 +13,7 @@ import java.lang.invoke.MethodHandles; -import static org.jboss.logging.Logger.Level.WARN; +import static org.jboss.logging.Logger.Level.INFO; /** * @author Steve Ebersole @@ -26,11 +26,10 @@ public interface IncubationLogger { IncubationLogger INCUBATION_LOGGER = Logger.getMessageLogger( MethodHandles.lookup(), IncubationLogger.class, CATEGORY ); - @LogMessage(level = WARN) + @LogMessage(level = INFO) @Message( id = 90006001, - value = "Encountered incubating setting [%s]. See javadoc on corresponding " + - "`org.hibernate.cfg.AvailableSettings` constant for details." + value = "Setting '%s' is still incubating (see Javadoc of corresponding member of 'org.hibernate.cfg.AvailableSettings')" ) void incubatingSetting(String settingName); } diff --git a/hibernate-core/src/main/java/org/hibernate/internal/util/EntityPrinter.java b/hibernate-core/src/main/java/org/hibernate/internal/util/EntityPrinter.java index 5a213b7488cd..2f8d9f649c3c 100644 --- a/hibernate-core/src/main/java/org/hibernate/internal/util/EntityPrinter.java +++ b/hibernate-core/src/main/java/org/hibernate/internal/util/EntityPrinter.java @@ -7,7 +7,6 @@ import java.util.HashMap; import java.util.Map; -import org.hibernate.Hibernate; import org.hibernate.HibernateException; import org.hibernate.bytecode.enhance.spi.LazyPropertyInitializer; import org.hibernate.engine.spi.EntityHolder; @@ -16,6 +15,7 @@ import org.hibernate.engine.spi.TypedValue; import org.hibernate.type.Type; +import static org.hibernate.Hibernate.isInitialized; import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER; /** @@ -60,7 +60,7 @@ public String toString(String entityName, Object entity) throws HibernateExcepti if ( values[i] == LazyPropertyInitializer.UNFETCHED_PROPERTY ) { strValue = values[i].toString(); } - else if ( !Hibernate.isInitialized( values[i] ) ) { + else if ( !isInitialized( values[i] ) ) { strValue = ""; } else { @@ -87,8 +87,10 @@ public String toString(Map namedTypedValues) throws Hibernat final Map result = new HashMap<>(); for ( var entry : namedTypedValues.entrySet() ) { final String key = entry.getKey(); - final TypedValue value = entry.getValue(); - result.put( key, value.getType().toLoggableString( value.getValue(), factory ) ); + final var typedValue = entry.getValue(); + result.put( key, + typedValue.getType() + .toLoggableString( typedValue.getValue(), factory ) ); } return result.toString(); } @@ -101,14 +103,13 @@ public void logEntities(Iterable> entitiesByE int i = 0; for ( var entityKeyAndEntity : entitiesByEntityKey ) { final var holder = entityKeyAndEntity.getValue(); - if ( holder.getEntity() == null ) { - continue; - } - if ( i++ > 20 ) { - CORE_LOGGER.debug( "More......" ); - break; + if ( holder.getEntity() != null ) { + if ( i++ > 20 ) { + CORE_LOGGER.debug( "More......" 
); + break; + } + CORE_LOGGER.debug( toString( entityKeyAndEntity.getKey().getEntityName(), holder.getEntity() ) ); } - CORE_LOGGER.debug( toString( entityKeyAndEntity.getKey().getEntityName(), holder.getEntity() ) ); } } } diff --git a/hibernate-core/src/main/java/org/hibernate/jdbc/Expectations.java b/hibernate-core/src/main/java/org/hibernate/jdbc/Expectations.java index c47ee3d9667b..079f87d3a527 100644 --- a/hibernate-core/src/main/java/org/hibernate/jdbc/Expectations.java +++ b/hibernate-core/src/main/java/org/hibernate/jdbc/Expectations.java @@ -15,7 +15,7 @@ import static java.sql.Statement.EXECUTE_FAILED; import static java.sql.Statement.SUCCESS_NO_INFO; -import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER; +import static org.hibernate.engine.jdbc.batch.JdbcBatchLogging.BATCH_MESSAGE_LOGGER; /** * Useful operations for dealing with {@link Expectation}s. @@ -67,7 +67,7 @@ static void checkBatched(int expectedRowCount, int rowCount, int batchPosition, case EXECUTE_FAILED: throw new BatchFailedException( "Batch update failed: " + batchPosition ); case SUCCESS_NO_INFO: - CORE_LOGGER.debugf( "Success of batch update unknown: %s", batchPosition ); + BATCH_MESSAGE_LOGGER.batchSuccessUnknown( batchPosition ); break; default: if ( expectedRowCount > rowCount ) { diff --git a/hibernate-core/src/main/java/org/hibernate/mapping/BasicValue.java b/hibernate-core/src/main/java/org/hibernate/mapping/BasicValue.java index 44207f8d665f..71a6067d04f2 100644 --- a/hibernate-core/src/main/java/org/hibernate/mapping/BasicValue.java +++ b/hibernate-core/src/main/java/org/hibernate/mapping/BasicValue.java @@ -812,17 +812,18 @@ private static Resolution interpretExplicitlyNamedType( final var managedBeanRegistry = bootstrapContext.getManagedBeanRegistry(); final var typeConfiguration = bootstrapContext.getTypeConfiguration(); - final JpaAttributeConverterCreationContext converterCreationContext = new JpaAttributeConverterCreationContext() { - @Override - public ManagedBeanRegistry getManagedBeanRegistry() { - return managedBeanRegistry; - } - - @Override - public TypeConfiguration getTypeConfiguration() { - return typeConfiguration; - } - }; + final var converterCreationContext = + new JpaAttributeConverterCreationContext() { + @Override + public ManagedBeanRegistry getManagedBeanRegistry() { + return managedBeanRegistry; + } + + @Override + public TypeConfiguration getTypeConfiguration() { + return typeConfiguration; + } + }; // Name could refer to: // 1) a named converter - HBM support for JPA's AttributeConverter via its `type="..."` XML attribute @@ -857,8 +858,9 @@ public TypeConfiguration getTypeConfiguration() { } // see if it is a named basic type - final BasicType basicTypeByName = - typeConfiguration.getBasicTypeRegistry().getRegisteredType( name ); + final var basicTypeByName = + typeConfiguration.getBasicTypeRegistry() + .getRegisteredType( name ); if ( basicTypeByName != null ) { return getNamedBasicTypeResolution( explicitMutabilityPlanAccess, @@ -870,7 +872,9 @@ public TypeConfiguration getTypeConfiguration() { } // see if it is a named TypeDefinition - final var typeDefinition = context.getTypeDefinitionRegistry().resolve( name ); + final var typeDefinition = + context.getTypeDefinitionRegistry() + .resolve( name ); if ( typeDefinition != null ) { final var resolution = typeDefinition.resolve( localTypeParams, @@ -884,7 +888,7 @@ public TypeConfiguration getTypeConfiguration() { // see if the name is a UserType or BasicType implementor class name try { - final Class 
typeNamedClass = classForName( name, bootstrapContext ); + final var typeNamedClass = classForName( name, bootstrapContext ); // if there are no local config params, register an implicit TypeDefinition for this custom type // later uses may find it and reuse its cacheable reference if ( isEmpty( localTypeParams ) ) { @@ -903,10 +907,10 @@ public TypeConfiguration getTypeConfiguration() { } catch (ClassLoadingException e) { // allow the exception below to trigger - CORE_LOGGER.debugf( "Could not resolve type-name [%s] as Java type : %s", name, e ); + CORE_LOGGER.couldNotResolveTypeName( name, e ); } - throw new MappingException( "Could not resolve named type : " + name ); + throw new MappingException( "Could not resolve named type: " + name ); } private static NamedBasicTypeResolution getNamedBasicTypeResolution( @@ -919,8 +923,9 @@ private static NamedBasicTypeResolution getNamedBasicTypeResolution( final JavaType domainJtd; if ( converterDescriptor != null ) { //noinspection unchecked - valueConverter = (BasicValueConverter) - converterDescriptor.createJpaAttributeConverter( converterCreationContext ); + valueConverter = + (BasicValueConverter) + converterDescriptor.createJpaAttributeConverter( converterCreationContext ); domainJtd = valueConverter.getDomainJavaType(); } else { @@ -997,7 +1002,10 @@ public int resolveJdbcTypeCode(int jdbcTypeCode) { return aggregateColumn == null ? jdbcTypeCode : getDialect().getAggregateSupport() - .aggregateComponentSqlTypeCode( aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode(), jdbcTypeCode ); + .aggregateComponentSqlTypeCode( + aggregateColumn.getType().getJdbcType().getDefaultSqlTypeCode(), + jdbcTypeCode + ); } @Override @@ -1143,7 +1151,7 @@ public boolean isDisallowedWrapperArray() { } private boolean isWrapperByteOrCharacterArray() { - final Class javaTypeClass = getResolution().getDomainJavaType().getJavaTypeClass(); + final var javaTypeClass = getResolution().getDomainJavaType().getJavaTypeClass(); return javaTypeClass == Byte[].class || javaTypeClass == Character[].class; } diff --git a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java index 20ed381de259..ba7571613fc9 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/entity/AbstractEntityPersister.java @@ -201,8 +201,6 @@ import org.hibernate.sql.ast.tree.select.SelectStatement; import org.hibernate.sql.exec.spi.JdbcOperation; import org.hibernate.sql.exec.spi.JdbcParametersList; -import org.hibernate.sql.model.ast.ColumnValueBinding; -import org.hibernate.sql.model.ast.MutatingTableReference; import org.hibernate.sql.model.ast.builder.MutationGroupBuilder; import org.hibernate.sql.model.ast.builder.TableInsertBuilder; import org.hibernate.sql.results.graph.DomainResult; @@ -283,6 +281,7 @@ import static org.hibernate.internal.util.collections.CollectionHelper.toSmallList; import static org.hibernate.loader.ast.internal.MultiKeyLoadHelper.supportsSqlArrayType; import static org.hibernate.metamodel.RepresentationMode.POJO; +import static org.hibernate.metamodel.mapping.EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME; import static org.hibernate.metamodel.mapping.internal.GeneratedValuesProcessor.getGeneratedAttributes; import static org.hibernate.metamodel.mapping.internal.MappingModelCreationHelper.buildBasicAttributeMapping; import static 
org.hibernate.metamodel.mapping.internal.MappingModelCreationHelper.buildEncapsulatedCompositeIdentifierMapping; @@ -1457,8 +1456,7 @@ private Object initializedLazyField( } if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.tracev( - "Initializing lazy properties of: {0}, field access: {1}", + CORE_LOGGER.initializingLazyPropertiesOf( infoString( this, id, getFactory() ), fieldName ); @@ -1601,7 +1599,7 @@ private Object initLazyProperties( SharedSessionContractImplementor session) { assert hasLazyProperties(); - CORE_LOGGER.tracef( "Initializing lazy properties from datastore (triggered for '%s')", fieldName ); + CORE_LOGGER.initializingLazyPropertiesFromDatastore( fieldName ); final var interceptor = asPersistentAttributeInterceptable( entity ).$$_hibernate_getInterceptor(); assert interceptor != null : "Expecting bytecode interceptor to be non-null"; @@ -1639,7 +1637,7 @@ private Object initLazyProperties( } } } - CORE_LOGGER.trace( "Done initializing lazy properties" ); + CORE_LOGGER.doneInitializingLazyProperties(); return finalResult; } catch (JDBCException ex) { @@ -1700,7 +1698,7 @@ protected Object initializeLazyPropertiesFromCache( final SharedSessionContractImplementor session, final EntityEntry entry, final CacheEntry cacheEntry) { - CORE_LOGGER.trace( "Initializing lazy properties from second-level cache" ); + CORE_LOGGER.initializingLazyPropertiesFromSecondLevelCache(); Object result = null; final var disassembledValues = cacheEntry.getDisassembledState(); for ( int j = 0; j < lazyPropertyNames.length; j++ ) { @@ -1718,7 +1716,7 @@ protected Object initializeLazyPropertiesFromCache( } } } - CORE_LOGGER.trace( "Done initializing lazy properties" ); + CORE_LOGGER.doneInitializingLazyProperties(); return result; } @@ -1769,7 +1767,7 @@ protected void initializeLazyProperty(Object entity, EntityEntry entry, Object p setPropertyValue( entity, index, propValue ); final var loadedState = entry.getLoadedState(); if ( loadedState != null ) { - // object have been loaded with setReadOnly(true); HHH-2236 + // object has been loaded with setReadOnly(true); HHH-2236 loadedState[index] = type.deepCopy( propValue, factory ); } // If the entity has deleted state, then update that as well @@ -2034,8 +2032,7 @@ public Object[] getDatabaseSnapshot(Object id, SharedSessionContractImplementor @Override public Object getIdByUniqueKey(Object key, String uniquePropertyName, SharedSessionContractImplementor session) { if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.tracef( "resolving unique key [%s] to identifier for entity [%s]", - key, getEntityName() ); + CORE_LOGGER.resolvingUniqueKeyToIdentifier( key, getEntityName() ); } return getUniqueKeyLoader( uniquePropertyName, session ).resolveId( key, session ); @@ -2094,8 +2091,8 @@ private Object calculateNextVersion(Object id, Object currentVersion, SharedSess .generate( session, null, currentVersion, FORCE_INCREMENT ); if ( CORE_LOGGER.isTraceEnabled() ) { final var versionType = getVersionType(); - CORE_LOGGER.trace( - "Forcing version increment [" + infoString( this, id, factory ) + "; " + CORE_LOGGER.forcingVersionIncrement( + "[" + infoString( this, id, factory ) + "; " + versionType.toLoggableString( currentVersion, factory ) + " -> " + versionType.toLoggableString( nextVersion, factory ) + "]" ); @@ -2137,7 +2134,7 @@ else if ( isVersionGeneratedOnExecution() ) { public Object getCurrentVersion(Object id, SharedSessionContractImplementor session) throws HibernateException { if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.tracev( 
"Getting version: {0}", infoString( this, id, getFactory() ) ); + CORE_LOGGER.readingEntityVersion( infoString( this, id, getFactory() ) ); } final String versionSelectString = getVersionSelectString(); try { @@ -2285,7 +2282,7 @@ private DiscriminatorType buildDiscriminatorType() { discriminatorBasicType, new UnifiedAnyDiscriminatorConverter<>( getNavigableRole() - .append( EntityDiscriminatorMapping.DISCRIMINATOR_ROLE_NAME ), + .append( DISCRIMINATOR_ROLE_NAME ), factory.getTypeConfiguration().getJavaTypeRegistry() .resolveDescriptor( discriminatedType() ), discriminatorBasicType.getRelationalJavaType(), @@ -3469,9 +3466,9 @@ public void addDiscriminatorToInsertGroup(MutationGroupBuilder insertGroupBuilde public void addSoftDeleteToInsertGroup(MutationGroupBuilder insertGroupBuilder) { if ( softDeleteMapping != null ) { final TableInsertBuilder insertBuilder = insertGroupBuilder.getTableDetailsBuilder( getIdentifierTableName() ); - final MutatingTableReference mutatingTable = insertBuilder.getMutatingTable(); - final ColumnReference columnReference = new ColumnReference( mutatingTable, softDeleteMapping ); - final ColumnValueBinding nonDeletedValueBinding = softDeleteMapping.createNonDeletedValueBinding( columnReference ); + final var mutatingTable = insertBuilder.getMutatingTable(); + final var columnReference = new ColumnReference( mutatingTable, softDeleteMapping ); + final var nonDeletedValueBinding = softDeleteMapping.createNonDeletedValueBinding( columnReference ); insertBuilder.addValueColumn( nonDeletedValueBinding ); } } @@ -3513,10 +3510,10 @@ public Object load(Object id, Object optionalObject, LockOptions lockOptions, Sh private Object doLoad(Object id, Object optionalObject, LockOptions lockOptions, Boolean readOnly, SharedSessionContractImplementor session) throws HibernateException { if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.tracev( "Fetching entity: {0}", infoString( this, id, getFactory() ) ); + CORE_LOGGER.fetchingEntity( infoString( this, id, getFactory() ) ); } - final SingleIdEntityLoader loader = determineLoaderToUse( session, lockOptions ); + final var loader = determineLoaderToUse( session, lockOptions ); return optionalObject == null ? 
loader.load( id, lockOptions, readOnly, session ) : loader.load( id, optionalObject, lockOptions, readOnly, session ); @@ -3562,47 +3559,57 @@ public Object initializeEnhancedEntityUsedAsProxy( Object entity, String nameOfAttributeBeingAccessed, SharedSessionContractImplementor session) { - final var enhancementMetadata = getBytecodeEnhancementMetadata(); - if ( enhancementMetadata.extractLazyInterceptor( entity ) + if ( getBytecodeEnhancementMetadata().extractLazyInterceptor( entity ) instanceof EnhancementAsProxyLazinessInterceptor proxyInterceptor ) { - final var entityKey = proxyInterceptor.getEntityKey(); - final Object identifier = entityKey.getIdentifier(); - - Object loaded = null; - if ( canReadFromCache && session.isEventSource() ) { - final var eventSource = (EventSource) session; - loaded = eventSource.loadFromSecondLevelCache( this, entityKey, entity, LockMode.NONE ); - } - if ( loaded == null ) { - final var lockOptions = new LockOptions(); - loaded = determineLoaderToUse( session, lockOptions ).load( identifier, entity, lockOptions, session ); - } - + final Object id = entityKey.getIdentifier(); + final Object loaded = loadEnhancedEntityUsedAsProxy( entity, session, entityKey ); if ( loaded == null ) { final var persistenceContext = session.getPersistenceContext(); persistenceContext.removeEntry( entity ); persistenceContext.removeEntity( entityKey ); - factory.getEntityNotFoundDelegate().handleEntityNotFound( entityKey.getEntityName(), identifier ); + factory.getEntityNotFoundDelegate().handleEntityNotFound( entityKey.getEntityName(), id ); } + return readEnhancedEntityAttribute( entity, id, nameOfAttributeBeingAccessed, session ); + } + else { + throw new AssertionFailure( "The BytecodeLazyAttributeInterceptor was not an instance of EnhancementAsProxyLazinessInterceptor" ); + } + } - final var interceptor = enhancementMetadata.injectInterceptor( entity, identifier, session ); - - final Object value; - if ( nameOfAttributeBeingAccessed == null ) { - return null; - } - else if ( interceptor.isAttributeLoaded( nameOfAttributeBeingAccessed ) ) { - value = getPropertyValue( entity, nameOfAttributeBeingAccessed ); - } - else { - value = initializeLazyProperty( nameOfAttributeBeingAccessed, entity, session ); + private Object loadEnhancedEntityUsedAsProxy( + Object entity, + SharedSessionContractImplementor session, + EntityKey entityKey) { + if ( canReadFromCache && session.isEventSource() ) { + final Object cachedEntity = + session.loadFromSecondLevelCache( this, entityKey, entity, LockMode.NONE ); + if ( cachedEntity != null ) { + return cachedEntity; } - - return interceptor.readObject( entity, nameOfAttributeBeingAccessed, value ); } + final var lockOptions = new LockOptions(); + return determineLoaderToUse( session, lockOptions ) + .load( entityKey.getIdentifier(), entity, lockOptions, session ); + } - throw new IllegalStateException(); + private Object readEnhancedEntityAttribute( + Object entity, Object id, String nameOfAttributeBeingAccessed, + SharedSessionContractImplementor session) { + final var interceptor = + getBytecodeEnhancementMetadata() + .injectInterceptor( entity, id, session ); + final Object value; + if ( nameOfAttributeBeingAccessed == null ) { + return null; + } + else if ( interceptor.isAttributeLoaded( nameOfAttributeBeingAccessed ) ) { + value = getPropertyValue( entity, nameOfAttributeBeingAccessed ); + } + else { + value = initializeLazyProperty( nameOfAttributeBeingAccessed, entity, session ); + } + return interceptor.readObject( entity, 
nameOfAttributeBeingAccessed, value ); } @Override @@ -3731,7 +3738,7 @@ private void logDirtyProperties(int[] props) { if ( CORE_LOGGER.isTraceEnabled() ) { for ( int prop : props ) { final String propertyName = getAttributeMapping( prop ).getAttributeName(); - CORE_LOGGER.trace( qualify( getEntityName(), propertyName ) + " is dirty" ); + CORE_LOGGER.propertyIsDirty( qualify( getEntityName(), propertyName ) ); } } } @@ -4431,11 +4438,7 @@ protected void verifyHasNaturalId() { public Object getNaturalIdentifierSnapshot(Object id, SharedSessionContractImplementor session) { verifyHasNaturalId(); if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.tracef( - "Getting current natural-id snapshot state for `%s#%s", - getEntityName(), - id - ); + CORE_LOGGER.gettingCurrentNaturalIdSnapshot( getEntityName(), id ); } return getNaturalIdLoader().resolveIdToNaturalId( id, session ); } diff --git a/hibernate-core/src/main/java/org/hibernate/persister/entity/ExplicitSqlStringGenerationContext.java b/hibernate-core/src/main/java/org/hibernate/persister/entity/ExplicitSqlStringGenerationContext.java index dfb54b2f4d08..89264e2a4e38 100644 --- a/hibernate-core/src/main/java/org/hibernate/persister/entity/ExplicitSqlStringGenerationContext.java +++ b/hibernate-core/src/main/java/org/hibernate/persister/entity/ExplicitSqlStringGenerationContext.java @@ -84,16 +84,21 @@ public String format(QualifiedName qualifiedName) { @Override public String formatWithoutCatalog(QualifiedSequenceName qualifiedName) { - QualifiedSequenceName nameToFormat; + return nameFormater().format( nameToFormat( qualifiedName ), getDialect() ); + } + + private QualifiedSequenceName nameToFormat(QualifiedSequenceName qualifiedName) { if ( qualifiedName.getCatalogName() != null || qualifiedName.getSchemaName() == null && defaultSchema != null ) { - nameToFormat = new QualifiedSequenceName( null, - schemaWithDefault( qualifiedName.getSchemaName() ), qualifiedName.getSequenceName() ); + return new QualifiedSequenceName( + null, + schemaWithDefault( qualifiedName.getSchemaName() ), + qualifiedName.getSequenceName() + ); } else { - nameToFormat = qualifiedName; + return qualifiedName; } - return nameFormater().format( nameToFormat, getDialect() ); } @Override diff --git a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java index 61d6d918b956..d0241e34b98a 100644 --- a/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java +++ b/hibernate-core/src/main/java/org/hibernate/proxy/AbstractLazyInitializer.java @@ -236,13 +236,13 @@ protected void permissiveInitialization() { session.close(); } catch (Exception e) { - CORE_LOGGER.warn( "Unable to close temporary session used to load lazy proxy associated to no session" ); + CORE_LOGGER.unableToCloseTempSession(); } } } catch (Exception e) { - CORE_LOGGER.error( "Initialization failure [" + entityName + "#" + id + "]", e ); - throw new LazyInitializationException( e.getMessage() ); + throw new LazyInitializationException( "Could not initialize proxy [" + + entityName + "#" + id + "]: " + e.getMessage() ); } } else if ( session.isOpenOrWaitingForAutoClose() && session.isConnected() ) { diff --git a/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultSetsSet.java b/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultSetsSet.java index 177133bbb5de..6a27ffce50c5 100644 --- 
a/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultSetsSet.java +++ b/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultSetsSet.java @@ -6,8 +6,6 @@ import java.sql.ResultSet; import java.util.HashMap; -import java.util.Iterator; -import java.util.Map; import java.util.function.Consumer; /** @@ -75,9 +73,9 @@ else if ( more != null ) { //Any entry will do, so we take the first one if there's any. private void scaleDown() { if ( more != null && !more.isEmpty() ) { - Iterator> iterator = more.entrySet().iterator(); - Map.Entry entry = iterator.next(); - final ResultSet resultSet = entry.getKey(); + var iterator = more.entrySet().iterator(); + var entry = iterator.next(); + final var resultSet = entry.getKey(); iterator.remove(); first = resultSet; } diff --git a/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultsetsTrackingContainer.java b/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultsetsTrackingContainer.java index 6816564cf0d3..880124487568 100644 --- a/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultsetsTrackingContainer.java +++ b/hibernate-core/src/main/java/org/hibernate/resource/jdbc/internal/ResultsetsTrackingContainer.java @@ -174,8 +174,10 @@ else if ( xref != null ) { } private boolean warnOnNotNull(ResultSetsSet existingEntry) { - // Keep this at DEBUG level, rather than warn. Numerous connection pool implementations can return a - // proxy/wrapper around the JDBC Statement, causing excessive logging here. See HHH-8210. + // Keep this at DEBUG level, rather than WARN. + // Connection pool implementations often return a + // proxy/wrapper around the JDBC Statement, + // causing excessive logging here. See HHH-8210. if ( existingEntry == null ) { CORE_LOGGER.trace( "ResultSet statement was not registered (on register)" ); } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/AbstractInformationExtractorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/AbstractInformationExtractorImpl.java index 59c59a1f33ec..851757b6ee73 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/AbstractInformationExtractorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/AbstractInformationExtractorImpl.java @@ -4,6 +4,7 @@ */ package org.hibernate.tool.schema.extract.internal; +import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.SQLException; @@ -18,7 +19,6 @@ import org.hibernate.JDBCException; import org.hibernate.boot.model.naming.Identifier; import org.hibernate.boot.model.relational.QualifiedTableName; -import org.hibernate.cfg.AvailableSettings; import org.hibernate.dialect.DB2Dialect; import org.hibernate.dialect.Dialect; import org.hibernate.engine.config.spi.ConfigurationService; @@ -26,6 +26,7 @@ import org.hibernate.engine.jdbc.env.spi.IdentifierHelper; import org.hibernate.engine.jdbc.env.spi.JdbcEnvironment; import org.hibernate.engine.jdbc.env.spi.NameQualifierSupport; +import org.hibernate.tool.schema.extract.internal.ForeignKeyInformationImpl.ColumnReferenceMappingImpl; import org.hibernate.tool.schema.extract.spi.ColumnInformation; import org.hibernate.tool.schema.extract.spi.ExtractionContext; import org.hibernate.tool.schema.extract.spi.ForeignKeyInformation; @@ -39,6 +40,8 @@ import static java.util.Collections.addAll; import static 
org.hibernate.boot.model.naming.DatabaseIdentifier.toIdentifier; +import static org.hibernate.cfg.SchemaToolingSettings.ENABLE_SYNONYMS; +import static org.hibernate.cfg.SchemaToolingSettings.EXTRA_PHYSICAL_TABLE_TYPES; import static org.hibernate.engine.jdbc.spi.SQLExceptionLogging.ERROR_LOG; import static org.hibernate.internal.CoreMessageLogger.CORE_LOGGER; import static org.hibernate.internal.util.StringHelper.EMPTY_STRINGS; @@ -65,34 +68,33 @@ public abstract class AbstractInformationExtractorImpl implements InformationExt public AbstractInformationExtractorImpl(ExtractionContext extractionContext) { this.extractionContext = extractionContext; - - final ConfigurationService configService = - extractionContext.getServiceRegistry().requireService( ConfigurationService.class ); - + final var configService = + extractionContext.getServiceRegistry() + .requireService( ConfigurationService.class ); useJdbcMetadataDefaultsSetting = configService.getSetting( "hibernate.temp.use_jdbc_metadata_defaults", StandardConverters.BOOLEAN, Boolean.TRUE ); - final String extraPhysicalTableTypesConfig = configService.getSetting( - AvailableSettings.EXTRA_PHYSICAL_TABLE_TYPES, + EXTRA_PHYSICAL_TABLE_TYPES, StandardConverters.STRING, configService.getSetting( - AvailableSettings.EXTRA_PHYSICAL_TABLE_TYPES, + EXTRA_PHYSICAL_TABLE_TYPES, StandardConverters.STRING, "" ) ); - final Dialect dialect = extractionContext.getJdbcEnvironment().getDialect(); - this.extraPhysicalTableTypes = getPhysicalTableTypes( extraPhysicalTableTypesConfig, dialect ); - this.tableTypes = getTableTypes( configService, dialect ); + final var dialect = extractionContext.getJdbcEnvironment().getDialect(); + extraPhysicalTableTypes = getPhysicalTableTypes( extraPhysicalTableTypesConfig, dialect ); + tableTypes = getTableTypes( configService, dialect ); } private String[] getPhysicalTableTypes(String extraPhysicalTableTypesConfig, Dialect dialect) { final List physicalTableTypesList = new ArrayList<>(); if ( !isBlank( extraPhysicalTableTypesConfig ) ) { - addAll( physicalTableTypesList, splitTrimmingTokens( ",;", extraPhysicalTableTypesConfig, false ) ); + addAll( physicalTableTypesList, + splitTrimmingTokens( ",;", extraPhysicalTableTypesConfig, false ) ); } dialect.augmentPhysicalTableTypes( physicalTableTypesList ); return physicalTableTypesList.toArray( EMPTY_STRINGS ); @@ -102,7 +104,7 @@ private String[] getTableTypes(ConfigurationService configService, Dialect diale final List tableTypesList = new ArrayList<>(); tableTypesList.add( "TABLE" ); tableTypesList.add( "VIEW" ); - if ( getBoolean( AvailableSettings.ENABLE_SYNONYMS, configService.getSettings() ) ) { + if ( getBoolean( ENABLE_SYNONYMS, configService.getSettings() ) ) { if ( dialect instanceof DB2Dialect ) { //TODO: should not use Dialect types directly! tableTypesList.add( "ALIAS" ); } @@ -282,26 +284,22 @@ protected abstract T processSchemaResultSet( @Override public boolean schemaExists(Identifier catalog, Identifier schema) { + final var helper = getIdentifierHelper(); final String catalogFilter = - getIdentifierHelper() - .toMetaDataCatalogName( catalog == null ? extractionContext.getDefaultCatalog() : catalog ); + helper.toMetaDataCatalogName( catalog == null ? extractionContext.getDefaultCatalog() : catalog ); final String schemaFilter = - getIdentifierHelper() - .toMetaDataSchemaName( schema == null ? extractionContext.getDefaultSchema() : schema ); + helper.toMetaDataSchemaName( schema == null ? 
extractionContext.getDefaultSchema() : schema ); try { return processSchemaResultSet( catalogFilter, schemaFilter, resultSet -> { - if ( !resultSet.next() ) { return false; } - - if ( resultSet.next() ) { + else if ( resultSet.next() ) { final String catalogName = catalog == null ? "" : catalog.getCanonicalName(); final String schemaName = schema == null ? "" : schema.getCanonicalName(); - CORE_LOGGER.debugf( "Multiple schemas found with that name [%s.%s]", catalogName, @@ -327,12 +325,15 @@ private TableInformation extractTableInformation(ResultSet resultSet) throws SQL ); } + private Connection getConnection() { + return extractionContext.getJdbcConnection(); + } + @Override public TableInformation getTable(Identifier catalog, Identifier schema, Identifier tableName) { if ( catalog != null || schema != null ) { // The table defined an explicit namespace. In such cases we only ever want to look // in the identified namespace - return locateTableInNamespace( catalog, schema, tableName ); } else { @@ -355,10 +356,8 @@ public TableInformation getTable(Identifier catalog, Identifier schema, Identifi // 2) look in default namespace final Identifier defaultCatalog = extractionContext.getDefaultCatalog(); final Identifier defaultSchema = extractionContext.getDefaultSchema(); - if ( defaultCatalog != null - || defaultSchema != null ) { - final TableInformation tableInfo = - locateTableInNamespace( defaultCatalog, defaultSchema, tableName ); + if ( defaultCatalog != null || defaultSchema != null ) { + final var tableInfo = locateTableInNamespace( defaultCatalog, defaultSchema, tableName ); if ( tableInfo != null ) { return tableInfo; } @@ -384,49 +383,55 @@ private Identifier getCurrentSchema() { if ( getNameQualifierSupport() == NameQualifierSupport.CATALOG ) { return null; } - if ( currentSchema != null ) { + else if ( currentSchema != null ) { return currentSchema; } - final Identifier schema = getJdbcEnvironment().getCurrentSchema(); - if ( schema != null ) { - currentSchema = schema; - } - if ( !useJdbcMetadataDefaultsSetting ) { - try { - currentSchema = getIdentifierHelper() - .toIdentifier( extractionContext.getJdbcConnection().getSchema() ); - } - catch (SQLException sqle) { - ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); + else { + final Identifier schema = getJdbcEnvironment().getCurrentSchema(); + if ( schema != null ) { + currentSchema = schema; } - catch (AbstractMethodError ignore) { - // jConnect and jTDS report that they "support" schemas, but they don't really + if ( !useJdbcMetadataDefaultsSetting ) { + try { + currentSchema = + getIdentifierHelper() + .toIdentifier( getConnection().getSchema() ); + } + catch (SQLException sqle) { + ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); + } + catch (AbstractMethodError ignore) { + // jConnect and jTDS report that they "support" schemas, but they don't really + } } + return currentSchema; } - return currentSchema; } private Identifier getCurrentCatalog() { if ( getNameQualifierSupport() == NameQualifierSupport.SCHEMA ) { return null; } - if ( currentCatalog != null ) { + else if ( currentCatalog != null ) { return currentCatalog; } - final Identifier catalog = getJdbcEnvironment().getCurrentCatalog(); - if ( catalog != null ) { - currentCatalog = catalog; - } - if ( !useJdbcMetadataDefaultsSetting ) { - try { - currentCatalog = getIdentifierHelper() - .toIdentifier( extractionContext.getJdbcConnection().getCatalog() ); + else { + final Identifier catalog = 
getJdbcEnvironment().getCurrentCatalog(); + if ( catalog != null ) { + currentCatalog = catalog; } - catch (SQLException sqle) { - ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); + if ( !useJdbcMetadataDefaultsSetting ) { + try { + currentCatalog = + getIdentifierHelper() + .toIdentifier( getConnection().getCatalog() ); + } + catch (SQLException sqle) { + ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); + } } + return currentCatalog; } - return currentCatalog; } private String getCurrentCatalogFilter(JdbcEnvironment jdbcEnvironment) { @@ -439,7 +444,7 @@ private String getCurrentCatalogFilter(JdbcEnvironment jdbcEnvironment) { } if ( !useJdbcMetadataDefaultsSetting ) { try { - currentCatalogFilter = extractionContext.getJdbcConnection().getCatalog(); + currentCatalogFilter = getConnection().getCatalog(); } catch (SQLException sqle) { ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); @@ -456,10 +461,9 @@ private String getCurrentSchemaFilter(JdbcEnvironment jdbcEnvironment) { if ( currentSchema != null ) { currentSchemaFilter = toMetaDataObjectName( currentSchema ); } - if ( !useJdbcMetadataDefaultsSetting ) { try { - currentSchemaFilter = extractionContext.getJdbcConnection().getSchema(); + currentSchemaFilter = getConnection().getSchema(); } catch (SQLException sqle) { ERROR_LOG.logErrorCodes( sqle.getErrorCode(), sqle.getSQLState() ); @@ -473,77 +477,68 @@ private String getCurrentSchemaFilter(JdbcEnvironment jdbcEnvironment) { @Override public NameSpaceTablesInformation getTables(Identifier catalog, Identifier schema) { + final String catalogFilter = getCatalogFilter( catalog ); + final String schemaFilter = getSchemaFilter( schema ); + try { + return processTableResultSet( + catalogFilter, + schemaFilter, + "%", + tableTypes, + resultSet -> { + final var tablesInformation = extractNameSpaceTablesInformation( resultSet ); + populateTablesWithColumns( catalogFilter, schemaFilter, tablesInformation ); + return tablesInformation; + } ); + } + catch (SQLException sqlException) { + throw convertSQLException( sqlException, "Error accessing table metadata" ); + } + } - final String catalogFilter; - final String schemaFilter; - - final NameQualifierSupport nameQualifierSupport = getNameQualifierSupport(); - - if ( nameQualifierSupport.supportsCatalogs() ) { + private String getCatalogFilter(Identifier catalog) { + if ( supportsCatalogs() ) { if ( catalog == null ) { // look in the current namespace final String currentCatalogFilter = getCurrentCatalogFilter( getJdbcEnvironment() ); if ( currentCatalogFilter != null ) { - catalogFilter = currentCatalogFilter; + return currentCatalogFilter; } else { - if ( extractionContext.getDefaultCatalog() != null ) { - // 2) look in default namespace - catalogFilter = toMetaDataObjectName( extractionContext.getDefaultCatalog() ); - } - else { - catalogFilter = null; - } + // 2) look in default namespace + final Identifier defaultCatalog = extractionContext.getDefaultCatalog(); + return defaultCatalog != null ? 
toMetaDataObjectName( defaultCatalog ) : null; } } else { - catalogFilter = toMetaDataObjectName( catalog ); + return toMetaDataObjectName( catalog ); } } else { - catalogFilter = null; + return null; } + } - if ( nameQualifierSupport.supportsSchemas() ) { + private String getSchemaFilter(Identifier schema) { + if ( supportsSchemas() ) { if ( schema == null ) { // 1) look in current namespace final String currentSchemaFilter = getCurrentSchemaFilter( getJdbcEnvironment() ); if ( currentSchemaFilter != null ) { - schemaFilter = currentSchemaFilter; + return currentSchemaFilter; } else { - if ( extractionContext.getDefaultSchema() != null ) { - // 2) look in default namespace - schemaFilter = toMetaDataObjectName( extractionContext.getDefaultSchema() ); - } - else { - schemaFilter = null; - } + // 2) look in default namespace + final Identifier defaultSchema = extractionContext.getDefaultSchema(); + return defaultSchema != null ? toMetaDataObjectName( defaultSchema ) : null; } } else { - schemaFilter = toMetaDataObjectName( schema ); + return toMetaDataObjectName( schema ); } } else { - schemaFilter = null; - } - - try { - return processTableResultSet( - catalogFilter, - schemaFilter, - "%", - tableTypes, - resultSet -> { - final NameSpaceTablesInformation tablesInformation = - extractNameSpaceTablesInformation( resultSet ); - populateTablesWithColumns( catalogFilter, schemaFilter, tablesInformation ); - return tablesInformation; - } ); - } - catch (SQLException sqlException) { - throw convertSQLException( sqlException, "Error accessing table metadata" ); + return null; } } @@ -655,7 +650,7 @@ protected ColumnInformationImpl columnInformation(TableInformation tableInformat private NameSpaceTablesInformation extractNameSpaceTablesInformation(ResultSet resultSet) throws SQLException { - final NameSpaceTablesInformation tables = new NameSpaceTablesInformation( getIdentifierHelper() ); + final var tables = new NameSpaceTablesInformation( getIdentifierHelper() ); while ( resultSet.next() ) { tables.addTableInformation( extractTableInformation( resultSet ) ); } @@ -718,65 +713,51 @@ private TableInformation locateTableInNamespace( Identifier catalog, Identifier schema, Identifier tableName) { - final Identifier catalogToUse; - final Identifier schemaToUse; - - final String catalogFilter; - final String schemaFilter; + final String catalogFilter = catalogFilter( catalog ); + final String schemaFilter = schemaFilter( schema ); + final Identifier catalogToUse = supportsCatalogs() ? catalog : null; + final Identifier schemaToUse = supportsSchemas() ? 
schema : null; + final String tableNameFilter = toMetaDataObjectName( tableName ); + try { + return processTableResultSet( + catalogFilter, + schemaFilter, + tableNameFilter, + tableTypes, + resultSet -> extractTableInformation( catalogToUse, schemaToUse, tableName, resultSet ) + ); - final NameQualifierSupport nameQualifierSupport = getNameQualifierSupport(); + } + catch (SQLException sqlException) { + throw convertSQLException( sqlException, "Error accessing table metadata" ); + } + } - if ( nameQualifierSupport.supportsCatalogs() ) { + private String catalogFilter(Identifier catalog) { + if ( supportsCatalogs() ) { if ( catalog == null ) { - String defaultCatalog; try { - defaultCatalog = extractionContext.getJdbcConnection().getCatalog(); + return getConnection().getCatalog(); } catch (SQLException ignore) { - defaultCatalog = ""; + return ""; } - catalogToUse = null; - catalogFilter = defaultCatalog; } else { - catalogToUse = catalog; - catalogFilter = toMetaDataObjectName( catalog ); + return toMetaDataObjectName( catalog ); } } else { - catalogToUse = null; - catalogFilter = null; + return null; } + } - if ( nameQualifierSupport.supportsSchemas() ) { - if ( schema == null ) { - schemaToUse = null; - schemaFilter = ""; - } - else { - schemaToUse = schema; - schemaFilter = toMetaDataObjectName( schema ); - } + private String schemaFilter(Identifier schema) { + if ( supportsSchemas() ) { + return schema == null ? "" : toMetaDataObjectName( schema ); } else { - schemaToUse = null; - schemaFilter = null; - } - - final String tableNameFilter = toMetaDataObjectName( tableName ); - - try { - return processTableResultSet( - catalogFilter, - schemaFilter, - tableNameFilter, - tableTypes, - resultSet -> extractTableInformation( catalogToUse, schemaToUse, tableName, resultSet ) - ); - - } - catch (SQLException sqlException) { - throw convertSQLException( sqlException, "Error accessing table metadata" ); + return null; } } @@ -784,6 +765,14 @@ private NameQualifierSupport getNameQualifierSupport() { return getJdbcEnvironment().getNameQualifierSupport(); } + private boolean supportsCatalogs() { + return getNameQualifierSupport().supportsCatalogs(); + } + + private boolean supportsSchemas() { + return getNameQualifierSupport().supportsSchemas(); + } + private TableInformation extractTableInformation( Identifier catalog, Identifier schema, @@ -826,24 +815,29 @@ private TableInformation extractTableInformation( protected abstract String getResultSetTableTypesPhysicalTableConstant(); protected boolean isPhysicalTableType(String tableType) { + final boolean isTableType = + getResultSetTableTypesPhysicalTableConstant() + .equalsIgnoreCase( tableType ); if ( extraPhysicalTableTypes == null ) { - return getResultSetTableTypesPhysicalTableConstant().equalsIgnoreCase( tableType ); + return isTableType; } else { - if ( getResultSetTableTypesPhysicalTableConstant().equalsIgnoreCase( tableType ) ) { + if ( isTableType ) { return true; } - for ( String extraPhysicalTableType : extraPhysicalTableTypes ) { - if ( extraPhysicalTableType.equalsIgnoreCase( tableType ) ) { - return true; + else { + for ( String extraPhysicalTableType : extraPhysicalTableTypes ) { + if ( extraPhysicalTableType.equalsIgnoreCase( tableType ) ) { + return true; + } } + return false; } - return false; } } protected void addColumns(TableInformation tableInformation) { - final QualifiedTableName tableName = tableInformation.getName(); + final var tableName = tableInformation.getName(); final Identifier catalog = tableName.getCatalogName(); 
final Identifier schema = tableName.getSchemaName(); try { @@ -891,7 +885,7 @@ protected abstract T processPrimaryKeysResultSet( @Override public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation) { - final QualifiedTableName tableName = tableInformation.getName(); + final var tableName = tableInformation.getName(); final Identifier catalog = tableName.getCatalogName(); final Identifier schema = tableName.getSchemaName(); try { @@ -912,20 +906,20 @@ public PrimaryKeyInformation getPrimaryKey(TableInformationImpl tableInformation private PrimaryKeyInformation extractPrimaryKeyInformation(TableInformation tableInformation, ResultSet resultSet) throws SQLException { - final List pkColumns = new ArrayList<>(); + final List columns = new ArrayList<>(); boolean firstPass = true; - Identifier pkIdentifier = null; + Identifier primaryKeyIdentifier = null; while ( resultSet.next() ) { final String currentPkName = resultSet.getString( getResultSetPrimaryKeyNameLabel() ); - final Identifier currentPkIdentifier = + final Identifier currentPrimaryKeyIdentifier = currentPkName == null ? null : toIdentifier( currentPkName ); if ( firstPass ) { - pkIdentifier = currentPkIdentifier; + primaryKeyIdentifier = currentPrimaryKeyIdentifier; firstPass = false; } else { - if ( !Objects.equals( pkIdentifier, currentPkIdentifier ) ) { + if ( !Objects.equals( primaryKeyIdentifier, currentPrimaryKeyIdentifier ) ) { throw new SchemaExtractionException( "Encountered primary keys differing name on table " + tableInformation.getName().toString() ); } @@ -934,12 +928,12 @@ private PrimaryKeyInformation extractPrimaryKeyInformation(TableInformation tabl final int columnPosition = resultSet.getInt( getResultSetColumnPositionColumn() ); final int index = columnPosition - 1; // Fill up the array list with nulls up to the desired index, because some JDBC drivers don't return results ordered by column position - while ( pkColumns.size() <= index ) { - pkColumns.add( null ); + while ( columns.size() <= index ) { + columns.add( null ); } final Identifier columnIdentifier = toIdentifier( resultSet.getString( getResultSetColumnNameLabel() ) ); - pkColumns.set( index, tableInformation.getColumn( columnIdentifier ) ); + columns.set( index, tableInformation.getColumn( columnIdentifier ) ); } if ( firstPass ) { // we did not find any results (no pk) @@ -947,13 +941,13 @@ private PrimaryKeyInformation extractPrimaryKeyInformation(TableInformation tabl } else { // validate column list is properly contiguous - for ( int i = 0; i < pkColumns.size(); i++ ) { - if ( pkColumns.get( i ) == null ) { + for ( int i = 0; i < columns.size(); i++ ) { + if ( columns.get( i ) == null ) { throw new SchemaExtractionException( "Primary Key information was missing for KEY_SEQ = " + ( i+1) ); } } // build the return - return new PrimaryKeyInformationImpl( pkIdentifier, pkColumns ); + return new PrimaryKeyInformationImpl( primaryKeyIdentifier, columns ); } } @@ -1037,7 +1031,7 @@ protected abstract T processIndexInfoResultSet( @Override public Iterable getIndexes(TableInformation tableInformation) { - final QualifiedTableName tableName = tableInformation.getName(); + final var tableName = tableInformation.getName(); final Identifier catalog = tableName.getCatalogName(); final Identifier schema = tableName.getSchemaName(); @@ -1055,16 +1049,10 @@ public Iterable getIndexes(TableInformation tableInformation) != DatabaseMetaData.tableIndexStatistic ) { final Identifier indexIdentifier = toIdentifier( resultSet.getString( 
getResultSetIndexNameLabel() ) ); - IndexInformationImpl.Builder builder = builders.get( indexIdentifier ); - if ( builder == null ) { - builder = IndexInformationImpl.builder( indexIdentifier ); - builders.put( indexIdentifier, builder ); - } - + var builder = indexInformationBuilder( builders, indexIdentifier ); final Identifier columnIdentifier = toIdentifier( resultSet.getString( getResultSetColumnNameLabel() ) ); - final ColumnInformation columnInformation = - tableInformation.getColumn( columnIdentifier ); + final var columnInformation = tableInformation.getColumn( columnIdentifier ); if ( columnInformation == null ) { // See HHH-10191: this may happen when dealing with Oracle/PostgreSQL function indexes CORE_LOGGER.logCannotLocateIndexColumnInformation( @@ -1087,14 +1075,25 @@ public Iterable getIndexes(TableInformation tableInformation) + tableInformation.getName() ); } - final List indexes = new ArrayList<>(); - for ( IndexInformationImpl.Builder builder : builders.values() ) { - IndexInformationImpl index = builder.build(); + final List indexes = new ArrayList<>( builders.size() ); + for ( var builder : builders.values() ) { + final var index = builder.build(); indexes.add( index ); } return indexes; } + private static IndexInformationImpl.Builder indexInformationBuilder( + Map builders, + Identifier indexIdentifier) { + var builder = builders.get( indexIdentifier ); + if ( builder == null ) { + builder = IndexInformationImpl.builder( indexIdentifier ); + builders.put( indexIdentifier, builder ); + } + return builder; + } + /** * Must do the following: *
    @@ -1256,22 +1255,20 @@ protected abstract T processCrossReferenceResultSet( @Override public Iterable getForeignKeys(TableInformation tableInformation) { - final QualifiedTableName tableName = tableInformation.getName(); + final var tableName = tableInformation.getName(); final Identifier catalog = tableName.getCatalogName(); final Identifier schema = tableName.getSchemaName(); - final String catalogFilter = catalog == null ? "" : catalog.getText(); final String schemaFilter = schema == null ? "" : schema.getText(); - - final Map fkBuilders = new HashMap<>(); + final Map builders = new HashMap<>(); try { final String table = tableInformation.getName().getTableName().getText(); processImportedKeysResultSet( catalogFilter, schemaFilter, table, resultSet -> { - process( tableInformation, resultSet, fkBuilders ); + process( tableInformation, resultSet, builders ); return null; } ); - final Dialect dialect = getJdbcEnvironment().getDialect(); + final var dialect = getJdbcEnvironment().getDialect(); if ( dialect.useCrossReferenceForeignKeys() ) { processCrossReferenceResultSet( null, @@ -1281,7 +1278,7 @@ public Iterable getForeignKeys(TableInformation tableInfo schemaFilter, table, resultSet -> { - process( tableInformation, resultSet, fkBuilders ); + process( tableInformation, resultSet, builders ); return null; } ); @@ -1293,12 +1290,11 @@ public Iterable getForeignKeys(TableInformation tableInfo + tableInformation.getName() ); } - final List fks = new ArrayList<>(); - for ( ForeignKeyBuilder fkBuilder : fkBuilders.values() ) { - ForeignKeyInformation fk = fkBuilder.build(); - fks.add( fk ); + final List foreignKeys = new ArrayList<>( builders.size() ); + for ( var foreignKeyBuilder : builders.values() ) { + foreignKeys.add( foreignKeyBuilder.build() ); } - return fks; + return foreignKeys; } private void process( @@ -1307,55 +1303,61 @@ private void process( Map fkBuilders) throws SQLException { while ( resultSet.next() ) { - // IMPL NOTE : The builder is mainly used to collect the column reference mappings - final Identifier fkIdentifier = toIdentifier( resultSet.getString( getResultSetForeignKeyLabel() ) ); - ForeignKeyBuilder fkBuilder = fkBuilders.get( fkIdentifier ); - if ( fkBuilder == null ) { - fkBuilder = generateForeignKeyBuilder( fkIdentifier ); - fkBuilders.put( fkIdentifier, fkBuilder ); - } - - final TableInformation pkTableInformation = extractionContext.getDatabaseObjectAccess() - .locateTableInformation( extractPrimaryKeyTableName( resultSet ) ); - if ( pkTableInformation != null ) { + // IMPL NOTE: The builder is mainly used to collect the column reference mappings + final Identifier foreignKeyIdentifier = + toIdentifier( resultSet.getString( getResultSetForeignKeyLabel() ) ); + final var foreignKeyBuilder = getForeignKeyBuilder( fkBuilders, foreignKeyIdentifier ); + final var primaryKeyTableInformation = + extractionContext.getDatabaseObjectAccess() + .locateTableInformation( extractPrimaryKeyTableName( resultSet ) ); + if ( primaryKeyTableInformation != null ) { // the assumption here is that we have not seen this table already based on fully-qualified name // during previous step of building all table metadata so most likely this is // not a match based solely on schema/catalog and that another row in this result set // should match. 
- final Identifier fkColumnIdentifier = + final Identifier foreignKeyColumnIdentifier = toIdentifier( resultSet.getString( getResultSetForeignKeyColumnNameLabel() ) ); final Identifier pkColumnIdentifier = toIdentifier( resultSet.getString( getResultSetPrimaryKeyColumnNameLabel() ) ); - fkBuilder.addColumnMapping( - tableInformation.getColumn( fkColumnIdentifier ), - pkTableInformation.getColumn( pkColumnIdentifier ) + foreignKeyBuilder.addColumnMapping( + tableInformation.getColumn( foreignKeyColumnIdentifier ), + primaryKeyTableInformation.getColumn( pkColumnIdentifier ) ); } } } + private ForeignKeyBuilder getForeignKeyBuilder( + Map builders, Identifier foreignKeyIdentifier) { + var foreignKeyBuilder = builders.get( foreignKeyIdentifier ); + if ( foreignKeyBuilder == null ) { + foreignKeyBuilder = generateForeignKeyBuilder( foreignKeyIdentifier ); + builders.put( foreignKeyIdentifier, foreignKeyBuilder ); + } + return foreignKeyBuilder; + } + private ForeignKeyBuilder generateForeignKeyBuilder(Identifier fkIdentifier) { return new ForeignKeyBuilderImpl( fkIdentifier ); } protected interface ForeignKeyBuilder { ForeignKeyBuilder addColumnMapping(ColumnInformation referencing, ColumnInformation referenced); - ForeignKeyInformation build(); } protected static class ForeignKeyBuilderImpl implements ForeignKeyBuilder { - private final Identifier fkIdentifier; + private final Identifier foreignKeyIdentifier; private final List columnMappingList = new ArrayList<>(); - public ForeignKeyBuilderImpl(Identifier fkIdentifier) { - this.fkIdentifier = fkIdentifier; + public ForeignKeyBuilderImpl(Identifier foreignKeyIdentifier) { + this.foreignKeyIdentifier = foreignKeyIdentifier; } @Override public ForeignKeyBuilder addColumnMapping(ColumnInformation referencing, ColumnInformation referenced) { - columnMappingList.add( new ForeignKeyInformationImpl.ColumnReferenceMappingImpl( referencing, referenced ) ); + columnMappingList.add( new ColumnReferenceMappingImpl( referencing, referenced ) ); return this; } @@ -1364,10 +1366,10 @@ public ForeignKeyInformationImpl build() { if ( columnMappingList.isEmpty() ) { throw new SchemaManagementException( "Attempt to resolve foreign key metadata from JDBC metadata failed to find " + - "column mappings for foreign key named [" + fkIdentifier.getText() + "]" + "column mappings for foreign key named [" + foreignKeyIdentifier.getText() + "]" ); } - return new ForeignKeyInformationImpl( fkIdentifier, columnMappingList ); + return new ForeignKeyInformationImpl( foreignKeyIdentifier, columnMappingList ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java index 1bbd3c7a9703..1f39f9a72643 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/DatabaseInformationImpl.java @@ -44,33 +44,33 @@ public DatabaseInformationImpl( SchemaManagementTool tool) throws SQLException { this.jdbcEnvironment = jdbcEnvironment; this.context = context; - this.extractionContext = tool.getExtractionTool().createExtractionContext( - serviceRegistry, - jdbcEnvironment, - context, - ddlTransactionIsolator, - this - ); - - this.extractor = tool.getExtractionTool().createInformationExtractor( extractionContext ); - + final var extractionTool = tool.getExtractionTool(); + extractionContext = + 
extractionTool.createExtractionContext( + serviceRegistry, + jdbcEnvironment, + context, + ddlTransactionIsolator, + this + ); + extractor = extractionTool.createInformationExtractor( extractionContext ); // because we do not have defined a way to locate sequence info by name initializeSequences(); } + private static QualifiedSequenceName unqualifiedSequenceName(QualifiedSequenceName sequenceName) { + return new QualifiedSequenceName( null, null, sequenceName.getSequenceName() ); + } + private void initializeSequences() throws SQLException { - Iterable itr = jdbcEnvironment.getDialect() - .getSequenceInformationExtractor() - .extractMetadata( extractionContext ); - for ( SequenceInformation sequenceInformation : itr ) { + final var sequences = + jdbcEnvironment.getDialect().getSequenceInformationExtractor() + .extractMetadata( extractionContext ); + for ( var sequenceInformation : sequences ) { sequenceInformationMap.put( // for now, follow the legacy behavior of storing just the // unqualified sequence name. - new QualifiedSequenceName( - null, - null, - sequenceInformation.getSequenceName().getSequenceName() - ), + unqualifiedSequenceName( sequenceInformation.getSequenceName() ), sequenceInformation ); } @@ -107,7 +107,6 @@ public TableInformation getTableInformation(QualifiedTableName tableName) { if ( tableName.getObjectName() == null ) { throw new IllegalArgumentException( "Passed table name cannot be null" ); } - return extractor.getTable( context.catalogWithDefault( tableName.getCatalogName() ), context.schemaWithDefault( tableName.getSchemaName() ), @@ -153,9 +152,8 @@ public TableInformation locateTableInformation(QualifiedTableName tableName) { public SequenceInformation locateSequenceInformation(QualifiedSequenceName sequenceName) { // again, follow legacy behavior if ( sequenceName.getCatalogName() != null || sequenceName.getSchemaName() != null ) { - sequenceName = new QualifiedSequenceName( null, null, sequenceName.getSequenceName() ); + sequenceName = unqualifiedSequenceName( sequenceName ); } - return sequenceInformationMap.get( sequenceName ); } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ForeignKeyInformationImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ForeignKeyInformationImpl.java index 91f9eefa910c..e02133092e5b 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ForeignKeyInformationImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/ForeignKeyInformationImpl.java @@ -14,19 +14,19 @@ * @author Steve Ebersole */ public class ForeignKeyInformationImpl implements ForeignKeyInformation { - private final Identifier fkIdentifier; + private final Identifier foreignKeyIdentifier; private final List columnMappingList; public ForeignKeyInformationImpl( - Identifier fkIdentifier, + Identifier foreignKeyIdentifier, List columnMappingList) { - this.fkIdentifier = fkIdentifier; + this.foreignKeyIdentifier = foreignKeyIdentifier; this.columnMappingList = columnMappingList; } @Override public Identifier getForeignKeyIdentifier() { - return fkIdentifier; + return foreignKeyIdentifier; } @Override @@ -34,15 +34,8 @@ public Iterable getColumnReferenceMappings() { return columnMappingList; } - public static class ColumnReferenceMappingImpl implements ColumnReferenceMapping { - private final ColumnInformation referencing; - private final ColumnInformation referenced; - - public ColumnReferenceMappingImpl(ColumnInformation referencing, 
ColumnInformation referenced) { - this.referencing = referencing; - this.referenced = referenced; - } - + public record ColumnReferenceMappingImpl(ColumnInformation referencing, ColumnInformation referenced) + implements ColumnReferenceMapping { @Override public ColumnInformation getReferencingColumnMetadata() { return referencing; diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/InformationExtractorJdbcDatabaseMetaDataImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/InformationExtractorJdbcDatabaseMetaDataImpl.java index 02f5cf95da33..e9a7c816a70f 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/InformationExtractorJdbcDatabaseMetaDataImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/InformationExtractorJdbcDatabaseMetaDataImpl.java @@ -5,18 +5,21 @@ package org.hibernate.tool.schema.extract.internal; import java.sql.DatabaseMetaData; -import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.StringTokenizer; import org.hibernate.boot.model.naming.DatabaseIdentifier; import org.hibernate.boot.model.naming.Identifier; -import org.hibernate.boot.model.relational.QualifiedTableName; import org.hibernate.dialect.Dialect; import org.hibernate.tool.schema.extract.spi.ExtractionContext; import org.hibernate.tool.schema.extract.spi.TableInformation; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.sql.DatabaseMetaData.columnNoNulls; +import static java.sql.DatabaseMetaData.columnNullable; + /** * Implementation of the InformationExtractor contract which uses the standard JDBC {@link java.sql.DatabaseMetaData} * API for extraction. 
@@ -42,7 +45,7 @@ protected String getResultSetTableTypesPhysicalTableConstant() { @Override public T processCatalogsResultSet(ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = getJdbcDatabaseMetaData().getCatalogs() ) { + try ( var resultSet = getJdbcDatabaseMetaData().getCatalogs() ) { return processor.process( resultSet ); } } @@ -53,7 +56,7 @@ protected T processSchemaResultSet( String schemaPattern, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getSchemas( catalog, schemaPattern ) ) { return processor.process( resultSet ); @@ -68,7 +71,7 @@ protected T processTableResultSet( String[] types, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getTables( catalog, schemaPattern, tableNamePattern, types) ) { return processor.process( resultSet ); @@ -83,7 +86,7 @@ protected T processColumnsResultSet( String columnNamePattern, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getColumns( catalog, schemaPattern, tableNamePattern, columnNamePattern ) ) { return processor.process( resultSet ); @@ -97,7 +100,7 @@ protected T processPrimaryKeysResultSet( Identifier tableName, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getPrimaryKeys( catalogFilter, schemaFilter, tableName.getText() ) ) { return processor.process( resultSet ); @@ -113,7 +116,7 @@ protected T processIndexInfoResultSet( boolean approximate, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getIndexInfo( catalog, schema, table, unique, approximate ) ) { return processor.process( resultSet ); @@ -127,7 +130,7 @@ protected T processImportedKeysResultSet( String table, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getImportedKeys( catalog, schema, table ) ) { return processor.process( resultSet ); @@ -144,7 +147,7 @@ protected T processCrossReferenceResultSet( String foreignTable, ExtractionContext.ResultSetProcessor processor) throws SQLException { - try ( ResultSet resultSet = + try ( var resultSet = getJdbcDatabaseMetaData() .getCrossReference( parentCatalog, parentSchema, parentTable, foreignCatalog, foreignSchema, foreignTable) ) { @@ -153,24 +156,22 @@ protected T processCrossReferenceResultSet( } protected void addColumns(TableInformation tableInformation) { - final Dialect dialect = getJdbcEnvironment().getDialect(); - final ExtractionContext extractionContext = getExtractionContext(); - + final var dialect = getJdbcEnvironment().getDialect(); + final var extractionContext = getExtractionContext(); // We use this dummy query to retrieve the table information through the ResultSetMetaData // Significantly better than using DatabaseMetaData especially on Oracle with synonyms enabled - final QualifiedTableName qualifiedTableName = tableInformation.getName(); + final var qualifiedTableName = tableInformation.getName(); final String tableName = extractionContext.getSqlStringGenerationContext() - // The name comes from the database, so the case is correct - // But we quote 
here to avoid issues with reserved words + // The name comes from the database, so the case is correct, + // but we quote here to avoid issues with reserved words .format( qualifiedTableName.quote() ); - try { extractionContext.getQueryResults( "select * from " + tableName + " where 1=0", null, resultSet -> { - final ResultSetMetaData metaData = resultSet.getMetaData(); + final var metaData = resultSet.getMetaData(); final int columnCount = metaData.getColumnCount(); for ( int i = 1; i <= columnCount; i++ ) { tableInformation.addColumn( columnInformation( tableInformation, metaData, i, dialect ) ); @@ -186,15 +187,15 @@ protected void addColumns(TableInformation tableInformation) { private static Boolean interpretNullable(int nullable) { return switch ( nullable ) { - case ResultSetMetaData.columnNullable -> Boolean.TRUE; - case ResultSetMetaData.columnNoNulls -> Boolean.FALSE; + case columnNullable -> TRUE; + case columnNoNulls -> FALSE; default -> null; }; } private static ColumnInformationImpl columnInformation( TableInformation tableInformation, ResultSetMetaData metaData, int i, Dialect dialect) - throws SQLException { + throws SQLException { final String columnName = metaData.getColumnName( i ); final int columnType = metaData.getColumnType( i ); final String typeName = diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorLegacyImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorLegacyImpl.java index 8e30484f9200..3e973472bac6 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorLegacyImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorLegacyImpl.java @@ -36,7 +36,7 @@ public Iterable extractMetadata(ExtractionContext extractio return extractionContext.getQueryResults( lookupSql, null, - (ExtractionContext.ResultSetProcessor>) resultSet -> { + resultSet -> { final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment() .getIdentifierHelper(); final List sequenceInformationList = new ArrayList<>(); diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorMariaDBDatabaseImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorMariaDBDatabaseImpl.java index 00033c999a7a..d9c31fe67902 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorMariaDBDatabaseImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorMariaDBDatabaseImpl.java @@ -7,15 +7,15 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import org.hibernate.boot.model.naming.Identifier; import org.hibernate.boot.model.relational.QualifiedSequenceName; -import org.hibernate.engine.jdbc.env.spi.IdentifierHelper; import org.hibernate.tool.schema.extract.spi.ExtractionContext; import org.hibernate.tool.schema.extract.spi.SequenceInformation; +import static java.util.Collections.emptyList; + /** * @author Vlad Mihalcea, Magnus Hagström */ @@ -35,55 +35,53 @@ public class SequenceInformationExtractorMariaDBDatabaseImpl extends SequenceInf @Override public Iterable extractMetadata(ExtractionContext extractionContext) throws SQLException { final String lookupSql 
= extractionContext.getJdbcEnvironment().getDialect().getQuerySequencesString(); - // *should* never happen, but to be safe in the interest of performance... - if (lookupSql == null) { + if ( lookupSql == null ) { return SequenceInformationExtractorNoOpImpl.INSTANCE.extractMetadata(extractionContext); } - final List sequenceNames = extractionContext.getQueryResults( lookupSql, null, resultSet -> { - final List sequences = new ArrayList<>(); - while ( resultSet.next() ) { - sequences.add( resultSetSequenceName( resultSet ) ); - } - return sequences; - }); + final List sequenceNames = + extractionContext.getQueryResults( lookupSql, null, resultSet -> { + final List sequences = new ArrayList<>(); + while ( resultSet.next() ) { + sequences.add( resultSetSequenceName( resultSet ) ); + } + return sequences; + }); - if ( !sequenceNames.isEmpty() ) { - StringBuilder sequenceInfoQueryBuilder = new StringBuilder(); + if ( sequenceNames.isEmpty() ) { + return emptyList(); + } + else { + final var sequenceInfoQueryBuilder = new StringBuilder(); for ( String sequenceName : sequenceNames ) { - if ( sequenceInfoQueryBuilder.length() > 0 ) { + if ( !sequenceInfoQueryBuilder.isEmpty() ) { sequenceInfoQueryBuilder.append( UNION_ALL ); } - sequenceInfoQueryBuilder.append( String.format( SQL_SEQUENCE_QUERY, sequenceName, Identifier.toIdentifier( sequenceName ) ) ); + sequenceInfoQueryBuilder.append( + String.format( SQL_SEQUENCE_QUERY, sequenceName, + Identifier.toIdentifier( sequenceName ) ) ); } return extractionContext.getQueryResults( sequenceInfoQueryBuilder.toString(), null, - (ExtractionContext.ResultSetProcessor>) resultSet -> { + resultSet -> { final List sequenceInformationList = new ArrayList<>(); - final IdentifierHelper identifierHelper = extractionContext.getJdbcEnvironment() - .getIdentifierHelper(); - + final var identifierHelper = + extractionContext.getJdbcEnvironment().getIdentifierHelper(); while ( resultSet.next() ) { - SequenceInformation sequenceInformation = new SequenceInformationImpl( - new QualifiedSequenceName( - null, - null, - identifierHelper.toIdentifier( resultSetSequenceName(resultSet) ) - ), - resultSetStartValueSize(resultSet), - resultSetMinValue(resultSet), - resultSetMaxValue(resultSet), - resultSetIncrementValue(resultSet) - ); - sequenceInformationList.add(sequenceInformation); + sequenceInformationList.add( new SequenceInformationImpl( + new QualifiedSequenceName( null, null, + identifierHelper.toIdentifier( resultSetSequenceName( resultSet ) ) ), + resultSetStartValueSize( resultSet ), + resultSetMinValue( resultSet ), + resultSetMaxValue( resultSet ), + resultSetIncrementValue( resultSet ) + ) ); } return sequenceInformationList; }); } - - return Collections.emptyList(); } protected String resultSetSequenceName(ResultSet resultSet) throws SQLException { diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorNoOpImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorNoOpImpl.java index 248a6d862932..8540cb0ff36f 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorNoOpImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/SequenceInformationExtractorNoOpImpl.java @@ -5,12 +5,13 @@ package org.hibernate.tool.schema.extract.internal; import java.sql.SQLException; -import java.util.Collections; import org.hibernate.tool.schema.extract.spi.ExtractionContext; import 
org.hibernate.tool.schema.extract.spi.SequenceInformation; import org.hibernate.tool.schema.extract.spi.SequenceInformationExtractor; +import static java.util.Collections.emptyList; + /** * @author Steve Ebersole */ @@ -21,8 +22,7 @@ public class SequenceInformationExtractorNoOpImpl implements SequenceInformation public static final SequenceInformationExtractorNoOpImpl INSTANCE = new SequenceInformationExtractorNoOpImpl(); @Override - @SuppressWarnings("unchecked") public Iterable extractMetadata(ExtractionContext extractionContext) throws SQLException { - return Collections.emptyList(); + return emptyList(); } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/TableInformationImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/TableInformationImpl.java index 5f395e54837a..8bf5b8f8ca2f 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/TableInformationImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/extract/internal/TableInformationImpl.java @@ -91,12 +91,11 @@ public Iterable getForeignKeys() { protected Map foreignKeys() { if ( foreignKeys == null ) { - final Map fkMap = new HashMap<>(); - final Iterable fks = extractor.getForeignKeys( this ); - for ( ForeignKeyInformation fk : fks ) { - fkMap.put( fk.getForeignKeyIdentifier(), fk ); + final Map result = new HashMap<>(); + for ( var foreignKeyInformation : extractor.getForeignKeys( this ) ) { + result.put( foreignKeyInformation.getForeignKeyIdentifier(), foreignKeyInformation ); } - this.foreignKeys = fkMap; + foreignKeys = result; } return foreignKeys; } @@ -117,8 +116,7 @@ public Iterable getIndexes() { protected Map indexes() { if ( indexes == null ) { final Map indexMap = new HashMap<>(); - final Iterable indexes = extractor.getIndexes( this ); - for ( IndexInformation index : indexes ) { + for ( var index : extractor.getIndexes( this ) ) { indexMap.put( index.getIndexIdentifier(), index ); } this.indexes = indexMap; diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java index b9901e4de62f..bed0045b9031 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaCreatorImpl.java @@ -133,7 +133,7 @@ public void doCreation( target.release(); } catch (Exception e) { - CORE_LOGGER.debugf( "Problem releasing GenerationTarget [%s]: %s", target, e.getMessage() ); + CORE_LOGGER.problemReleasingGenerationTarget( target, e ); } } } diff --git a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java index a821b39f9281..9ae06336f28a 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaDropperImpl.java @@ -629,7 +629,7 @@ public void perform(ServiceRegistry serviceRegistry) { // implicitly we do not "halt on error", but we do want to // report the problem CORE_LOGGER.unsuccessfulSchemaManagementCommand( command ); - CORE_LOGGER.debugf( e, "Error performing delayed DROP command [%s]", command ); + CORE_LOGGER.unsuccessfulDelayedDropCommand( e ); } } } diff --git 
a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaPopulatorImpl.java b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaPopulatorImpl.java index 3ab46bead498..e4d58aadb13e 100644 --- a/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaPopulatorImpl.java +++ b/hibernate-core/src/main/java/org/hibernate/tool/schema/internal/SchemaPopulatorImpl.java @@ -71,7 +71,7 @@ public void doPopulation(Dialect dialect, ExecutionOptions options, GenerationTa target.release(); } catch (Exception e) { - CORE_LOGGER.debugf( "Problem releasing GenerationTarget [%s] : %s", target, e.getMessage() ); + CORE_LOGGER.problemReleasingGenerationTarget( target, e ); } } } diff --git a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java index daba47cc51a2..d60943cb7ba1 100644 --- a/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java +++ b/hibernate-core/src/main/java/org/hibernate/type/CollectionType.java @@ -822,8 +822,8 @@ else if ( overridingEager != null ? overridingEager : !persister.isLazy() ) { persistenceContext.addCollectionHolder( collection ); } if ( CORE_LOGGER.isTraceEnabled() ) { - CORE_LOGGER.trace( "Created collection wrapper: " - + collectionInfoString( persister, collection, key, session ) ); + CORE_LOGGER.createdCollectionWrapper( + collectionInfoString( persister, collection, key, session ) ); } return collection; }
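The logging hunks in SchemaCreatorImpl, SchemaDropperImpl, SchemaPopulatorImpl and CollectionType replace ad-hoc CORE_LOGGER.debugf(...)/trace(...) calls with dedicated methods (problemReleasingGenerationTarget, unsuccessfulDelayedDropCommand, createdCollectionWrapper). Those methods are declared on CoreMessageLogger, which this patch does not touch, so the following is only a sketch of the general JBoss Logging message-logger shape such declarations take; the interface name, message ids and message texts are invented for illustration, and only the method names and parameter shapes are taken from the call sites above.

import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;

// Sketch only: the ids and message texts below are placeholders, not Hibernate's real ones.
@MessageLogger(projectCode = "HHH")
public interface ExampleCoreMessageLogger {

	@LogMessage(level = Logger.Level.DEBUG)
	@Message(id = 990001, value = "Problem releasing GenerationTarget [%s]")
	void problemReleasingGenerationTarget(Object target, @Cause Exception cause);

	@LogMessage(level = Logger.Level.DEBUG)
	@Message(id = 990002, value = "Error performing delayed DROP command")
	void unsuccessfulDelayedDropCommand(@Cause Exception cause);

	@LogMessage(level = Logger.Level.TRACE)
	@Message(id = 990003, value = "Created collection wrapper: %s")
	void createdCollectionWrapper(String collectionInfo);
}

With such a declaration, a guarded concatenation like the old trace call collapses into a single call site (CORE_LOGGER.createdCollectionWrapper( collectionInfoString( ... ) )), and the generated implementation handles the level check and formatting.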
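A small remark on the new getForeignKeyBuilder helper extracted in AbstractInformationExtractorImpl: it lazily creates and caches one builder per foreign-key identifier, which is exactly the shape of Map.computeIfAbsent. The patch keeps the explicit get/put form; the variant below is merely an equivalent alternative, shown only as an illustration and not part of the change.

private ForeignKeyBuilder getForeignKeyBuilder(
		Map<Identifier, ForeignKeyBuilder> builders, Identifier foreignKeyIdentifier) {
	// create the builder on first use and cache it; same effect as the get/put version in the patch
	return builders.computeIfAbsent( foreignKeyIdentifier, this::generateForeignKeyBuilder );
}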