package org.hibernate.id;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
import org.hibernate.HibernateException;
import org.hibernate.MappingException;
import org.hibernate.cfg.ObjectNameNormalizer;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.spi.SessionImplementor;
import org.hibernate.internal.CoreMessageLogger;
import org.hibernate.internal.util.StringHelper;
import org.hibernate.mapping.Table;
import org.hibernate.type.Type;
import org.jboss.logging.Logger;
/**
 * An identifier generator that hands out increasing integral values, seeded from the
 * current maximum primary-key value found in the mapped table(s).
 * <p>
 * The first call to {@link #generate} runs a {@code select max(column)} query (built in
 * {@link #configure}) to discover the starting point; every subsequent call simply bumps
 * an in-memory counter. Consequently the generated values are only unique while no other
 * process inserts into the same table(s).
 */
public class IncrementGenerator implements IdentifierGenerator, Configurable {
	private static final CoreMessageLogger LOG = Logger.getMessageLogger(
			CoreMessageLogger.class,
			IncrementGenerator.class.getName()
	);

	// Identifier type requested by the mapping; selects the integral holder implementation.
	private Class returnClass;
	// The "select max(...)" query; built by configure() and nulled once the seed is fetched.
	private String sql;
	// Running counter; each generate() call returns the current value and increments it.
	private IntegralDataTypeHolder previousValueHolder;

	/**
	 * Produces the next identifier value.
	 * <p>
	 * While {@link #sql} is still non-null the database is consulted for the seed value;
	 * afterwards only the in-memory counter is advanced. Synchronized because the counter
	 * is shared session-factory-wide.
	 *
	 * @param session the session requesting an id
	 * @param object the entity being saved (unused)
	 * @return the next identifier, as an instance of the mapped integral type
	 */
	public synchronized Serializable generate(SessionImplementor session, Object object) throws HibernateException {
		if ( sql != null ) {
			initializePreviousValueHolder( session );
		}
		return previousValueHolder.makeValueThenIncrement();
	}

	/**
	 * Builds the seed query from the generator parameters.
	 * <p>
	 * The column defaults to the mapped primary-key column and the table list to the
	 * mapped table(s); both may be overridden via the {@code column} / {@code tables}
	 * generator parameters. Multiple tables are combined with a {@code union} of
	 * per-table {@code max()} sub-queries.
	 *
	 * @param type the identifier type
	 * @param params generator parameters supplied by the mapping
	 * @param dialect the SQL dialect, used for identifier quoting
	 */
	public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
		returnClass = type.getReturnedClass();

		final ObjectNameNormalizer normalizer =
				( ObjectNameNormalizer ) params.get( PersistentIdentifierGenerator.IDENTIFIER_NORMALIZER );

		// Explicit "column" parameter wins; otherwise fall back to the mapped pk column.
		String column = params.getProperty( "column" );
		if ( column == null ) {
			column = params.getProperty( PersistentIdentifierGenerator.PK );
		}
		column = dialect.quote( normalizer.normalizeIdentifierQuoting( column ) );

		// Explicit "tables" parameter wins; otherwise use the mapped table list.
		String tableList = params.getProperty( "tables" );
		if ( tableList == null ) {
			tableList = params.getProperty( PersistentIdentifierGenerator.TABLES );
		}
		final String[] tables = StringHelper.split( ", ", tableList );

		final String schema = dialect.quote(
				normalizer.normalizeIdentifierQuoting( params.getProperty( PersistentIdentifierGenerator.SCHEMA ) )
		);
		final String catalog = dialect.quote(
				normalizer.normalizeIdentifierQuoting( params.getProperty( PersistentIdentifierGenerator.CATALOG ) )
		);

		final StringBuilder union = new StringBuilder();
		for ( int i = 0; i < tables.length; i++ ) {
			final String tableName = dialect.quote( normalizer.normalizeIdentifierQuoting( tables[i] ) );
			if ( tables.length > 1 ) {
				// Multi-table case: each table contributes its own max() sub-query.
				union.append( "select max(" ).append( column ).append( ") as mx from " );
			}
			union.append( Table.qualify( catalog, schema, tableName ) );
			if ( i < tables.length - 1 ) {
				union.append( " union " );
			}
		}
		if ( tables.length > 1 ) {
			// Wrap the union as a derived table and take the overall maximum from it.
			union.insert( 0, "( " ).append( " ) ids_" );
			column = "ids_.mx";
		}

		sql = "select max(" + column + ") from " + union.toString();
	}

	/**
	 * Runs the seed query and primes {@link #previousValueHolder} with max(column)+1,
	 * or 1 when the table(s) are empty. On success {@link #sql} is cleared so later
	 * {@link #generate} calls skip the database entirely.
	 *
	 * @param session the session whose JDBC coordinator executes the query
	 */
	private void initializePreviousValueHolder(SessionImplementor session) {
		previousValueHolder = IdentifierGeneratorHelper.getIntegralDataTypeHolder( returnClass );

		final boolean debugEnabled = LOG.isDebugEnabled();
		if ( debugEnabled ) {
			LOG.debugf( "Fetching initial value: %s", sql );
		}

		try {
			final PreparedStatement statement = session.getTransactionCoordinator()
					.getJdbcCoordinator()
					.getStatementPreparer()
					.prepareStatement( sql );
			try {
				final ResultSet resultSet = session.getTransactionCoordinator()
						.getJdbcCoordinator()
						.getResultSetReturn()
						.extract( statement );
				try {
					if ( resultSet.next() ) {
						previousValueHolder.initialize( resultSet, 0L ).increment();
					}
					else {
						previousValueHolder.initialize( 1L );
					}
					// Seed obtained; disable further database round-trips.
					sql = null;
					if ( debugEnabled ) {
						LOG.debugf( "First free id: %s", previousValueHolder.makeValue() );
					}
				}
				finally {
					session.getTransactionCoordinator().getJdbcCoordinator().release( resultSet, statement );
				}
			}
			finally {
				session.getTransactionCoordinator().getJdbcCoordinator().release( statement );
			}
		}
		catch (SQLException sqle) {
			throw session.getFactory().getSQLExceptionHelper().convert(
					sqle,
					"could not fetch initial value for increment generator",
					sql
			);
		}
	}
}