Table of Contents
- Spring Boot startup DDL initialization process
- How to customize table alterations
Spring Boot startup DDL initialization process
Tracing the startup code of Spring Boot integrated with Hibernate:
The initialization of SessionFactoryImpl does a great deal of work: it sets up all kinds of resources and calls SchemaManagementToolCoordinator.process, which first runs the script action and then the database action:
performScriptAction( actions.getScriptAction(), metadata, tool, serviceRegistry, executionOptions, configService );
performDatabaseAction( actions.getDatabaseAction(), metadata, tool, serviceRegistry, executionOptions );
This is where the value configured via spring.jpa.hibernate.ddl-auto starts to be processed.
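For reference, the ddl-auto string is interpreted by Hibernate into a schema Action, which decides which tool (creator, dropper, migrator, validator) the coordinator invokes. A minimal standalone sketch of that mapping (illustration only, not part of the traced Spring Boot code):
import org.hibernate.tool.schema.Action;

public class DdlAutoDemo {
    public static void main(String[] args) {
        // spring.jpa.hibernate.ddl-auto=update is passed on to hibernate.hbm2ddl.auto
        Action databaseAction = Action.interpretHbm2ddlSetting("update");
        // Action.UPDATE selects the SchemaMigrator path traced below
        System.out.println(databaseAction == Action.UPDATE); // prints true
    }
}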
During doMigration, the metadata of the existing tables is obtained:
public class GroupedSchemaMigratorImpl extends AbstractSchemaMigrator {
public GroupedSchemaMigratorImpl(HibernateSchemaManagementTool tool, SchemaFilter schemaFilter) {
super(tool, schemaFilter);
}
protected NameSpaceTablesInformation performTablesMigration(Metadata metadata, DatabaseInformation existingDatabase, ExecutionOptions options, Dialect dialect, Formatter formatter, Set<String> exportIdentifiers, boolean tryToCreateCatalogs, boolean tryToCreateSchemas, Set<Identifier> exportedCatalogs, Namespace namespace, GenerationTarget[] targets) {
NameSpaceTablesInformation tablesInformation = new NameSpaceTablesInformation(metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper());
if (this.schemaFilter.includeNamespace(namespace)) {
this.createSchemaAndCatalog(existingDatabase, options, dialect, formatter, tryToCreateCatalogs, tryToCreateSchemas, exportedCatalogs, namespace, targets);
NameSpaceTablesInformation tables = existingDatabase.getTablesInformation(namespace);
Iterator var14 = namespace.getTables().iterator();
Table table;
TableInformation tableInformation;
while(var14.hasNext()) {
table = (Table)var14.next();
if (this.schemaFilter.includeTable(table) && table.isPhysicalTable()) {
this.checkExportIdentifier(table, exportIdentifiers);
tableInformation = tables.getTableInformation(table);
if (tableInformation == null) {
this.createTable(table, dialect, metadata, formatter, options, targets);
} else if (tableInformation != null && tableInformation.isPhysicalTable()) {
tablesInformation.addTableInformation(tableInformation);
this.migrateTable(table, tableInformation, dialect, metadata, formatter, options, targets);
}
}
}
// second pass over the same tables: apply indexes and unique keys
var14 = namespace.getTables().iterator();
while (var14.hasNext()) {
table = (Table) var14.next();
if (this.schemaFilter.includeTable(table) && table.isPhysicalTable()) {
tableInformation = tablesInformation.getTableInformation(table);
if (tableInformation == null || tableInformation.isPhysicalTable()) {
this.applyIndexes(table, tableInformation, dialect, metadata, formatter, options, targets);
this.applyUniqueKeys(table, tableInformation, dialect, metadata, formatter, options, targets);
}
}
}
return tablesInformation;
} else {
return tablesInformation;
}
}
}
How to customize table alterations
Going further down the call chain, we reach the core class HibernateSchemaManagementTool:
This is where the semantics of ddl-auto are implemented.
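To make that concrete, here is a hedged sketch (a standalone illustration, not the coordinator's actual code) that looks up the same SchemaManagementTool from a built ServiceRegistry and asks it for the strategy object behind each ddl-auto value:
import java.util.Map;

import org.hibernate.engine.config.spi.ConfigurationService;
import org.hibernate.service.ServiceRegistry;
import org.hibernate.tool.schema.spi.SchemaCreator;
import org.hibernate.tool.schema.spi.SchemaDropper;
import org.hibernate.tool.schema.spi.SchemaManagementTool;
import org.hibernate.tool.schema.spi.SchemaMigrator;
import org.hibernate.tool.schema.spi.SchemaValidator;

public class SchemaToolLookup {

    // registry is assumed to be a StandardServiceRegistry built as in the code further below
    static void showStrategies(ServiceRegistry registry) {
        SchemaManagementTool tool = registry.getService(SchemaManagementTool.class);
        Map settings = registry.getService(ConfigurationService.class).getSettings();

        SchemaMigrator migrator = tool.getSchemaMigrator(settings);    // ddl-auto=update
        SchemaValidator validator = tool.getSchemaValidator(settings); // ddl-auto=validate
        SchemaCreator creator = tool.getSchemaCreator(settings);       // ddl-auto=create / create-only / create-drop
        SchemaDropper dropper = tool.getSchemaDropper(settings);       // ddl-auto=create / create-drop

        // With the default HibernateSchemaManagementTool this migrator is a
        // GroupedSchemaMigratorImpl (or IndividuallySchemaMigratorImpl), i.e. the class traced above.
        System.out.println(migrator.getClass().getName());
    }
}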
To customize table alterations: the most common need comes from the fact that Hibernate's update can only add things; changes such as modifying a column's length are never applied. Of course, this kind of modification is meant for development and technical exchange only; use it with great caution in production.
Back to the point: to modify a column's length, a few key pieces of data are needed:
- the metadata of the database table
- the metadata of the Entity
Since Hibernate's own machinery, such as the Migrator, can detect missing columns, it can certainly obtain both kinds of metadata as well. They correspond to two parameters: registry and metadata.
With the required data identified, the approach becomes clear:
public void migrateDB(DataSourceProperties dataSource, EntityManager entityManager, String ddlAuto, String dialectStr) {
log.info("schema 生成:{}", dataSource.getUrl());
Set<EntityType<?>> entities = entityManager.getMetamodel().getEntities();
Properties p = new Properties();
// database dialect, i.e. the dialect the generated DDL targets
p.put(AvailableSettings.DIALECT, dialectStr);
// the hbm2ddl action to execute automatically
p.put(AvailableSettings.HBM2DDL_AUTO, ddlAuto);
// statement delimiter, empty by default
p.put(AvailableSettings.HBM2DDL_DELIMITER, ";");
// whether to log the generated SQL
p.put(AvailableSettings.SHOW_SQL, true);
p.put("hibernate.connection.driver_class", dataSource.getDriverClassName());
p.put("hibernate.connection.url", dataSource.getUrl());
p.put("hibernate.connection.username", dataSource.getUsername());
p.put("hibernate.connection.password", dataSource.getPassword());
// whether to read JDBC metadata from the live database (defaults to true); false falls back to the project's own metadata
p.put("hibernate.temp.use_jdbc_metadata_defaults", true);
// p.put("hibernate.temp.use_jdbc_metadata_defaults", false);
ConfigurationHelper.resolvePlaceHolders(p);
try (ServiceRegistry registry = new StandardServiceRegistryBuilder().applySettings(p).build()) {
Map settings = registry.getService(ConfigurationService.class).getSettings();
MetadataSources metadataSources = new MetadataSources(registry);
entities.forEach(entityType -> metadataSources.addAnnotatedClass(entityType.getJavaType()));
Metadata metadata = metadataSources.buildMetadata();
HashMap properties = new HashMap<>();
properties.putAll(registry.getService(ConfigurationService.class).getSettings());
log.info("开始migration");
SchemaManagementToolCoordinator.process(metadata, registry, settings, null);
log.info("开始alter migration");
SchemaAlterTool schemaAlterTool = new SchemaAlterTool(registry);
schemaAlterTool.performTablesAlter(registry, metadata);
}
}
public void migrateDB(String jdbcUrl, DataSourceProperties dataSource, Set<EntityType<?>> entities, String ddlAuto) {
log.info("schema 生成:{}", jdbcUrl);
Properties p = new Properties();
// database dialect, i.e. the dialect the generated DDL targets
p.put(AvailableSettings.DIALECT, MySQL5InnoDBDialect.class.getName());
// the hbm2ddl action to execute automatically
p.put(AvailableSettings.HBM2DDL_AUTO, ddlAuto);
// statement delimiter, empty by default
p.put(AvailableSettings.HBM2DDL_DELIMITER, ";");
// whether to log the generated SQL
p.put(AvailableSettings.SHOW_SQL, true);
p.put("hibernate.connection.driver_class", dataSource.getDriverClassName());
p.put("hibernate.connection.url", jdbcUrl);
p.put("hibernate.connection.username", dataSource.getUsername());
p.put("hibernate.connection.password", dataSource.getPassword());
// whether to read JDBC metadata from the live database (defaults to true); false falls back to the project's own metadata
p.put("hibernate.temp.use_jdbc_metadata_defaults", true);
// p.put("hibernate.temp.use_jdbc_metadata_defaults", false);
ConfigurationHelper.resolvePlaceHolders(p);
try (ServiceRegistry registry = new StandardServiceRegistryBuilder().applySettings(p).build()) {
Map settings = registry.getService(ConfigurationService.class).getSettings();
MetadataSources metadataSources = new MetadataSources(registry);
entities.forEach(entityType -> metadataSources.addAnnotatedClass(entityType.getJavaType()));
Metadata metadata = metadataSources.buildMetadata();
HashMap properties = new HashMap<>();
properties.putAll(registry.getService(ConfigurationService.class).getSettings());
log.info("开始migration");
SchemaManagementToolCoordinator.process(metadata, registry, settings, null);
log.info("开始alter migration");
SchemaAlterTool schemaAlterTool = new SchemaAlterTool(registry);
schemaAlterTool.performTablesAlter(registry, metadata);
}
}
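For completeness, a hedged sketch of how the migrateDB method above could be triggered after startup, assuming it lives in (or is reachable from) a Spring component such as the illustrative runner below (class name and wiring are assumptions, not from the original code):
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.autoconfigure.jdbc.DataSourceProperties;
import org.springframework.stereotype.Component;

@Component
public class SchemaAlterRunner implements ApplicationRunner {

    @PersistenceContext
    private EntityManager entityManager;

    private final DataSourceProperties dataSourceProperties;

    public SchemaAlterRunner(DataSourceProperties dataSourceProperties) {
        this.dataSourceProperties = dataSourceProperties;
    }

    @Override
    public void run(ApplicationArguments args) {
        // "update" keeps Hibernate's normal add-only pass; the SchemaAlterTool pass
        // inside migrateDB then applies the column-length changes on top of it.
        migrateDB(dataSourceProperties, entityManager, "update",
                "org.hibernate.dialect.MySQL5InnoDBDialect");
    }

    // migrateDB(...) as shown above would live in, or be delegated to from, this component
}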
The custom tool that matches Hibernate entity metadata against the database tables:
public class SchemaAlterTool {
private Logger log = LoggerFactory.getLogger(SchemaAlterTool.class);
protected SchemaFilter schemaFilter;
ServiceRegistry registry;
private HibernateSchemaManagementTool tool;
private JdbcContext jdbcContext;
public SchemaAlterTool(ServiceRegistry registry) {
this.registry = registry;
SchemaFilterProvider schemaFilterProvider = registry.getService(StrategySelector.class).resolveDefaultableStrategy(
SchemaFilterProvider.class,
null,
DefaultSchemaFilterProvider.INSTANCE
);
this.schemaFilter = schemaFilterProvider.getMigrateFilter();
}
protected void validateColumnType(Table table, Column column, ColumnInformation columnInformation, Metadata metadata, Dialect dialect) {
boolean typesMatch = column.getSqlTypeCode(metadata) == columnInformation.getTypeCode() || column.getSqlType(dialect, metadata).toLowerCase(Locale.ROOT).startsWith(columnInformation.getTypeName().toLowerCase(Locale.ROOT));
if (!typesMatch) {
throw new SchemaManagementException(String.format("Schema-alter: wrong column type encountered in column [%s] in table [%s]; found [%s (Types#%s)], but expecting [%s (Types#%s)]", column.getName(), table.getQualifiedTableName(), columnInformation.getTypeName().toLowerCase(Locale.ROOT), JdbcTypeNameMapper.getTypeName(columnInformation.getTypeCode()), column.getSqlType().toLowerCase(Locale.ROOT), JdbcTypeNameMapper.getTypeName(column.getSqlTypeCode(metadata))));
}
}
/**
* @param column            the column as defined by the entity model
* @param columnInformation the column as it currently exists in the database
* @return true if no length change is needed, false if the column should be altered
*/
protected boolean validateColumnLength(Column column, ColumnInformation columnInformation, Metadata metadata, Dialect dialect) {
Value value = column.getValue();
String name = value.getType().getName();
if (!"string".equals(name)) {
return true;
}
// 255 is the default length, ignore it
if (column.getLength() == 255) {
return true;
}
// boolean typesMatch = column.getSqlTypeCode(metadata) == columnInformation.getTypeCode() || column.getSqlType(dialect, metadata).toLowerCase(Locale.ROOT).startsWith(columnInformation.getTypeName().toLowerCase(Locale.ROOT));
// if (!typesMatch) {
// return true;
// }
if (column.getSqlType(dialect, metadata).toLowerCase(Locale.ROOT).startsWith("clob")) {
return true;
}
// a hand-written columnDefinition usually yields an unusually long sqlType; leave such columns untouched
String sqlType = column.getSqlType();
if (!StringUtils.isEmpty(sqlType) && sqlType.length() > 24) {
return true;
}
if (column.getLength() != columnInformation.getColumnSize()) {
log.info("column {} length {} does not match the database", column.getName(), column.getLength());
return false;
}
return true;
}
public NameSpaceTablesInformation performTablesAlter(ServiceRegistry registry, Metadata metadata) {
tool = (HibernateSchemaManagementTool) registry.getService(SchemaManagementTool.class);
Map configurationValues = registry.getService(ConfigurationService.class).getSettings();
ExecutionOptions options = new ExecutionOptions() {
@Override
public boolean shouldManageNamespaces() {
return true;
}
@Override
public Map getConfigurationValues() {
return configurationValues;
}
@Override
public ExceptionHandler getExceptionHandler() {
return ExceptionHandlerLoggedImpl.INSTANCE;
}
};
NameSpaceTablesInformation tablesInformation = new NameSpaceTablesInformation(metadata.getDatabase().getJdbcEnvironment().getIdentifierHelper());
jdbcContext = tool.resolveJdbcContext(options.getConfigurationValues());
final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator(jdbcContext);
final DatabaseInformation databaseInformation = Helper.buildDatabaseInformation(
tool.getServiceRegistry(),
isolator,
metadata.getDatabase().getDefaultNamespace().getName()
);
try {
for (Namespace namespace : metadata.getDatabase().getNamespaces()) {
if (schemaFilter.includeNamespace(namespace)) {
alterTables(metadata, databaseInformation, options, jdbcContext.getDialect(), namespace);
}
}
} finally {
try {
databaseInformation.cleanup();
} catch (Exception e) {
log.debug("Problem releasing DatabaseInformation : " + e.getMessage());
}
isolator.release();
}
return tablesInformation;
}
protected void alterTables(Metadata metadata, DatabaseInformation databaseInformation, ExecutionOptions options, Dialect dialect, Namespace namespace) {
NameSpaceTablesInformation tables = databaseInformation.getTablesInformation(namespace);
Iterator tableIter = namespace.getTables().iterator();
while (tableIter.hasNext()) {
Table table = (Table) tableIter.next();
if (this.schemaFilter.includeTable(table) && table.isPhysicalTable()) {
this.alterTable(table, tables.getTableInformation(table), metadata, options, dialect);
}
}
}
protected void alterTable(Table table, TableInformation tableInformation, Metadata metadata, ExecutionOptions options, Dialect dialect) {
if (tableInformation == null) {
throw new SchemaManagementException(String.format("Schema-alter: missing table [%s]", table.getQualifiedTableName().toString()));
} else {
Iterator selectableItr = table.getColumnIterator();
while (selectableItr.hasNext()) {
Selectable selectable = (Selectable) selectableItr.next();
if (Column.class.isInstance(selectable)) {
Column column = (Column) selectable;
ColumnInformation existingColumn = tableInformation.getColumn(Identifier.toIdentifier(column.getQuotedName()));
if (existingColumn == null) {
throw new SchemaManagementException(String.format("Schema-alter: missing column [%s] in table [%s]", column.getName(), table.getQualifiedTableName()));
}
// this.validateColumnType(table, column, existingColumn, metadata, dialect);
if (!validateColumnLength(column, existingColumn, metadata, dialect)) {
log.info("table alter:{}", table.getQualifiedTableName());
boolean format = Helper.interpretFormattingEnabled(options.getConfigurationValues());
Formatter formatter = format ? FormatStyle.DDL.getFormatter() : FormatStyle.NONE.getFormatter();
final GenerationTarget[] targets = new GenerationTarget[]{new GenerationTargetToDatabase(tool.getDdlTransactionIsolator(jdbcContext), true)};
migrateTable(table, column, tableInformation, dialect, metadata, formatter, options, targets);
}
}
}
}
}
protected void migrateTable(Table table, Column column, TableInformation tableInformation, Dialect dialect, Metadata metadata, Formatter formatter, ExecutionOptions options, GenerationTarget... targets) {
Database database = metadata.getDatabase();
AlterTable alterTable = new AlterTable(table);
applySqlStrings(false, alterTable.sqlAlterStrings(dialect, metadata, tableInformation,
column,
database.getDefaultNamespace().getPhysicalName().getCatalog(),
database.getDefaultNamespace().getPhysicalName().getSchema()),
formatter,
options,
targets);
for (int i = 0; i < targets.length; i++) {
targets[i].release();
}
}
void applySqlStrings(boolean quiet, Iterator sqlStrings, Formatter formatter, ExecutionOptions options, GenerationTarget... targets) {
if (sqlStrings != null) {
while (sqlStrings.hasNext()) {
String sqlString = (String) sqlStrings.next();
applySqlString(quiet, sqlString, formatter, options, targets);
}
}
}
private static void applySqlString(boolean quiet, String sqlString, Formatter formatter, ExecutionOptions options, GenerationTarget... targets) {
if (!StringHelper.isEmpty(sqlString)) {
String sqlStringFormatted = formatter.format(sqlString);
for (GenerationTarget target : targets) {
try {
target.accept(sqlStringFormatted);
} catch (CommandAcceptanceException e) {
if (!quiet) {
options.getExceptionHandler().handleException(e);
}
}
}
}
}
}
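The AlterTable helper used in migrateTable above is not shown in the article. Purely as a hypothetical sketch of what it might look like, assuming MySQL-style MODIFY COLUMN syntax (the real implementation may differ):
import java.util.Collections;
import java.util.Iterator;

import org.hibernate.boot.Metadata;
import org.hibernate.boot.model.naming.Identifier;
import org.hibernate.dialect.Dialect;
import org.hibernate.mapping.Column;
import org.hibernate.mapping.Table;
import org.hibernate.tool.schema.extract.spi.TableInformation;

public class AlterTable {

    private final Table table;

    public AlterTable(Table table) {
        this.table = table;
    }

    /**
     * Builds a single "alter table ... modify column ..." statement for the given column,
     * taking the length/type from the entity model rather than from the database.
     * defaultCatalog / defaultSchema are accepted to mirror Table#sqlAlterStrings but are not used here.
     */
    public Iterator<String> sqlAlterStrings(
            Dialect dialect,
            Metadata metadata,
            TableInformation tableInformation,
            Column column,
            Identifier defaultCatalog,
            Identifier defaultSchema) {

        // render the qualified table name with the current dialect
        String tableName = metadata.getDatabase().getJdbcEnvironment()
                .getQualifiedObjectNameFormatter()
                .format(table.getQualifiedTableName(), dialect);

        StringBuilder alter = new StringBuilder("alter table ")
                .append(tableName)
                .append(" modify column ")
                .append(column.getQuotedName(dialect))
                .append(' ')
                .append(column.getSqlType(dialect, metadata));

        if (column.isNullable()) {
            alter.append(dialect.getNullColumnString());
        }
        else {
            alter.append(" not null");
        }

        return Collections.singletonList(alter.toString()).iterator();
    }
}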