1. Environment
Spring Boot 3.4.1
mybatis 3.5.17
mybatis-plus 3.5.9
flowable 7.0.1
DM Database Server x64 V8
Connection string:
# No need to specify Oracle compatibility mode (compatibleMode=oracle), and no drivers entry is needed either
url: jdbc:dm://192.168.67.10:5236?schema=dpf
username: test
password: test_111TEST
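For orientation, this fragment sits under the standard Spring Boot datasource keys. A minimal sketch, assuming the default single-datasource setup (the spring.datasource nesting is an assumption, not shown above):
spring:
  datasource:
    url: jdbc:dm://192.168.67.10:5236?schema=dpf
    username: test
    password: test_111TEST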
2. Overriding Liquibase
Option 1:
If you do not want to build the adapter classes yourself, you can pull in the open-source package db-migration directly (currently version 2.0.8), the low-effort choice. Git repository: https://gitee.com/mengweijin/db-migration.
<!-- Note the declaration order: db-migration must be declared first. -->
<dependency>
<groupId>com.github.mengweijin</groupId>
<artifactId>db-migration</artifactId>
<version>${db-migration.version}</version>
</dependency>
<!-- Pin the Liquibase version to 4.27.0 -->
<dependency>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>4.27.0</version>
</dependency>
Option 2:
Write the adapter classes yourself. db-migration bundles a lot of things you will not need, so it is enough to extract only the parts that adapt the DM (Dameng) database.
1. Create the configuration class FlowableSpringProcessEngineBeanPostProcessor:
import org.flowable.common.engine.impl.AbstractEngineConfiguration;
import org.flowable.common.engine.impl.util.ReflectUtil;
import org.flowable.spring.SpringProcessEngineConfiguration;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanPostProcessor;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.context.annotation.Configuration;
import org.springframework.lang.Nullable;
import java.lang.reflect.Field;
import java.util.Properties;
@Configuration
@ConditionalOnClass({SpringProcessEngineConfiguration.class})
public class FlowableSpringProcessEngineBeanPostProcessor implements BeanPostProcessor {
@Override
public Object postProcessAfterInitialization(@Nullable Object bean, @Nullable String beanName) throws BeansException {
if(bean instanceof SpringProcessEngineConfiguration) {
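// Map DM's JDBC product name ("DM DBMS") to Flowable's built-in oracle database type,
// then push the extended mapping table back into the configuration via reflection.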
Properties databaseTypeMappings = SpringProcessEngineConfiguration.getDefaultDatabaseTypeMappings();
databaseTypeMappings.setProperty("DM DBMS", AbstractEngineConfiguration.DATABASE_TYPE_ORACLE);
Field field = ReflectUtil.getField("databaseTypeMappings", bean);
ReflectUtil.setField(field, bean, databaseTypeMappings);
}
return bean;
}
}
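With this in place, Flowable treats a DM connection as Oracle and reuses its Oracle SQL resources. To confirm the mapping took effect, a temporary check like the sketch below can be added (FlowableDatabaseTypeChecker is an illustrative name, not part of the required adaptation); after startup it should print oracle:
import org.flowable.spring.SpringProcessEngineConfiguration;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
@Component
public class FlowableDatabaseTypeChecker implements ApplicationRunner {
    private final SpringProcessEngineConfiguration configuration;
    public FlowableDatabaseTypeChecker(SpringProcessEngineConfiguration configuration) {
        this.configuration = configuration;
    }
    @Override
    public void run(ApplicationArguments args) {
        // Resolved while the process engine is built; expected value on DM: "oracle"
        System.out.println("flowable databaseType = " + configuration.getDatabaseType());
    }
}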
2. Create the reflection utility class ReflectUtils:
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class ReflectUtils {
public static boolean hasStaticField(Class<?> cls, String fieldName) {
return getAllStaticFieldName(cls).contains(fieldName);
}
/**
* Get the names of all static fields of a class.
*/
public static List<String> getAllStaticFieldName(Class<?> clazz) {
List<String> list = new ArrayList<>();
Field[] declaredFields = clazz.getDeclaredFields();
for (Field field : declaredFields) {
if (Modifier.isStatic(field.getModifiers())) {
list.add(field.getName());
}
}
return list;
}
public static List<Field> getAllFields(Class<?> clazz) {
Field[] declaredFields = clazz.getDeclaredFields();
return new ArrayList<>(Arrays.asList(declaredFields));
}
public static Object getFieldValue(Object obj, Field field) {
if (null == field) {
return null;
}
if (obj instanceof Class) {
// For static fields, the target object passed to Field#get must be null
obj = null;
}
setAccessible(field);
Object result;
try {
result = field.get(obj);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}
return result;
}
public static <T extends AccessibleObject> void setAccessible(T accessibleObject) {
if (null != accessibleObject && !accessibleObject.isAccessible()) {
accessibleObject.setAccessible(true);
}
}
public static boolean isClassExist(String className) {
try {
Class.forName(className);
return true;
} catch (ClassNotFoundException e) {
return false;
}
}
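/**
 * Unwrap the physical connection behind a HikariCP HikariProxyConnection by reading the
 * private "delegate" field declared on its superclass (ProxyConnection).
 */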
public static Object getHikariProxyConnectionDelegateFieldValue(Connection connection) {
try {
Field delegate = connection.getClass().getSuperclass().getDeclaredField("delegate");
return getFieldValue(connection, delegate);
} catch (NoSuchFieldException e) {
throw new RuntimeException(e);
}
}
}
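DmDatabase below uses this helper to check that GlobalConfiguration actually declares DDL_LOCK_TIMEOUT before reading it. A minimal usage sketch (the wrapper class name is illustrative only):
import liquibase.GlobalConfiguration;

public class DdlLockTimeoutProbe {
    // Mirrors the guard used in DmDatabase.setConnection further down.
    public static Integer currentDdlLockTimeout() {
        if (ReflectUtils.hasStaticField(GlobalConfiguration.class, "DDL_LOCK_TIMEOUT")
                && GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) {
            return GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue();
        }
        return null;
    }
}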
3. Override part of the Liquibase source. The package paths must match the originals exactly. The following files are involved:
- liquibase.database.AbstractJdbcDatabase
- liquibase.database.core.DmDatabase (a new class)
- liquibase.datatype.core.BooleanType
- liquibase.datatype.core.CharType
- liquibase.snapshot.JdbcDatabaseSnapshot
- liquibase.snapshot.jvm.ColumnSnapshotGeneratorInformix
- resources/META-INF/services/liquibase.database.Database (see the sketch after this list)
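The last entry is the Java ServiceLoader registration file through which Liquibase discovers database dialects. Keep every class name already present in the copy taken from liquibase-core and append one line for the new dialect (a minimal sketch of the addition):
liquibase.database.core.DmDatabase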
4. Overridden content:
liquibase.database.AbstractJdbcDatabase:
package liquibase.database;
import liquibase.CatalogAndSchema;
import liquibase.GlobalConfiguration;
import liquibase.Scope;
import liquibase.change.Change;
import liquibase.changelog.*;
import liquibase.configuration.ConfiguredValue;
import liquibase.database.core.*;
import liquibase.database.jvm.JdbcConnection;
import liquibase.diff.compare.DatabaseObjectComparatorFactory;
import liquibase.exception.*;
import liquibase.executor.ExecutorService;
import liquibase.lockservice.LockServiceFactory;
import liquibase.sql.Sql;
import liquibase.sql.visitor.SqlVisitor;
import liquibase.sqlgenerator.SqlGeneratorFactory;
import liquibase.statement.DatabaseFunction;
import liquibase.statement.SequenceCurrentValueFunction;
import liquibase.statement.SequenceNextValueFunction;
import liquibase.statement.SqlStatement;
import liquibase.statement.core.GetViewDefinitionStatement;
import liquibase.statement.core.RawCallStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import liquibase.util.ISODateFormat;
import liquibase.util.NowAndTodayUtil;
import liquibase.util.StreamUtil;
import liquibase.util.StringUtil;
import java.io.IOException;
import java.io.Writer;
import java.math.BigInteger;
import java.sql.SQLException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Pattern;
import static liquibase.util.StringUtil.join;
/**
* AbstractJdbcDatabase is extended by all supported databases as a facade to the underlying database.
* The physical connection can be retrieved from the AbstractJdbcDatabase implementation, as well as any
* database-specific characteristics such as the datatype for "boolean" fields.
*/
public abstract class AbstractJdbcDatabase implements Database {
private static final int FETCH_SIZE = 1000;
private static final int DEFAULT_MAX_TIMESTAMP_FRACTIONAL_DIGITS = 9;
private static final String STARTS_WITH_NUMBER_REGEX = "^[0-9].*";
private static final Pattern STARTS_WITH_NUMBER_PATTERN = Pattern.compile(STARTS_WITH_NUMBER_REGEX);
private static final String NON_WORD_REGEX = ".*\\W.*";
private static final Pattern NON_WORD_PATTERN = Pattern.compile(NON_WORD_REGEX);
private static final String CREATE_VIEW_AS_REGEX = "^CREATE\\s+.*?VIEW\\s+.*?\\s+AS(?:\\s+|(?=\\())";
private static final Pattern CREATE_VIEW_AS_PATTERN = Pattern.compile(CREATE_VIEW_AS_REGEX, Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
private static final String DATE_ONLY_REGEX = "^\\d{4}\\-\\d{2}\\-\\d{2}$";
private static final Pattern DATE_ONLY_PATTERN = Pattern.compile(DATE_ONLY_REGEX);
private static final String DATE_TIME_REGEX = "^\\d{4}\\-\\d{2}\\-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(?:\\.\\d+)?$";
private static final Pattern DATE_TIME_PATTERN = Pattern.compile(DATE_TIME_REGEX);
private static final String TIMESTAMP_REGEX = "^\\d{4}\\-\\d{2}\\-\\d{2}T\\d{2}:\\d{2}:\\d{2}\\.\\d+$";
private static final Pattern TIMESTAMP_PATTERN = Pattern.compile(TIMESTAMP_REGEX);
private static final String TIME_REGEX = "^\\d{2}:\\d{2}:\\d{2}$";
private static final Pattern TIME_PATTERN = Pattern.compile(TIME_REGEX);
private static final String NAME_WITH_DESC_REGEX = "(?i).*\\s+DESC";
private static final Pattern NAME_WITH_DESC_PATTERN = Pattern.compile(NAME_WITH_DESC_REGEX);
private static final String NAME_WITH_ASC_REGEX = "(?i).*\\s+ASC";
private static final Pattern NAME_WITH_ASC_PATTERN = Pattern.compile(NAME_WITH_ASC_REGEX);
private final Set<String> reservedWords = new HashSet<>();
protected String defaultCatalogName;
protected String defaultSchemaName;
protected String currentDateTimeFunction;
/**
* The sequence name will be substituted into the string e.g. NEXTVAL('%s')
*/
protected String sequenceNextValueFunction;
protected String sequenceCurrentValueFunction;
// List of Database native functions.
protected List<DatabaseFunction> dateFunctions = new ArrayList<>();
protected List<String> unmodifiableDataTypes = new ArrayList<>();
protected BigInteger defaultAutoIncrementStartWith = BigInteger.ONE;
protected BigInteger defaultAutoIncrementBy = BigInteger.ONE;
// most databases either lowercase or uppercase unquoted objects such as table and column names.
protected Boolean unquotedObjectsAreUppercased;
// whether object names should be quoted
protected ObjectQuotingStrategy quotingStrategy = ObjectQuotingStrategy.LEGACY;
protected Boolean caseSensitive;
private String databaseChangeLogTableName;
private String databaseChangeLogLockTableName;
private String liquibaseTablespaceName;
private String liquibaseSchemaName;
private String liquibaseCatalogName;
private Boolean previousAutoCommit;
private boolean canCacheLiquibaseTableInfo = false;
private DatabaseConnection connection;
private boolean outputDefaultSchema = true;
private boolean outputDefaultCatalog = true;
private boolean defaultCatalogSet;
private final Map<String, Object> attributes = new HashMap<>();
public String getName() {
return toString();
}
@Override
public boolean requiresPassword() {
return true;
}
@Override
public boolean requiresUsername() {
return true;
}
public DatabaseObject[] getContainingObjects() {
return null;
}
// ------- DATABASE INFORMATION METHODS ---- //
@Override
public DatabaseConnection getConnection() {
return connection;
}
@Override
public void setConnection(final DatabaseConnection conn) {
this.setSuperConnection(conn);
}
protected void setSuperConnection(final DatabaseConnection conn) {
Scope.getCurrentScope().getLog(getClass()).fine("Connected to " + conn.getConnectionUserName() + "@" + conn.getURL());
this.connection = conn;
try {
boolean autoCommit = conn.getAutoCommit();
if (autoCommit == getAutoCommitMode()) {
// Don't adjust the auto-commit mode if it's already what the database wants it to be.
Scope.getCurrentScope().getLog(getClass()).fine("Not adjusting the auto commit mode; it is already " + autoCommit);
} else {
// Store the previous auto-commit mode, because the connection needs to be restored to it when this
// AbstractDatabase type is closed. This is important for systems which use connection pools.
previousAutoCommit = autoCommit;
Scope.getCurrentScope().getLog(getClass()).fine("Setting auto commit to " + getAutoCommitMode() + " from " + autoCommit);
connection.setAutoCommit(getAutoCommitMode());
}
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).warning("Cannot set auto commit to " + getAutoCommitMode() + " on connection");
}
this.connection.attached(this);
}
@Override
public boolean getAutoCommitMode() {
return !supportsDDLInTransaction();
}
@Override
public final void addReservedWords(Collection<String> words) {
reservedWords.addAll(words);
}
/**
* Determines if the database supports DDL within a transaction or not.
*
* @return True if the database supports DDL within a transaction, otherwise false.
*/
// TODO this might be a dangerous default value. I would rather make this an abstract method and have every
// implementation specify it explicitly.
@Override
public boolean supportsDDLInTransaction() {
return true;
}
@Override
public String getDatabaseProductName() {
if (connection == null) {
return getDefaultDatabaseProductName();
}
try {
return connection.getDatabaseProductName();
} catch (DatabaseException e) {
throw new RuntimeException("Cannot get database name");
}
}
protected abstract String getDefaultDatabaseProductName();
@Override
public String getDatabaseProductVersion() throws DatabaseException {
if (connection == null) {
return null;
}
return connection.getDatabaseProductVersion();
}
@Override
public int getDatabaseMajorVersion() throws DatabaseException {
if (connection == null) {
return 999;
}
return connection.getDatabaseMajorVersion();
}
@Override
public int getDatabaseMinorVersion() throws DatabaseException {
if (connection == null) {
return -1;
}
return connection.getDatabaseMinorVersion();
}
@Override
public String getDefaultCatalogName() {
if (defaultCatalogName == null) {
if ((defaultSchemaName != null) && !this.supportsSchemas()) {
return defaultSchemaName;
}
if (connection != null) {
try {
defaultCatalogName = getConnectionCatalogName();
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).info("Error getting default catalog", e);
}
}
}
return defaultCatalogName;
}
@Override
public void setDefaultCatalogName(final String defaultCatalogName) {
this.defaultCatalogName = correctObjectName(defaultCatalogName, Catalog.class);
defaultCatalogSet = defaultCatalogName != null;
}
protected String getConnectionCatalogName() throws DatabaseException {
return connection.getCatalog();
}
@Deprecated
public CatalogAndSchema correctSchema(final String catalog, final String schema) {
return new CatalogAndSchema(catalog, schema).standardize(this);
}
@Deprecated
@Override
public CatalogAndSchema correctSchema(final CatalogAndSchema schema) {
if (schema == null) {
return new CatalogAndSchema(getDefaultCatalogName(), getDefaultSchemaName());
}
return schema.standardize(this);
}
@Override
public String correctObjectName(final String objectName, final Class<? extends DatabaseObject> objectType) {
if (isCatalogOrSchemaType(objectType) && preserveCaseIfRequested() == CatalogAndSchema.CatalogAndSchemaCase.ORIGINAL_CASE) {
return objectName;
} else if ((getObjectQuotingStrategy() == ObjectQuotingStrategy.QUOTE_ALL_OBJECTS) || (unquotedObjectsAreUppercased == null) ||
(objectName == null) || (objectName.startsWith(getQuotingStartCharacter()) && objectName.endsWith(getQuotingEndCharacter()))) {
return objectName;
} else if (Boolean.TRUE.equals(unquotedObjectsAreUppercased)) {
return objectName.toUpperCase(Locale.US);
} else {
return objectName.toLowerCase(Locale.US);
}
}
protected boolean isCatalogOrSchemaType(Class<? extends DatabaseObject> objectType) {
return objectType.equals(Catalog.class) || objectType.equals(Schema.class);
}
private CatalogAndSchema.CatalogAndSchemaCase preserveCaseIfRequested() {
if (Boolean.TRUE.equals(GlobalConfiguration.PRESERVE_SCHEMA_CASE.getCurrentValue())) {
return CatalogAndSchema.CatalogAndSchemaCase.ORIGINAL_CASE;
}
return getSchemaAndCatalogCase();
}
@Override
public CatalogAndSchema getDefaultSchema() {
return new CatalogAndSchema(getDefaultCatalogName(), getDefaultSchemaName());
}
@Override
public String getDefaultSchemaName() {
if (!supportsSchemas()) {
return getDefaultCatalogName();
}
if ((defaultSchemaName == null) && (connection != null)) {
defaultSchemaName = getConnectionSchemaName();
if (defaultSchemaName != null) {
Scope.getCurrentScope().getLog(getClass()).info("Set default schema name to " + defaultSchemaName);
}
}
return defaultSchemaName;
}
@Override
public Integer getDefaultScaleForNativeDataType(String nativeDataType) {
// Default implementation does not return anything; this is up to the concrete implementation.
return null;
}
@Override
public void setDefaultSchemaName(final String schemaName) {
this.defaultSchemaName = correctObjectName(schemaName, Schema.class);
if (!supportsSchemas()) {
defaultCatalogSet = schemaName != null;
}
}
/**
* Overwrite this method to get the default schema name for the connection.
* If you only need to change the statement that obtains the current schema then override
*
* @see AbstractJdbcDatabase#getConnectionSchemaNameCallStatement()
*/
protected String getConnectionSchemaName() {
if (connection == null) {
return null;
}
if (connection instanceof OfflineConnection) {
return ((OfflineConnection) connection).getSchema();
}
if (!(connection instanceof JdbcConnection)) {
return defaultSchemaName;
}
try {
SqlStatement currentSchemaStatement = getConnectionSchemaNameCallStatement();
return Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", this).
queryForObject(currentSchemaStatement, String.class);
} catch (Exception e) {
Scope.getCurrentScope().getLog(getClass()).info("Error getting default schema", e);
}
return null;
}
/**
* Used to obtain the connection schema name through a statement
* Override this method to change the statement.
* Only override this if getConnectionSchemaName is left unchanged or is using this method.
*
* @see AbstractJdbcDatabase#getConnectionSchemaName()
*/
protected SqlStatement getConnectionSchemaNameCallStatement() {
return new RawCallStatement("call current_schema");
}
@Override
public Integer getFetchSize() {
return FETCH_SIZE;
}
/**
* Returns system (undroppable) tables.
*/
protected Set<String> getSystemTables() {
return new HashSet<>();
}
/**
* Returns system (undroppable) views.
*/
protected Set<String> getSystemViews() {
return new HashSet<>();
}
// ------- DATABASE FEATURE INFORMATION METHODS ---- //
/**
* Does the database type support sequences?
*/
@Override
public boolean supportsSequences() {
return true;
}
@Override
public boolean supportsAutoIncrement() {
return true;
}
// ------- DATABASE-SPECIFIC SQL METHODS ---- //
/**
* Return a date literal with the same value as a string formatted using ISO 8601.
* <p/>
* Note: many databases accept date literals in ISO8601 format with the 'T' replaced with
* a space. Only databases which do not accept these strings should need to override this
* method.
* <p/>
* Implementation restriction:
* Currently, only the following subsets of ISO8601 are supported:
* yyyy-MM-dd
* hh:mm:ss
* yyyy-MM-ddThh:mm:ss
*/
@Override
public String getDateLiteral(final String isoDate) {
if (isDateOnly(isoDate) || isTimeOnly(isoDate)) {
return "'" + isoDate + "'";
} else if (isDateTime(isoDate)) {
return "'" + isoDate.replace('T', ' ') + "'";
} else {
return "BAD_DATE_FORMAT:" + isoDate;
}
}
@Override
public String getDateTimeLiteral(final java.sql.Timestamp date) {
return getDateLiteral(new ISODateFormat().format(date).replaceFirst("^'", "").replaceFirst("'$", ""));
}
@Override
public String getDateLiteral(final java.sql.Date date) {
return getDateLiteral(new ISODateFormat().format(date).replaceFirst("^'", "").replaceFirst("'$", ""));
}
@Override
public String getTimeLiteral(final java.sql.Time date) {
return getDateLiteral(new ISODateFormat().format(date).replaceFirst("^'", "").replaceFirst("'$", ""));
}
@Override
public String getDateLiteral(final Date date) {
if (date instanceof java.sql.Date) {
return getDateLiteral(((java.sql.Date) date));
} else if (date instanceof java.sql.Time) {
return getTimeLiteral(((java.sql.Time) date));
} else if (date instanceof java.sql.Timestamp) {
return getDateTimeLiteral(((java.sql.Timestamp) date));
} else if (date instanceof Date) {
return getDateTimeLiteral(new java.sql.Timestamp(date.getTime()));
} else {
throw new RuntimeException("Unexpected type: " + date.getClass().getName());
}
}
@Override
public Date parseDate(final String dateAsString) throws DateParseException {
try {
if (dateAsString.indexOf(" ") > 0) {
return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(dateAsString);
} else if (dateAsString.indexOf("T") > 0) {
return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").parse(dateAsString);
} else {
if (dateAsString.indexOf(":") > 0) {
return new SimpleDateFormat("HH:mm:ss").parse(dateAsString);
} else {
return new SimpleDateFormat("yyyy-MM-dd").parse(dateAsString);
}
}
} catch (ParseException e) {
throw new DateParseException(dateAsString);
}
}
/***
* Returns true if the String conforms to an ISO 8601 date, e.g. 2016-12-31. (Or, if it is a "NOW" or "TODAY" type
* value)
* @param isoDate value to check.
*/
protected boolean isDateOnly(final String isoDate) {
return DATE_ONLY_PATTERN.matcher(isoDate).matches()
|| NowAndTodayUtil.isNowOrTodayFormat(isoDate);
}
/***
* Returns true if the String conforms to an ISO 8601 date plus a time (hours, minutes, whole seconds
* and optionally fraction of a second) in UTC, e.g. 2016-12-31T18:43:59. (Or, if it is a "NOW" or "TODAY" type
* value.)
* The "T" may be replaced by a space.
* CAUTION: Does NOT recognize values with a timezone information (...[+-Z]...)
* @param isoDate value to check.
*/
protected boolean isDateTime(final String isoDate) {
return DATE_TIME_PATTERN.matcher(isoDate).matches()
|| NowAndTodayUtil.isNowOrTodayFormat(isoDate);
}
/***
* Returns true if the String conforms to an ISO 8601 date
* plus a timestamp (hours, minutes, seconds and at least one decimal fraction) in UTC,
* e.g. 2016-12-31T18:43:59.3 or 2016-12-31T18:43:59.345. (Or, if it is a "NOW" or "TODAY" type value).
* CAUTION: Does NOT recognize values with a timezone information (...[+-Z]...)
* The "T" may be replaced by a space.
* @param isoDate value to check
*/
protected boolean isTimestamp(final String isoDate) {
return TIMESTAMP_PATTERN.matcher(isoDate).matches()
|| NowAndTodayUtil.isNowOrTodayFormat(isoDate);
}
/***
* Returns true if the String conforms to an ISO 8601 time (hours, minutes and whole seconds) in UTC,
* e.g. 18:43:59. (Or, if it is a "NOW" or "TODAY" type value).
* CAUTION: Does NOT recognize values with a timezone information (...[+-Z]...)
* @param isoDate value to check
*/
protected boolean isTimeOnly(final String isoDate) {
return TIME_PATTERN.matcher(isoDate).matches()
|| NowAndTodayUtil.isNowOrTodayFormat(isoDate);
}
/**
* Returns database-specific line comment string.
*/
@Override
public String getLineComment() {
return "--";
}
@Override
public String getAutoIncrementClause(final BigInteger startWith, final BigInteger incrementBy, final String generationType, final Boolean defaultOnNull) {
if (!supportsAutoIncrement()) {
return "";
}
// generate an SQL:2003 STANDARD compliant auto increment clause by default
String autoIncrementClause = getAutoIncrementClause(generationType, defaultOnNull);
boolean generateStartWith = generateAutoIncrementStartWith(startWith);
boolean generateIncrementBy = generateAutoIncrementBy(incrementBy);
if (generateStartWith || generateIncrementBy) {
autoIncrementClause += getAutoIncrementOpening();
if (generateStartWith) {
autoIncrementClause += String.format(getAutoIncrementStartWithClause(), (startWith == null) ? defaultAutoIncrementStartWith : startWith);
}
if (generateIncrementBy) {
if (generateStartWith) {
if(!(this instanceof PostgresDatabase)) {
autoIncrementClause += ", ";
}
else {
autoIncrementClause += " ";
}
}
autoIncrementClause += String.format(getAutoIncrementByClause(), (incrementBy == null) ? defaultAutoIncrementBy : incrementBy);
}
autoIncrementClause += getAutoIncrementClosing();
}
return autoIncrementClause;
}
protected String getAutoIncrementClause() {
return "GENERATED BY DEFAULT AS IDENTITY";
}
/**
* Default implementation. Intended for override in database specific cases
*/
protected String getAutoIncrementClause(final String generationType, final Boolean defaultOnNull) {
return getAutoIncrementClause();
}
protected boolean generateAutoIncrementStartWith(final BigInteger startWith) {
return (startWith != null) && !startWith.equals(defaultAutoIncrementStartWith);
}
protected boolean generateAutoIncrementBy(final BigInteger incrementBy) {
return (incrementBy != null) && !incrementBy.equals(defaultAutoIncrementBy);
}
protected String getAutoIncrementOpening() {
return " (";
}
protected String getAutoIncrementClosing() {
return ")";
}
protected String getAutoIncrementStartWithClause() {
return "START WITH %d";
}
protected String getAutoIncrementByClause() {
return "INCREMENT BY %d";
}
@Override
public String getConcatSql(final String... values) {
return join(values, " || ");
}
@Override
public String getDatabaseChangeLogTableName() {
if (databaseChangeLogTableName != null) {
return correctObjectName(databaseChangeLogTableName, Table.class);
}
return correctObjectName(GlobalConfiguration.DATABASECHANGELOG_TABLE_NAME.getCurrentValue(), Table.class);
}
@Override
public void setDatabaseChangeLogTableName(final String tableName) {
this.databaseChangeLogTableName = tableName;
}
/**
* Method used by extensions to get raw dbcl table name
*/
protected String getRawDatabaseChangeLogTableName() {
return databaseChangeLogTableName;
}
@Override
public String getDatabaseChangeLogLockTableName() {
if (databaseChangeLogLockTableName != null) {
return correctObjectName(databaseChangeLogLockTableName, Table.class);
}
return correctObjectName(GlobalConfiguration.DATABASECHANGELOGLOCK_TABLE_NAME.getCurrentValue(), Table.class);
}
@Override
public void setDatabaseChangeLogLockTableName(final String tableName) {
this.databaseChangeLogLockTableName = tableName;
}
/**
* Method used by extensions to get raw dbcll table name
*/
protected String getRawDatabaseChangeLogLockTableName() {
return databaseChangeLogLockTableName;
}
@Override
public String getLiquibaseTablespaceName() {
if (liquibaseTablespaceName != null) {
return liquibaseTablespaceName;
}
return GlobalConfiguration.LIQUIBASE_TABLESPACE_NAME.getCurrentValue();
}
@Override
public void setLiquibaseTablespaceName(final String tablespace) {
this.liquibaseTablespaceName = tablespace;
}
protected boolean canCreateChangeLogTable() throws DatabaseException {
return ((StandardChangeLogHistoryService) Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this)).canCreateChangeLogTable();
}
@Override
public void setCanCacheLiquibaseTableInfo(final boolean canCacheLiquibaseTableInfo) {
this.canCacheLiquibaseTableInfo = canCacheLiquibaseTableInfo;
}
@Override
public String getLiquibaseCatalogName() {
if (liquibaseCatalogName != null) {
return liquibaseCatalogName;
}
final String configuredCatalogName = GlobalConfiguration.LIQUIBASE_CATALOG_NAME.getCurrentValue();
if (configuredCatalogName != null) {
return configuredCatalogName;
}
return getDefaultCatalogName();
}
@Override
public void setLiquibaseCatalogName(final String catalogName) {
this.liquibaseCatalogName = catalogName;
}
@Override
public String getLiquibaseSchemaName() {
if (liquibaseSchemaName != null) {
return liquibaseSchemaName;
}
final ConfiguredValue<String> configuredValue = GlobalConfiguration.LIQUIBASE_SCHEMA_NAME.getCurrentConfiguredValue();
if (!configuredValue.wasDefaultValueUsed()) {
return configuredValue.getValue();
}
return getDefaultSchemaName();
}
@Override
public void setLiquibaseSchemaName(final String schemaName) {
this.liquibaseSchemaName = schemaName;
}
@Override
public boolean isCaseSensitive() {
if (caseSensitive == null) {
if ((connection != null) && (connection instanceof JdbcConnection)) {
try {
caseSensitive = ((JdbcConnection) connection).getUnderlyingConnection().getMetaData().supportsMixedCaseIdentifiers();
} catch (SQLException e) {
Scope.getCurrentScope().getLog(getClass()).warning("Cannot determine case sensitivity from JDBC driver", e);
}
}
}
if (caseSensitive == null) {
return false;
} else {
return caseSensitive.booleanValue();
}
}
public void setCaseSensitive(Boolean caseSensitive) {
this.caseSensitive = caseSensitive;
}
@Override
public boolean isReservedWord(final String string) {
return reservedWords.contains(string.toUpperCase());
}
/*
* Check if given string starts with numeric values that may cause problems and should be escaped.
*/
protected boolean startsWithNumeric(final String objectName) {
return STARTS_WITH_NUMBER_PATTERN.matcher(objectName).matches();
}
@Override
public void dropDatabaseObjects(final CatalogAndSchema schemaToDrop) throws LiquibaseException {
dropDatabaseObjects(schemaToDrop, null);
}
@Override
public boolean supportsDropTableCascadeConstraints() {
return ((this instanceof SQLiteDatabase) || (this instanceof SybaseDatabase) || (this instanceof
SybaseASADatabase) || (this instanceof PostgresDatabase) || (this instanceof OracleDatabase));
}
@Override
public boolean isSystemObject(final DatabaseObject example) {
if (example == null) {
return false;
}
if ((example.getSchema() != null) && (example.getSchema().getName() != null) && "information_schema"
.equalsIgnoreCase(example.getSchema().getName())) {
return true;
}
if ((example instanceof Table) && getSystemTables().contains(example.getName())) {
return true;
}
return (example instanceof View) && getSystemViews().contains(example.getName());
}
public boolean isSystemView(CatalogAndSchema schema, final String viewName) {
schema = schema.customize(this);
if ("information_schema".equalsIgnoreCase(schema.getSchemaName())) {
return true;
} else return getSystemViews().contains(viewName);
}
@Override
public boolean isLiquibaseObject(final DatabaseObject object) {
if (object instanceof Table) {
Schema liquibaseSchema = new Schema(getLiquibaseCatalogName(), getLiquibaseSchemaName());
LiquibaseTableNamesFactory liquibaseTableNamesFactory = Scope.getCurrentScope().getSingleton(LiquibaseTableNamesFactory.class);
List<String> liquibaseTableNames = liquibaseTableNamesFactory.getLiquibaseTableNames(this);
return liquibaseTableNames.stream().anyMatch(tableName ->
DatabaseObjectComparatorFactory.getInstance().isSameObject(object, new Table().setName(tableName).setSchema(liquibaseSchema), null, this));
} else if (object instanceof Column) {
return isLiquibaseObject(((Column) object).getRelation());
} else if (object instanceof Index) {
return isLiquibaseObject(((Index) object).getRelation());
} else if (object instanceof PrimaryKey) {
return isLiquibaseObject(((PrimaryKey) object).getTable());
}
return false;
}
@Override
public void tag(final String tagString) throws DatabaseException {
Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).tag(tagString);
}
@Override
public boolean doesTagExist(final String tag) throws DatabaseException {
return Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).tagExists(tag);
}
@Override
public String toString() {
if (getConnection() == null) {
return getShortName() + " Database";
}
return getConnection().getConnectionUserName() + " @ " + getConnection().getURL() + (getDefaultSchemaName() == null ? "" : " (Default Schema: " + getDefaultSchemaName() + ")");
}
@Override
public String getViewDefinition(CatalogAndSchema schema, final String viewName) throws DatabaseException {
schema = schema.customize(this);
String definition = Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", this).queryForObject(new GetViewDefinitionStatement(schema.getCatalogName(), schema.getSchemaName(), viewName), String.class);
if (definition == null) {
return null;
}
return CREATE_VIEW_AS_PATTERN.matcher(definition).replaceFirst("");
}
@Override
public String escapeTableName(final String catalogName, final String schemaName, final String tableName) {
return escapeObjectName(catalogName, schemaName, tableName, Table.class);
}
@Override
public String escapeObjectName(String catalogName, String schemaName, final String objectName,
final Class<? extends DatabaseObject> objectType) {
if (supportsSchemas()) {
catalogName = StringUtil.trimToNull(catalogName);
schemaName = StringUtil.trimToNull(schemaName);
if (catalogName == null) {
catalogName = this.getDefaultCatalogName();
}
if (schemaName == null) {
schemaName = this.getDefaultSchemaName();
}
if (!supportsCatalogInObjectName(objectType)) {
catalogName = null;
}
if ((catalogName == null) && (schemaName == null)) {
return escapeObjectName(objectName, objectType);
} else if ((catalogName == null) || !this.supportsCatalogInObjectName(objectType)) {
if (isDefaultSchema(catalogName, schemaName) && !getOutputDefaultSchema()) {
return escapeObjectName(objectName, objectType);
} else {
return escapeObjectName(schemaName, Schema.class) + "." + escapeObjectName(objectName, objectType);
}
} else {
if (isDefaultSchema(catalogName, schemaName) && !getOutputDefaultSchema() && !getOutputDefaultCatalog
()) {
return escapeObjectName(objectName, objectType);
} else if (isDefaultSchema(catalogName, schemaName) && !getOutputDefaultCatalog()) {
return escapeObjectName(schemaName, Schema.class) + "." + escapeObjectName(objectName, objectType);
} else {
return escapeObjectName(catalogName, Catalog.class) + "." + escapeObjectName(schemaName, Schema.class) + "." + escapeObjectName(objectName, objectType);
}
}
} else if (supportsCatalogs()) {
catalogName = StringUtil.trimToNull(catalogName);
schemaName = StringUtil.trimToNull(schemaName);
if (catalogName != null) {
if (getOutputDefaultCatalog()) {
return escapeObjectName(catalogName, Catalog.class) + "." + escapeObjectName(objectName, objectType);
} else {
if (!defaultCatalogSet && isDefaultCatalog(catalogName)) {
return escapeObjectName(objectName, objectType);
} else {
return escapeObjectName(catalogName, Catalog.class) + "." + escapeObjectName(objectName, objectType);
}
}
} else {
//they actually mean catalog name
if (schemaName != null) {
if (getOutputDefaultCatalog()) {
return escapeObjectName(schemaName, Catalog.class) + "." + escapeObjectName(objectName, objectType);
} else {
if (!defaultCatalogSet && isDefaultCatalog(schemaName)) {
return escapeObjectName(objectName, objectType);
} else {
return escapeObjectName(schemaName, Catalog.class) + "." + escapeObjectName(objectName, objectType);
}
}
} else {
catalogName = this.getDefaultCatalogName();
if (catalogName == null) {
return escapeObjectName(objectName, objectType);
} else {
if (defaultCatalogSet || (isDefaultCatalog(catalogName) && getOutputDefaultCatalog())) {
return escapeObjectName(catalogName, Catalog.class) + "." + escapeObjectName(objectName, objectType);
} else {
return escapeObjectName(objectName, objectType);
}
}
}
}
} else {
return escapeObjectName(objectName, objectType);
}
}
@Override
public String escapeObjectName(String objectName, final Class<? extends DatabaseObject> objectType) {
if (objectName != null) {
if (mustQuoteObjectName(objectName, objectType)) {
return quoteObject(objectName, objectType).trim();
} else if (quotingStrategy == ObjectQuotingStrategy.QUOTE_ALL_OBJECTS) {
return quoteObject(objectName, objectType).trim();
}
objectName = objectName.trim();
}
return objectName;
}
protected boolean mustQuoteObjectName(String objectName, Class<? extends DatabaseObject> objectType) {
if (isCatalogOrSchemaType(objectType) && preserveCaseIfRequested() == CatalogAndSchema.CatalogAndSchemaCase.ORIGINAL_CASE) {
return true;
}
return objectName.contains("-") || startsWithNumeric(objectName) || isReservedWord(objectName) || NON_WORD_PATTERN.matcher(objectName).matches();
}
protected String getQuotingStartCharacter() {
return "\"";
}
protected String getQuotingEndCharacter() {
return "\"";
}
protected String getQuotingEndReplacement() {
return "\"\"";
}
public String quoteObject(final String objectName, final Class<? extends DatabaseObject> objectType) {
if (objectName == null) {
return null;
}
return getQuotingStartCharacter() + objectName.replace(getQuotingEndCharacter(), getQuotingEndReplacement()) + getQuotingEndCharacter();
}
@Override
public String escapeIndexName(final String catalogName, final String schemaName, final String indexName) {
return escapeObjectName(catalogName, schemaName, indexName, Index.class);
}
@Override
public String escapeSequenceName(final String catalogName, final String schemaName, final String sequenceName) {
return escapeObjectName(catalogName, schemaName, sequenceName, Sequence.class);
}
@Override
public String escapeConstraintName(final String constraintName) {
return escapeObjectName(constraintName, Index.class);
}
@Override
public String escapeColumnName(final String catalogName, final String schemaName, final String tableName, final String columnName) {
return escapeObjectName(columnName, Column.class);
}
@Override
public String escapeColumnName(String catalogName, String schemaName, String tableName, String columnName, boolean quoteNamesThatMayBeFunctions) {
if (quotingStrategy == ObjectQuotingStrategy.QUOTE_ALL_OBJECTS) {
return quoteObject(columnName, Column.class);
}
if (columnName.contains("(")) {
if (quoteNamesThatMayBeFunctions) {
return quoteObject(columnName, Column.class);
} else {
return columnName;
}
}
return escapeObjectName(columnName, Column.class);
}
@Override
public String escapeColumnNameList(final String columnNames) {
StringBuilder sb = new StringBuilder();
for (String columnName : StringUtil.splitAndTrim(columnNames, ",")) {
if (sb.length() > 0) {
sb.append(", ");
}
boolean descending = false;
if (NAME_WITH_DESC_PATTERN.matcher(columnName).matches()) {
columnName = columnName.replaceFirst("(?i)\\s+DESC$", "");
descending = true;
} else if (NAME_WITH_ASC_PATTERN.matcher(columnName).matches()) {
columnName = columnName.replaceFirst("(?i)\\s+ASC$", "");
}
sb.append(escapeObjectName(columnName, Column.class));
if (descending) {
sb.append(" DESC");
}
}
return sb.toString();
}
@Override
public boolean supportsSchemas() {
return true;
}
@Override
public boolean supportsCatalogs() {
return true;
}
public boolean jdbcCallsCatalogsSchemas() {
return false;
}
@Override
public boolean supportsCatalogInObjectName(final Class<? extends DatabaseObject> type) {
return false;
}
@Override
public String generatePrimaryKeyName(final String tableName) {
return "PK_" + tableName.toUpperCase(Locale.US);
}
@Override
public String escapeViewName(final String catalogName, final String schemaName, final String viewName) {
return escapeObjectName(catalogName, schemaName, viewName, View.class);
}
@Override
public ChangeSet.RunStatus getRunStatus(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException {
return Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).getRunStatus(changeSet);
}
@Override
public RanChangeSet getRanChangeSet(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException {
return Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).getRanChangeSet(changeSet);
}
@Override
public List<RanChangeSet> getRanChangeSetList() throws DatabaseException {
return Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).getRanChangeSets();
}
@Override
public Date getRanDate(final ChangeSet changeSet) throws DatabaseException, DatabaseHistoryException {
return Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).getRanDate(changeSet);
}
@Override
public void markChangeSetExecStatus(final ChangeSet changeSet, final ChangeSet.ExecType execType) throws DatabaseException {
Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).setExecType(changeSet, execType);
}
@Override
public void removeRanStatus(final ChangeSet changeSet) throws DatabaseException {
Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).removeFromHistory(changeSet);
}
@Override
public String escapeStringForDatabase(final String string) {
if (string == null) {
return null;
}
return string.replaceAll("'", "''");
}
@Override
public void commit() throws DatabaseException {
getConnection().commit();
}
@Override
public void rollback() throws DatabaseException {
getConnection().rollback();
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if ((o == null) || (getClass() != o.getClass())) {
return false;
}
AbstractJdbcDatabase that = (AbstractJdbcDatabase) o;
if (connection == null) {
if (that.connection == null) {
return this == that;
} else {
return false;
}
} else {
return connection.equals(that.connection);
}
}
@Override
public int hashCode() {
return ((connection != null) ? connection.hashCode() : super.hashCode());
}
@Override
public void close() throws DatabaseException {
Scope.getCurrentScope().getSingleton(ExecutorService.class).clearExecutor("jdbc", this);
try (final DatabaseConnection connection = getConnection()) {
if (connection != null && previousAutoCommit != null) {
connection.setAutoCommit(previousAutoCommit);
}
} catch (final DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).warning("Failed to restore the auto commit to " + previousAutoCommit);
throw e;
}
}
@Override
public boolean supportsRestrictForeignKeys() {
return true;
}
@Override
public boolean isAutoCommit() throws DatabaseException {
return getConnection().getAutoCommit();
}
@Override
public void setAutoCommit(final boolean b) throws DatabaseException {
getConnection().setAutoCommit(b);
}
/**
* Default implementation of the {@link #isSafeToRunUpdate()} method. It checks if the database connection URL contains
* "localhost" or "127.0.0.1". If the database returns a {@code null} URL, the method returns {@code false} because it's
* not known whether it's safe to run the update.
*
* @return {@code true} if the database connection URL contains "localhost" or "127.0.0.1", otherwise {@code false}.
* @throws DatabaseException if there is an error accessing the database
*/
@Override
public boolean isSafeToRunUpdate() throws DatabaseException {
DatabaseConnection connection = getConnection();
if (connection == null) {
return true;
}
String url = connection.getURL();
if (url == null) {
return false;
}
return (url.contains("localhost")) || (url.contains("127.0.0.1"));
}
@Override
public void executeStatements(final Change change, final DatabaseChangeLog changeLog, final List<SqlVisitor> sqlVisitors) throws LiquibaseException {
Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", this).execute(change, sqlVisitors);
}
/*
* Executes the statements passed
*
* @param statements an array containing the SQL statements to be issued
* @param sqlVisitors a list of {@link SqlVisitor} objects to be applied to the executed statements
* @throws DatabaseException if there were problems issuing the statements
*/
@Override
public void execute(final SqlStatement[] statements, final List<SqlVisitor> sqlVisitors) throws LiquibaseException {
for (SqlStatement statement : statements) {
if (statement.skipOnUnsupported() && !SqlGeneratorFactory.getInstance().supports(statement, this)) {
continue;
}
Scope.getCurrentScope().getLog(getClass()).fine("Executing Statement: " + statement);
try {
Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", this).execute(statement, sqlVisitors);
} catch (DatabaseException e) {
if (statement.continueOnError()) {
Scope.getCurrentScope().getLog(getClass()).severe("Error executing statement '" + statement + "', but continuing", e);
} else {
throw e;
}
}
}
}
@Override
public void saveStatements(final Change change, final List<SqlVisitor> sqlVisitors, final Writer writer) throws
IOException {
SqlStatement[] statements = change.generateStatements(this);
for (SqlStatement statement : statements) {
for (Sql sql : SqlGeneratorFactory.getInstance().generateSql(statement, this)) {
writer.append(sql.toSql()).append(sql.getEndDelimiter()).append(StreamUtil.getLineSeparator()).append(StreamUtil.getLineSeparator());
}
}
}
@Override
public void executeRollbackStatements(final SqlStatement[] statements, final List<SqlVisitor> sqlVisitors) throws LiquibaseException {
execute(statements, filterRollbackVisitors(sqlVisitors));
}
@Override
public void executeRollbackStatements(final Change change, final List<SqlVisitor> sqlVisitors) throws LiquibaseException {
final SqlStatement[] statements = change.generateRollbackStatements(this);
executeRollbackStatements(statements, sqlVisitors);
}
@Override
public void saveRollbackStatement(final Change change, final List<SqlVisitor> sqlVisitors, final Writer writer) throws IOException, LiquibaseException {
SqlStatement[] statements = change.generateRollbackStatements(this);
for (SqlStatement statement : statements) {
for (Sql sql : SqlGeneratorFactory.getInstance().generateSql(statement, this)) {
writer.append(sql.toSql()).append(sql.getEndDelimiter()).append("\n\n");
}
}
}
/**
* Takes a list of SqlVisitors and returns a new list with only the SqlVisitors set to apply to rollbacks
*/
protected List<SqlVisitor> filterRollbackVisitors(final List<SqlVisitor> visitors) {
final List<SqlVisitor> rollbackVisitors = new ArrayList<>();
if (visitors != null) {
for (SqlVisitor visitor : visitors) {
if (visitor.isApplyToRollback()) {
rollbackVisitors.add(visitor);
}
}
}
return rollbackVisitors;
}
@Override
public List<DatabaseFunction> getDateFunctions() {
return dateFunctions;
}
@Override
public boolean isFunction(final String string) {
if (string.endsWith("()")) {
return true;
}
for (DatabaseFunction function : getDateFunctions()) {
if (function.toString().equalsIgnoreCase(string)) {
return true;
}
}
return false;
}
@Override
public void resetInternalState() {
Scope.getCurrentScope().getSingleton(ChangeLogHistoryServiceFactory.class).getChangeLogService(this).reset();
LockServiceFactory.getInstance().getLockService(this).reset();
}
@Override
public boolean supportsForeignKeyDisable() {
return false;
}
@Override
public boolean disableForeignKeyChecks() throws DatabaseException {
throw new DatabaseException("ForeignKeyChecks Management not supported");
}
@Override
public void enableForeignKeyChecks() throws DatabaseException {
throw new DatabaseException("ForeignKeyChecks Management not supported");
}
@Override
public boolean createsIndexesForForeignKeys() {
return false;
}
@Override
public int getDataTypeMaxParameters(final String dataTypeName) {
return 2;
}
public CatalogAndSchema getSchemaFromJdbcInfo(final String rawCatalogName, final String rawSchemaName) {
return new CatalogAndSchema(rawCatalogName, rawSchemaName).customize(this);
}
public String getJdbcCatalogName(final CatalogAndSchema schema) {
return correctObjectName(schema.getCatalogName(), Catalog.class);
}
public String getJdbcSchemaName(final CatalogAndSchema schema) {
return correctObjectName(schema.getSchemaName(), Schema.class);
}
public final String getJdbcCatalogName(final Schema schema) {
if (schema == null) {
return getJdbcCatalogName(getDefaultSchema());
} else {
return getJdbcCatalogName(new CatalogAndSchema(schema.getCatalogName(), schema.getName()));
}
}
public final String getJdbcSchemaName(final Schema schema) {
if (schema == null) {
return getJdbcSchemaName(getDefaultSchema());
} else {
return getJdbcSchemaName(new CatalogAndSchema(schema.getCatalogName(), schema.getName()));
}
}
@Override
public boolean dataTypeIsNotModifiable(final String typeName) {
return unmodifiableDataTypes.contains(typeName.toLowerCase());
}
@Override
public ObjectQuotingStrategy getObjectQuotingStrategy() {
return this.quotingStrategy;
}
@Override
public void setObjectQuotingStrategy(final ObjectQuotingStrategy quotingStrategy) {
this.quotingStrategy = quotingStrategy;
}
@Override
public String generateDatabaseFunctionValue(final DatabaseFunction databaseFunction) {
if (databaseFunction.getValue() == null) {
return null;
}
if (isCurrentTimeFunction(databaseFunction.getValue().toLowerCase())) {
return getCurrentDateTimeFunction();
} else if (databaseFunction instanceof SequenceNextValueFunction) {
if (sequenceNextValueFunction == null) {
throw new RuntimeException(String.format("next value function for a sequence is not configured for database %s",
getDefaultDatabaseProductName()));
}
String sequenceName = databaseFunction.getValue();
String sequenceSchemaName = databaseFunction.getSchemaName();
sequenceName = escapeObjectName(null, sequenceSchemaName, sequenceName, Sequence.class);
if (sequenceNextValueFunction.contains("'")) {
/* For PostgreSQL, the quotes around dangerous identifiers (e.g. mixed-case) need to stay in place,
* or else PostgreSQL will not be able to find the sequence. */
if (!(this instanceof PostgresDatabase)) {
sequenceName = sequenceName.replace("\"", "");
}
}
return String.format(sequenceNextValueFunction, sequenceName);
} else if (databaseFunction instanceof SequenceCurrentValueFunction) {
if (sequenceCurrentValueFunction == null) {
throw new RuntimeException(String.format("current value function for a sequence is not configured for database %s",
getDefaultDatabaseProductName()));
}
String sequenceSchemaName = databaseFunction.getSchemaName();
String sequenceName = databaseFunction.getValue();
sequenceName = escapeObjectName(null, sequenceSchemaName, sequenceName, Sequence.class);
if (sequenceCurrentValueFunction.contains("'")) {
/* For PostgreSQL, the quotes around dangerous identifiers (e.g. mixed-case) need to stay in place,
* or else PostgreSQL will not be able to find the sequence. */
if (!(this instanceof PostgresDatabase)) {
sequenceName = sequenceName.replace("\"", "");
}
}
return String.format(sequenceCurrentValueFunction, sequenceName);
} else {
return databaseFunction.getValue();
}
}
protected boolean isCurrentTimeFunction(final String functionValue) {
if (functionValue == null) {
return false;
}
return functionValue.startsWith("current_timestamp")
|| functionValue.startsWith("current_datetime")
|| functionValue.equalsIgnoreCase(getCurrentDateTimeFunction());
}
@Override
public String getCurrentDateTimeFunction() {
return currentDateTimeFunction;
}
@Override
public void setCurrentDateTimeFunction(final String function) {
if (function != null) {
this.currentDateTimeFunction = function;
this.dateFunctions.add(new DatabaseFunction(function));
}
}
@Override
public boolean isDefaultSchema(final String catalog, final String schema) {
if (!supportsSchemas()) {
return true;
}
if (!isDefaultCatalog(catalog)) {
return false;
}
return (schema == null) || schema.equalsIgnoreCase(getDefaultSchemaName());
}
@Override
public boolean isDefaultCatalog(final String catalog) {
if (!supportsCatalogs()) {
return true;
}
return (catalog == null) || catalog.equalsIgnoreCase(getDefaultCatalogName());
}
@Override
public boolean getOutputDefaultSchema() {
return outputDefaultSchema;
}
@Override
public void setOutputDefaultSchema(final boolean outputDefaultSchema) {
this.outputDefaultSchema = outputDefaultSchema;
}
@Override
public boolean getOutputDefaultCatalog() {
return outputDefaultCatalog;
}
@Override
public void setOutputDefaultCatalog(final boolean outputDefaultCatalog) {
this.outputDefaultCatalog = outputDefaultCatalog;
}
@Override
public boolean supportsPrimaryKeyNames() {
return true;
}
@Override
public String getSystemSchema() {
return "information_schema";
}
@Override
public String escapeDataTypeName(String dataTypeName) {
return dataTypeName;
}
@Override
public String unescapeDataTypeName(String dataTypeName) {
return dataTypeName;
}
@Override
public String unescapeDataTypeString(String dataTypeString) {
return dataTypeString;
}
public Object get(String key) {
return attributes.get(key);
}
public AbstractJdbcDatabase set(String key, Object value) {
if (value == null) {
attributes.remove(key);
} else {
attributes.put(key, value);
}
return this;
}
@Override
public ValidationErrors validate() {
return new ValidationErrors();
}
/**
* Most relational databases support 9 fractional digits, and subclasses must overwrite this method if they
* support less than that.
*
* @return the maximum number of supported fractional digits in TIMESTAMP columns
*/
@Override
public int getMaxFractionalDigitsForTimestamp() {
if (getConnection() == null) {
// if no connection is there we cannot do anything...
Scope.getCurrentScope().getLog(getClass()).warning(
"No database connection available - specified"
+ " DATETIME/TIMESTAMP precision will be tried");
}
return DEFAULT_MAX_TIMESTAMP_FRACTIONAL_DIGITS;
}
/**
* SQL Standard (Foundation) says: "...if <timestamp precision> is not specified, then 6 is implicit."
*
* @return the default precision / number of maximum digits in a timestamp if nothing else is specified.
*/
@Override
public int getDefaultFractionalDigitsForTimestamp() {
return ((getMaxFractionalDigitsForTimestamp() >= 6) ? 6 : getMaxFractionalDigitsForTimestamp());
}
@Override
public boolean supportsBatchUpdates() throws DatabaseException {
if (connection instanceof OfflineConnection) {
return false;
} else if (connection instanceof JdbcConnection) {
return ((JdbcConnection) getConnection()).supportsBatchUpdates();
} else {
// Normally, the connection can only be one of the two above types. But if, for whatever reason, it is
// not, let's err on the safe side.
return false;
}
}
@Override
public boolean supportsNotNullConstraintNames() {
return false;
}
@Override
public boolean requiresExplicitNullForColumns() {
return false;
}
/**
* This logic is used when db support catalogs
*
* @return UPPER_CASE by default
*/
@Override
public CatalogAndSchema.CatalogAndSchemaCase getSchemaAndCatalogCase() {
return CatalogAndSchema.CatalogAndSchemaCase.UPPER_CASE;
}
}
liquibase.database.core.DmDatabase:
package liquibase.database.core;
import com.akim.boot.module.bpm.framework.flowable.core.utils.ReflectUtils;
import liquibase.GlobalConfiguration;
import liquibase.Scope;
import liquibase.database.DatabaseConnection;
import liquibase.database.OfflineConnection;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.util.JdbcUtil;
import java.sql.*;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
* Encapsulates DM (Dameng) database support by reusing Liquibase's Oracle dialect.
*/
public class DmDatabase extends OracleDatabase {
public static final String PRODUCT_NAME = "DM DBMS";
private final Set<String> reservedWords = new HashSet<>();
private Integer databaseMajorVersion;
private Integer databaseMinorVersion;
@Override
public void setConnection(DatabaseConnection conn) {
//noinspection HardCodedStringLiteral,HardCodedStringLiteral,HardCodedStringLiteral,HardCodedStringLiteral,
// HardCodedStringLiteral
reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION", "PASSWORD", "RESOURCE", "START", "SIZE", "UID", "DESC", "ORDER")); //more reserved words not returned by driver
Connection sqlConn = null;
if (!(conn instanceof OfflineConnection)) {
try {
/*
* Don't try to call getWrappedConnection if the conn instance
* is not a JdbcConnection. This happens for OfflineConnection.
* see https://liquibase.jira.com/browse/CORE-2192
*/
if (conn instanceof JdbcConnection) {
sqlConn = ((JdbcConnection) conn).getWrappedConnection();
}
} catch (Exception e) {
throw new UnexpectedLiquibaseException(e);
}
if (sqlConn != null) {
//tryProxySession(conn.getURL(), sqlConn);
try {
//noinspection HardCodedStringLiteral
reservedWords.addAll(Arrays.asList(sqlConn.getMetaData().getSQLKeywords().toUpperCase().split(",\\s*")));
} catch (SQLException e) {
//noinspection HardCodedStringLiteral
Scope.getCurrentScope().getLog(getClass()).info("Could get sql keywords on OracleDatabase: " + e.getMessage());
//can not get keywords. Continue on
}
//try {
// Method method = sqlConn.getClass().getMethod("setRemarksReporting", Boolean.TYPE);
// method.setAccessible(true);
// method.invoke(sqlConn, true);
//} catch (Exception e) {
// //noinspection HardCodedStringLiteral
// Scope.getCurrentScope().getLog(getClass()).info("Could not set remarks reporting on OracleDatabase: " + e.getMessage());
// //cannot set it. That is OK
//}
CallableStatement statement = null;
try {
//noinspection HardCodedStringLiteral
// statement = sqlConn.prepareCall("{call DBMS_UTILITY.DB_VERSION(?,?)}");
// statement.registerOutParameter(1, Types.VARCHAR);
// statement.registerOutParameter(2, Types.VARCHAR);
// statement.execute();
//
// String compatibleVersion = statement.getString(2);
// if (compatibleVersion != null) {
// Matcher majorVersionMatcher = VERSION_PATTERN.matcher(compatibleVersion);
// if (majorVersionMatcher.matches()) {
// this.databaseMajorVersion = Integer.valueOf(majorVersionMatcher.group(1));
// this.databaseMinorVersion = Integer.valueOf(majorVersionMatcher.group(2));
// }
// }
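// Instead of Oracle's DBMS_UTILITY.DB_VERSION call (kept above for reference),
// read the version numbers from the standard JDBC DatabaseMetaData.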
DatabaseMetaData metaData = sqlConn.getMetaData();
this.databaseMajorVersion = metaData.getDatabaseMajorVersion();
this.databaseMinorVersion = metaData.getDatabaseMinorVersion();
} catch (SQLException e) {
@SuppressWarnings("HardCodedStringLiteral") String message = "Cannot read the database version from DatabaseMetaData: " + e.getMessage();
//noinspection HardCodedStringLiteral
Scope.getCurrentScope().getLog(getClass()).info("Could not determine the DM database version, falling back to the version reported by the connection: " + message);
} finally {
//JdbcUtil.closeStatement(statement);
}
if (ReflectUtils.hasStaticField(GlobalConfiguration.class, "DDL_LOCK_TIMEOUT") && GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue() != null) {
int timeoutValue = GlobalConfiguration.DDL_LOCK_TIMEOUT.getCurrentValue();
Scope.getCurrentScope().getLog(getClass()).fine("Setting DDL_LOCK_TIMEOUT value to " + timeoutValue);
String sql = "ALTER SESSION SET DDL_LOCK_TIMEOUT=" + timeoutValue;
PreparedStatement ddlLockTimeoutStatement = null;
try {
ddlLockTimeoutStatement = sqlConn.prepareStatement(sql);
ddlLockTimeoutStatement.execute();
} catch (SQLException sqle) {
Scope.getCurrentScope().getUI().sendErrorMessage("Unable to set the DDL_LOCK_TIMEOUT_VALUE: " + sqle.getMessage(), sqle);
Scope.getCurrentScope().getLog(getClass()).warning("Unable to set the DDL_LOCK_TIMEOUT_VALUE: " + sqle.getMessage(), sqle);
} finally {
JdbcUtil.closeStatement(ddlLockTimeoutStatement);
}
}
}
}
// super.setConnection(conn);
super.setSuperConnection(conn);
}
@Override
public String getShortName() {
//noinspection HardCodedStringLiteral
return "dm";
}
@Override
protected String getDefaultDatabaseProductName() {
//noinspection HardCodedStringLiteral
return PRODUCT_NAME;
}
@Override
public Integer getDefaultPort() {
return 5236;
}
@Override
public int getDatabaseMajorVersion() throws DatabaseException {
if (databaseMajorVersion == null) {
return super.getDatabaseMajorVersion();
} else {
return databaseMajorVersion;
}
}
@Override
public int getDatabaseMinorVersion() throws DatabaseException {
if (databaseMinorVersion == null) {
return super.getDatabaseMinorVersion();
} else {
return databaseMinorVersion;
}
}
@Override
public boolean isCorrectDatabaseImplementation(DatabaseConnection conn) throws DatabaseException {
return PRODUCT_NAME.equalsIgnoreCase(conn.getDatabaseProductName());
}
@Override
public String getDefaultDriver(String url) {
//noinspection HardCodedStringLiteral
if (url.startsWith("jdbc:dm")) {
return "dm.jdbc.driver.DmDriver";
}
return null;
}
@Override
public boolean supportsAutoIncrement() {
return true;
}
@Override
public int getIdentifierMaximumLength() {
return LONG_IDENTIFIERS_LEGNTH;
}
}
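With DmDatabase in place, Liquibase still has to discover it. Liquibase 4.x looks up Database implementations through the Java ServiceLoader, so unless this registration already happens elsewhere in your setup, add a file src/main/resources/META-INF/services/liquibase.database.Database to your own module containing the single line liquibase.database.core.DmDatabase. The overridden classes listed below keep the original fully-qualified names, so they are meant to replace the liquibase-core versions on the classpath and need no extra registration. A minimal sketch to confirm the lookup works (the class name DmDatabaseRegistrationCheck is made up for illustration):
import liquibase.database.Database;
import liquibase.database.DatabaseFactory;
public class DmDatabaseRegistrationCheck {
    public static void main(String[] args) {
        // Resolves the implementation registered under the short name "dm" (see getShortName() above).
        Database db = DatabaseFactory.getInstance().getDatabase("dm");
        System.out.println(db == null ? "DmDatabase is not registered" : "resolved: " + db.getClass().getName());
    }
}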
liquibase.datatype.core.BooleanType:
package liquibase.datatype.core;
import liquibase.Scope;
import liquibase.change.core.LoadDataChange;
import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
import liquibase.exception.DatabaseException;
import liquibase.exception.UnexpectedLiquibaseException;
import liquibase.statement.DatabaseFunction;
import liquibase.util.StringUtil;
import java.util.Locale;
import java.util.regex.Pattern;
@DataTypeInfo(name = "boolean", aliases = {"java.sql.Types.BOOLEAN", "java.lang.Boolean", "bit", "bool"}, minParameters = 0, maxParameters = 0, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class BooleanType extends LiquibaseDataType {
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
String originalDefinition = StringUtil.trimToEmpty(getRawDefinition());
if ((database instanceof FirebirdDatabase)) {
try {
if (database.getDatabaseMajorVersion() <= 2) {
return new DatabaseDataType("SMALLINT");
}
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).fine("Error checking database major version, assuming version 3+: "+e.getMessage(), e);
}
return new DatabaseDataType("BOOLEAN");
}
if ((database instanceof Db2zDatabase)) {
return new DatabaseDataType("SMALLINT");
} else if (database instanceof MSSQLDatabase) {
return new DatabaseDataType(database.escapeDataTypeName("bit"));
} else if (database instanceof MySQLDatabase) {
if (originalDefinition.toLowerCase(Locale.US).startsWith("bit")) {
return new DatabaseDataType("BIT", getParameters());
}
return database instanceof MariaDBDatabase ? new DatabaseDataType("TINYINT(1)") : new DatabaseDataType("TINYINT");
} else if (database instanceof DmDatabase) {
return new DatabaseDataType("NUMBER", 1);
} else if (database instanceof OracleDatabase) {
try {
if (database.getDatabaseMajorVersion() >= OracleDatabase.ORACLE_23C_MAJOR_VERSION) {
return new DatabaseDataType("BOOLEAN");
}
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).fine("Error checking database major version, assuming version <23: "+e.getMessage(), e);
}
return new DatabaseDataType("NUMBER", 1);
} else if ((database instanceof SybaseASADatabase) || (database instanceof SybaseDatabase)) {
return new DatabaseDataType("BIT");
} else if (database instanceof DerbyDatabase) {
if (((DerbyDatabase) database).supportsBooleanDataType()) {
return new DatabaseDataType("BOOLEAN");
} else {
return new DatabaseDataType("SMALLINT");
}
} else if (database instanceof DB2Database) {
if (((DB2Database) database).supportsBooleanDataType())
return new DatabaseDataType("BOOLEAN");
else
return new DatabaseDataType("SMALLINT");
} else if (database instanceof HsqlDatabase) {
return new DatabaseDataType("BOOLEAN");
} else if (database instanceof PostgresDatabase) {
if (originalDefinition.toLowerCase(Locale.US).startsWith("bit")) {
return new DatabaseDataType("BIT", getParameters());
}
} else if (database instanceof H2Database && getParameters().length > 0) {
return new DatabaseDataType("BOOLEAN");
}
return super.toDatabaseDataType(database);
}
@Override
public String objectToSql(Object value, Database database) {
if ((value == null) || "null".equals(value.toString().toLowerCase(Locale.US))) {
return null;
}
String returnValue;
if (value instanceof String) {
value = ((String) value).replaceAll("'", "");
if ("true".equals(((String) value).toLowerCase(Locale.US)) || "1".equals(value) || "b'1'".equals(((String) value).toLowerCase(Locale.US)) || "t".equals(((String) value).toLowerCase(Locale.US)) || ((String) value).toLowerCase(Locale.US).equals(this.getTrueBooleanValue(database).toLowerCase(Locale.US))) {
returnValue = this.getTrueBooleanValue(database);
} else if ("false".equals(((String) value).toLowerCase(Locale.US)) || "0".equals(value) || "b'0'".equals(
((String) value).toLowerCase(Locale.US)) || "f".equals(((String) value).toLowerCase(Locale.US)) || ((String) value).toLowerCase(Locale.US).equals(this.getFalseBooleanValue(database).toLowerCase(Locale.US))) {
returnValue = this.getFalseBooleanValue(database);
} else if (database instanceof PostgresDatabase && Pattern.matches("b?([01])\\1*(::bit|::\"bit\")?", (String) value)) {
returnValue = "b'"
+ value.toString()
.replace("b", "")
.replace("\"", "")
.replace("::it", "")
+ "'::\"bit\"";
} else if (database instanceof SybaseASADatabase && ((String) value).startsWith("COMPUTE")) {
returnValue = (String) value;
} else {
throw new UnexpectedLiquibaseException("Unknown boolean value: " + value);
}
} else if (value instanceof Long) {
if (Long.valueOf(1).equals(value)) {
returnValue = this.getTrueBooleanValue(database);
} else {
returnValue = this.getFalseBooleanValue(database);
}
} else if (value instanceof Number) {
if (value.equals(1) || "1".equals(value.toString()) || "1.0".equals(value.toString())) {
returnValue = this.getTrueBooleanValue(database);
} else {
returnValue = this.getFalseBooleanValue(database);
}
} else if (value instanceof DatabaseFunction) {
return value.toString();
} else if (value instanceof Boolean) {
if (((Boolean) value)) {
returnValue = this.getTrueBooleanValue(database);
} else {
returnValue = this.getFalseBooleanValue(database);
}
} else {
throw new UnexpectedLiquibaseException("Cannot convert type "+value.getClass()+" to a boolean value");
}
return returnValue;
}
protected boolean isNumericBoolean(Database database) {
if ((database instanceof FirebirdDatabase)) {
try {
if (database.getDatabaseMajorVersion() <= 2) {
return true;
}
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).fine("Error checking database major version, assuming version 3+: "+e.getMessage(), e);
}
return false;
}
if (database instanceof DerbyDatabase) {
return !((DerbyDatabase) database).supportsBooleanDataType();
} else if (database instanceof DB2Database) {
return !((DB2Database) database).supportsBooleanDataType();
}
return (database instanceof Db2zDatabase) || (database instanceof FirebirdDatabase) || (database instanceof
MSSQLDatabase) || (database instanceof MySQLDatabase) || (database instanceof OracleDatabase) ||
(database instanceof SQLiteDatabase) || (database instanceof SybaseASADatabase) || (database instanceof
SybaseDatabase);
}
/**
* The database-specific value to use for "false" "boolean" columns.
*/
public String getFalseBooleanValue(Database database) {
if (isNumericBoolean(database)) {
return "0";
}
if (database instanceof InformixDatabase) {
return "'f'";
}
return "FALSE";
}
/**
* The database-specific value to use for "true" "boolean" columns.
*/
public String getTrueBooleanValue(Database database) {
if (isNumericBoolean(database)) {
return "1";
}
if (database instanceof InformixDatabase) {
return "'t'";
}
return "TRUE";
}
@Override
public LoadDataChange.LOAD_DATA_TYPE getLoadTypeName() {
return LoadDataChange.LOAD_DATA_TYPE.BOOLEAN;
}
}
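A quick way to sanity-check the DmDatabase branch added to toDatabaseDataType() is to render the type directly. This is only a throwaway sketch, not part of the adapted classes; the class name DmBooleanTypeCheck is made up:
import liquibase.database.core.DmDatabase;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.core.BooleanType;
public class DmBooleanTypeCheck {
    public static void main(String[] args) {
        BooleanType booleanType = new BooleanType();
        // The DmDatabase branch above maps boolean columns to NUMBER(1), the same as Oracle.
        DatabaseDataType dmType = booleanType.toDatabaseDataType(new DmDatabase());
        System.out.println(dmType.toSql()); // expected output: NUMBER(1)
    }
}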
liquibase.datatype.core.CharType:
package liquibase.datatype.core;
import liquibase.Scope;
import liquibase.change.core.LoadDataChange;
import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.datatype.DataTypeInfo;
import liquibase.datatype.DatabaseDataType;
import liquibase.datatype.LiquibaseDataType;
import liquibase.integration.commandline.LiquibaseCommandLineConfiguration;
import liquibase.statement.DatabaseFunction;
import liquibase.util.StringUtil;
import java.math.BigInteger;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
@DataTypeInfo(name="char", aliases = {"java.sql.Types.CHAR", "bpchar", "character"}, minParameters = 0, maxParameters = 1, priority = LiquibaseDataType.PRIORITY_DEFAULT)
public class CharType extends LiquibaseDataType {
@Override
public DatabaseDataType toDatabaseDataType(Database database) {
if (database instanceof MSSQLDatabase) {
Object[] parameters = getParameters();
if (parameters.length > 0) {
// MSSQL only supports (n) syntax but not (n CHAR) syntax, so we need to remove CHAR.
final String param1 = parameters[0].toString().replaceFirst("(?<=\\d+)\\s*(?i)CHAR$", "");
parameters[0] = param1;
if (!param1.matches("\\d+") || (new BigInteger(param1).compareTo(BigInteger.valueOf(8000)) > 0)) {
DatabaseDataType type = new DatabaseDataType(database.escapeDataTypeName("char"), 8000);
type.addAdditionalInformation(getAdditionalInformation());
return type;
}
}
if (parameters.length == 0) {
parameters = new Object[] { 1 };
} else if (parameters.length > 1) {
parameters = Arrays.copyOfRange(parameters, 0, 1);
}
DatabaseDataType type = new DatabaseDataType(database.escapeDataTypeName("char"), parameters);
type.addAdditionalInformation(getAdditionalInformation());
return type;
} else if (database instanceof PostgresDatabase){
final Object[] parameters = getParameters();
if ((parameters != null) && (parameters.length == 1)) {
// PostgreSQL only supports (n) syntax but not (n CHAR) syntax, so we need to remove CHAR.
final String parameter = parameters[0].toString().replaceFirst("(?<=\\d+)\\s*(?i)CHAR$", "");
// PostgreSQL uses max. length implicitly if no length is provided to CHARACTER VARYING, so we can spare it.
if ("2147483647".equals(parameter)) {
DatabaseDataType type = new DatabaseDataType("CHARACTER");
type.addAdditionalInformation("VARYING");
return type;
}
parameters[0] = parameter;
DatabaseDataType type = new DatabaseDataType(this.getName().toUpperCase(Locale.US), parameters);
type.addAdditionalInformation(this.getAdditionalInformation());
return type;
}
} else if (database instanceof H2Database) {
if (getRawDefinition().toLowerCase(Locale.US).contains("large object")) {
return new DatabaseDataType("CHARACTER LARGE OBJECT");
}
}
return super.toDatabaseDataType(database);
}
@Override
public String objectToSql(Object value, Database database) {
if ((value == null) || "null".equals(value.toString().toLowerCase(Locale.US))) {
return null;
}
String stringValue = value.toString();
if (value instanceof DatabaseFunction) {
return stringValue;
}
String val = String.valueOf(value);
if ((database instanceof MSSQLDatabase) && !StringUtil.isAscii(val)) {
return "N'"+database.escapeStringForDatabase(val)+"'";
}
if (database instanceof DmDatabase) {
// DM: skip the Oracle to_clob() workaround below and fall through to the default quoting at the end of the method.
} else
/*
It is a somewhat safe assumption that if the database is Oracle and the length of the string exceeds 4000
characters then the column must be a clob type column, because Oracle doesn't support varchars longer than
2000 characters. It would be better to read the column config directly, but that info isn't available at this
point in the code.
*/
if (database instanceof OracleDatabase &&
LiquibaseCommandLineConfiguration.WORKAROUND_ORACLE_CLOB_CHARACTER_LIMIT.getCurrentValue() &&
stringValue.length() > 4000) {
Scope.getCurrentScope().getLog(getClass()).fine("A string longer than 4000 characters has been detected on an insert statement, " +
"and the database is Oracle. Oracle forbids insert statements with strings longer than 4000 characters, " +
"so Liquibase is going to workaround this limitation. If an error occurs, this can be disabled by setting "
+ LiquibaseCommandLineConfiguration.WORKAROUND_ORACLE_CLOB_CHARACTER_LIMIT.getKey() + " to false.");
List<String> chunks = StringUtil.splitToChunks(stringValue, 4000);
return "to_clob( '" + StringUtil.join(chunks, "' ) || to_clob( '", obj -> database.escapeStringForDatabase(obj.toString())) + "' )";
}
return "'"+database.escapeStringForDatabase(val)+"'";
}
/**
* Return the size of this data type definition. If unknown or unspecified, return -1
*/
protected int getSize() {
if (getParameters().length == 0) {
return -1;
}
if (getParameters()[0] instanceof String) {
return Integer.parseInt((String) getParameters()[0]);
}
if (getParameters()[0] instanceof Number) {
return ((Number) getParameters()[0]).intValue();
}
return -1;
}
@Override
public LoadDataChange.LOAD_DATA_TYPE getLoadTypeName() {
return LoadDataChange.LOAD_DATA_TYPE.STRING;
}
}
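The same kind of check works for CharType. On DM the Oracle to_clob() workaround for strings longer than 4000 characters is skipped, so values come back as one escaped literal. Another throwaway sketch (class name DmCharTypeCheck is hypothetical):
import liquibase.database.core.DmDatabase;
import liquibase.datatype.core.CharType;
public class DmCharTypeCheck {
    public static void main(String[] args) {
        CharType charType = new CharType();
        // Single quotes are doubled by escapeStringForDatabase(); expected output is something like 'it''s a test'
        String literal = charType.objectToSql("it's a test", new DmDatabase());
        System.out.println(literal);
    }
}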
liquibase.snapshot.JdbcDatabaseSnapshot:
package liquibase.snapshot;
import liquibase.CatalogAndSchema;
import liquibase.Scope;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.Database;
import liquibase.database.DatabaseConnection;
import liquibase.database.LiquibaseTableNamesFactory;
import liquibase.database.core.*;
import liquibase.database.jvm.JdbcConnection;
import liquibase.exception.DatabaseException;
import liquibase.executor.jvm.ColumnMapRowMapper;
import liquibase.executor.jvm.RowMapperNotNullConstraintsResultSetExtractor;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.Catalog;
import liquibase.structure.core.Schema;
import liquibase.structure.core.Table;
import liquibase.structure.core.View;
import liquibase.util.JdbcUtil;
import liquibase.util.StringUtil;
import java.sql.*;
import java.util.*;
public class JdbcDatabaseSnapshot extends DatabaseSnapshot {
private boolean warnedAboutDbaRecycleBin;
private static final boolean ignoreWarnAboutDbaRecycleBin = Boolean.getBoolean("liquibase.ignoreRecycleBinWarning");
private CachingDatabaseMetaData cachingDatabaseMetaData;
private Map<String, CachedRow> cachedExpressionMap = null;
private Set<String> userDefinedTypes;
public JdbcDatabaseSnapshot(DatabaseObject[] examples, Database database, SnapshotControl snapshotControl) throws DatabaseException, InvalidExampleException {
super(examples, database, snapshotControl);
}
public JdbcDatabaseSnapshot(DatabaseObject[] examples, Database database) throws DatabaseException, InvalidExampleException {
super(examples, database);
}
public CachingDatabaseMetaData getMetaDataFromCache() throws SQLException {
if (cachingDatabaseMetaData == null) {
DatabaseMetaData databaseMetaData = null;
if (getDatabase().getConnection() != null) {
databaseMetaData = ((JdbcConnection) getDatabase().getConnection()).getUnderlyingConnection().getMetaData();
}
cachingDatabaseMetaData = new CachingDatabaseMetaData(this.getDatabase(), databaseMetaData);
}
return cachingDatabaseMetaData;
}
public class CachingDatabaseMetaData {
private static final String SQL_FILTER_MATCH_ALL = "%";
private final DatabaseMetaData databaseMetaData;
private final Database database;
public CachingDatabaseMetaData(Database database, DatabaseMetaData metaData) {
this.databaseMetaData = metaData;
this.database = database;
}
public DatabaseMetaData getDatabaseMetaData() {
return databaseMetaData;
}
public List<CachedRow> getForeignKeys(final String catalogName, final String schemaName, final String tableName,
final String fkName) throws DatabaseException {
ForeignKeysResultSetCache foreignKeysResultSetCache = new ForeignKeysResultSetCache(database, catalogName, schemaName, tableName, fkName);
ResultSetCache importedKeys = getResultSetCache("getImportedKeys");
importedKeys.setBulkTracking(!(database instanceof MSSQLDatabase));
return importedKeys.get(foreignKeysResultSetCache);
}
public List<CachedRow> getIndexInfo(final String catalogName, final String schemaName, final String tableName, final String indexName) throws DatabaseException, SQLException {
return getResultSetCache("getIndexInfo").get(new ResultSetCache.UnionResultSetExtractor(database) {
public boolean isBulkFetchMode;
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"), row.getString("INDEX_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, indexName);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return getAllCatalogsStringScratchData() != null && ((database instanceof OracleDatabase));
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEM");
}
@Override
public List<CachedRow> fastFetch() throws SQLException, DatabaseException {
List<CachedRow> returnList = new ArrayList<>();
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
if (database instanceof DmDatabase) {
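// DM branch: same shape as the Oracle query below, but without the DBA_RECYCLEBIN join and
// without the FILTER_CONDITION / INDEX_OWNER columns that the Oracle branch uses to resolve index expressions.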
String sql =
"SELECT " +
"c.INDEX_NAME, " +
"3 AS TYPE, " +
"c.TABLE_OWNER AS TABLE_SCHEM, " +
"c.TABLE_NAME, " +
"c.COLUMN_NAME, " +
"c.COLUMN_POSITION AS ORDINAL_POSITION, " +
//"e.COLUMN_EXPRESSION AS FILTER_CONDITION, " +
"CASE I.UNIQUENESS WHEN 'UNIQUE' THEN 0 ELSE 1 END AS NON_UNIQUE, " +
"CASE c.DESCEND WHEN 'Y' THEN 'D' WHEN 'DESC' THEN 'D' WHEN 'N' THEN 'A' WHEN 'ASC' THEN 'A' END AS ASC_OR_DESC, " +
"CASE WHEN tablespace_name = (SELECT default_tablespace FROM user_users) " +
"THEN NULL ELSE tablespace_name END AS tablespace_name " +
"FROM ALL_IND_COLUMNS c " +
"JOIN ALL_INDEXES i ON i.owner=c.index_owner AND i.index_name = c.index_name and i.table_owner = c.table_owner ";
if (!isBulkFetchMode || getAllCatalogsStringScratchData() == null) {
sql += "WHERE c.TABLE_OWNER = '" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "' ";
} else {
sql += "WHERE c.TABLE_OWNER IN ('" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "', " + getAllCatalogsStringScratchData() + ")";
}
sql += "AND i.OWNER = c.TABLE_OWNER ";
if (!isBulkFetchMode && (tableName != null)) {
sql += " AND c.TABLE_NAME='" + tableName + "'";
}
if (!isBulkFetchMode && (indexName != null)) {
sql += " AND c.INDEX_NAME='" + indexName + "'";
}
sql += " ORDER BY c.INDEX_NAME, ORDINAL_POSITION";
returnList.addAll(executeAndExtract(sql, database));
} else if (database instanceof OracleDatabase) {
warnAboutDbaRecycleBin();
//oracle getIndexInfo is buggy and slow. See Issue 1824548 and http://forums.oracle.com/forums/thread.jspa?messageID=578383
String sql =
"SELECT " +
"c.INDEX_NAME, " +
"3 AS TYPE, " +
"c.TABLE_OWNER AS TABLE_SCHEM, " +
"c.TABLE_NAME, " +
"c.COLUMN_NAME, " +
"c.COLUMN_POSITION AS ORDINAL_POSITION, " +
"NULL AS FILTER_CONDITION, " +
"c.INDEX_OWNER, " +
"CASE I.UNIQUENESS WHEN 'UNIQUE' THEN 0 ELSE 1 END AS NON_UNIQUE, " +
"CASE c.DESCEND WHEN 'Y' THEN 'D' WHEN 'DESC' THEN 'D' WHEN 'N' THEN 'A' WHEN 'ASC' THEN 'A' END AS ASC_OR_DESC, " +
"CASE WHEN tablespace_name = (SELECT default_tablespace FROM user_users) " +
"THEN NULL ELSE tablespace_name END AS tablespace_name " +
"FROM ALL_IND_COLUMNS c " +
"JOIN ALL_INDEXES i ON i.owner=c.index_owner AND i.index_name = c.index_name and i.table_owner = c.table_owner " +
"LEFT OUTER JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=c.table_name ";
if (!isBulkFetchMode || getAllCatalogsStringScratchData() == null) {
sql += "WHERE c.TABLE_OWNER = '" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "' ";
} else {
sql += "WHERE c.TABLE_OWNER IN ('" + database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class) + "', " + getAllCatalogsStringScratchData() + ")";
}
sql += "AND i.OWNER = c.TABLE_OWNER " +
"AND d.object_name IS NULL ";
if (!isBulkFetchMode && (tableName != null)) {
sql += " AND c.TABLE_NAME='" + tableName + "'";
}
if (!isBulkFetchMode && (indexName != null)) {
sql += " AND c.INDEX_NAME='" + indexName + "'";
}
sql += " ORDER BY c.INDEX_NAME, ORDINAL_POSITION";
returnList.addAll(setIndexExpressions(executeAndExtract(sql, database)));
} else if (database instanceof MSSQLDatabase) {
String tableCat = "original_db_name()";
if (9 <= database.getDatabaseMajorVersion()) {
tableCat = "db_name()";
}
//fetch additional index info
String sql = "SELECT " +
tableCat + " as TABLE_CAT, " +
"object_schema_name(i.object_id) as TABLE_SCHEM, " +
"object_name(i.object_id) as TABLE_NAME, " +
"CASE is_unique WHEN 1 then 0 else 1 end as NON_UNIQUE, " +
"object_name(i.object_id) as INDEX_QUALIFIER, " +
"i.name as INDEX_NAME, " +
"case i.type when 1 then 1 ELSE 3 end as TYPE, " +
"key_ordinal as ORDINAL_POSITION, " +
"COL_NAME(c.object_id,c.column_id) AS COLUMN_NAME, " +
"case is_descending_key when 0 then 'A' else 'D' end as ASC_OR_DESC, " +
"null as CARDINALITY, " +
"null as PAGES, " +
"i.filter_definition as FILTER_CONDITION, " +
"o.type AS INTERNAL_OBJECT_TYPE, " +
"i.*, " +
"c.*, " +
"s.* " +
"FROM sys.indexes i " +
"join sys.index_columns c on i.object_id=c.object_id and i.index_id=c.index_id " +
"join sys.stats s on i.object_id=s.object_id and i.name=s.name " +
"join sys.objects o on i.object_id=o.object_id " +
"WHERE object_schema_name(i.object_id)='" + database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class) + "'";
if (!isBulkFetchMode && (tableName != null)) {
sql += " AND object_name(i.object_id)='" + database.escapeStringForDatabase(tableName) + "'";
}
if (!isBulkFetchMode && (indexName != null)) {
sql += " AND i.name='" + database.escapeStringForDatabase(indexName) + "'";
}
sql += "ORDER BY i.object_id, i.index_id, c.key_ordinal";
returnList.addAll(executeAndExtract(sql, database));
} else if (database instanceof Db2zDatabase) {
List<String> parameters = new ArrayList<>(3);
String sql = "SELECT i.CREATOR AS TABLE_SCHEM, " +
"i.TBNAME AS TABLE_NAME, " +
"i.NAME AS INDEX_NAME, " +
"3 AS TYPE, " +
"k.COLNAME AS COLUMN_NAME, " +
"k.COLSEQ AS ORDINAL_POSITION, " +
"CASE UNIQUERULE WHEN 'D' then 1 else 0 end as NON_UNIQUE, " +
"k.ORDERING AS ORDER, " +
"i.CREATOR AS INDEX_QUALIFIER " +
"FROM SYSIBM.SYSKEYS k " +
"JOIN SYSIBM.SYSINDEXES i " +
"ON k.IXNAME = i.NAME " +
"AND k.IXCREATOR = i.CREATOR " +
"WHERE i.CREATOR = ?";
parameters.add(database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class));
if (!isBulkFetchMode && tableName != null) {
sql += " AND i.TBNAME = ?";
parameters.add(database.escapeStringForDatabase(tableName));
}
if (!isBulkFetchMode && indexName != null) {
sql += " AND i.NAME = ?";
parameters.add(database.escapeStringForDatabase(indexName));
}
sql += "ORDER BY i.NAME, k.COLSEQ";
returnList.addAll(executeAndExtract(database, sql, parameters.toArray()));
} else if (!(database instanceof MariaDBDatabase) && database instanceof MySQLDatabase) {
//mysql 8.0.13 introduced support for indexes on `lower(first_name)` which comes back in an "expression" column
String filterConditionValue = "NULL";
if (database.getDatabaseMajorVersion() > 8 || (database.getDatabaseMajorVersion() == 8 && ((MySQLDatabase) database).getDatabasePatchVersion() >= 13)) {
filterConditionValue = "EXPRESSION";
}
StringBuilder sql = new StringBuilder("SELECT TABLE_CATALOG AS TABLE_CAT, TABLE_SCHEMA AS TABLE_SCHEM,");
sql.append(" TABLE_NAME, NON_UNIQUE, NULL AS INDEX_QUALIFIER, INDEX_NAME,");
sql.append(DatabaseMetaData.tableIndexOther);
sql.append(" AS TYPE, SEQ_IN_INDEX AS ORDINAL_POSITION, COLUMN_NAME,");
sql.append("COLLATION AS ASC_OR_DESC, CARDINALITY, 0 AS PAGES, " + filterConditionValue + " AS FILTER_CONDITION FROM INFORMATION_SCHEMA.STATISTICS WHERE");
sql.append(" TABLE_SCHEMA = '").append(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class)).append("'");
if (!isBulkFetchMode && tableName != null) {
sql.append(" AND TABLE_NAME = '").append(database.escapeStringForDatabase(tableName)).append("'");
}
if (!isBulkFetchMode && indexName != null) {
sql.append(" AND INDEX_NAME='").append(database.escapeStringForDatabase(indexName)).append("'");
}
sql.append("ORDER BY NON_UNIQUE, INDEX_NAME, SEQ_IN_INDEX");
returnList.addAll(executeAndExtract(sql.toString(), database));
} else {
/*
* If we do not know in which table to look for the index, things get a little bit ugly.
* First, we get a collection of all tables within the catalogAndSchema, then iterate through
* them until we (hopefully) find the index we are looking for.
*/
List<String> tables = new ArrayList<>();
if (tableName == null) {
// Build a list of all candidate tables in the catalog/schema that might contain the index
for (CachedRow row : getTables(((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema), ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), null)) {
tables.add(row.getString("TABLE_NAME"));
}
} else {
tables.add(tableName);
}
// Iterate through all the candidate tables and try to find the index.
for (String tableName1 : tables) {
ResultSet rs = databaseMetaData.getIndexInfo(
((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema),
tableName1,
false,
true);
List<CachedRow> rows = extract(rs, (database instanceof InformixDatabase));
returnList.addAll(rows);
}
}
return returnList;
}
private List<CachedRow> setIndexExpressions(List<CachedRow> c) throws DatabaseException, SQLException {
Map<String, CachedRow> expressionMap = getCachedExpressionMap();
c.forEach(row -> {
row.set("FILTER_CONDITION", null);
String key = row.getString("INDEX_OWNER") + "::" + row.getString("INDEX_NAME") + "::" +
row.getInt("ORDINAL_POSITION");
CachedRow fromMap = expressionMap.get(key);
if (fromMap != null) {
row.set("FILTER_CONDITION", fromMap.get("COLUMN_EXPRESSION"));
}
});
return c;
}
private Map<String, CachedRow> getCachedExpressionMap() throws DatabaseException, SQLException {
if (cachedExpressionMap != null) {
return cachedExpressionMap;
}
String expSql = "SELECT e.column_expression, e.index_owner, e.index_name, e.column_position FROM all_ind_expressions e";
List<CachedRow> ec = executeAndExtract(expSql, database);
cachedExpressionMap = new HashMap<>();
ec.forEach(row -> {
String key = row.getString("INDEX_OWNER") + "::" + row.getString("INDEX_NAME") + "::" +
row.getInt("COLUMN_POSITION");
cachedExpressionMap.put(key, row);
});
return cachedExpressionMap;
}
@Override
public List<CachedRow> bulkFetch() throws SQLException, DatabaseException {
this.isBulkFetchMode = true;
return fastFetch();
}
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
if (database instanceof OracleDatabase || database instanceof MSSQLDatabase) {
return JdbcDatabaseSnapshot.this.getAllCatalogsStringScratchData() != null || (tableName == null && indexName == null) || super.shouldBulkSelect(schemaKey, resultSetCache);
}
return false;
}
});
}
protected void warnAboutDbaRecycleBin() {
if (!ignoreWarnAboutDbaRecycleBin && !warnedAboutDbaRecycleBin && !(((OracleDatabase) database).canAccessDbaRecycleBin())) {
Scope.getCurrentScope().getLog(getClass()).warning(((OracleDatabase) database).getDbaRecycleBinWarning());
warnedAboutDbaRecycleBin = true;
}
}
/**
* Return the columns for the given catalog, schema, table, and column.
*/
public List<CachedRow> getColumns(final String catalogName, final String schemaName, final String tableName, final String columnName) throws SQLException, DatabaseException {
if ((database instanceof MSSQLDatabase) && (userDefinedTypes == null)) {
userDefinedTypes = new HashSet<>();
DatabaseConnection databaseConnection = database.getConnection();
if (databaseConnection instanceof JdbcConnection) {
Statement stmt = null;
ResultSet resultSet = null;
try {
stmt = ((JdbcConnection) databaseConnection).getUnderlyingConnection().createStatement();
resultSet = stmt.executeQuery("select name from " + (catalogName == null ? "" : "[" + catalogName + "].") + "sys.types where is_user_defined=1");
while (resultSet.next()) {
userDefinedTypes.add(resultSet.getString("name").toLowerCase());
}
} finally {
JdbcUtil.close(resultSet, stmt);
}
}
}
GetColumnResultSetCache getColumnResultSetCache = new GetColumnResultSetCache(database, catalogName,
schemaName, tableName, columnName);
return getResultSetCache("getColumns").get(getColumnResultSetCache);
}
/**
* Return the NotNullConstraints for the given catalog, schema, table, and column.
*/
public List<CachedRow> getNotNullConst(final String catalogName, final String schemaName,
final String tableName) throws DatabaseException {
if (!(database instanceof OracleDatabase)) {
return Collections.emptyList();
}
GetNotNullConstraintsResultSetCache getNotNullConstraintsResultSetCache = new GetNotNullConstraintsResultSetCache(database, catalogName,
schemaName, tableName);
return getResultSetCache("getNotNullConst").get(getNotNullConstraintsResultSetCache);
}
private class GetColumnResultSetCache extends ResultSetCache.SingleResultSetExtractor {
final String catalogName;
final String schemaName;
final String tableName;
final String columnName;
private GetColumnResultSetCache(Database database, String catalogName, String schemaName, String tableName, String columnName) {
super(database);
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = tableName;
this.columnName = columnName;
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"), row.getString("COLUMN_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, columnName);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
String catalogs = getAllCatalogsStringScratchData();
return catalogs != null && schemaKey != null
&& catalogs.contains("'" + schemaKey.toUpperCase() + "'")
&& (database instanceof OracleDatabase);
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEM");
}
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
LiquibaseTableNamesFactory liquibaseTableNamesFactory = Scope.getCurrentScope().getSingleton(LiquibaseTableNamesFactory.class);
List<String> liquibaseTableNames = liquibaseTableNamesFactory.getLiquibaseTableNames(database);
return liquibaseTableNames.stream().noneMatch(tableName::equalsIgnoreCase);
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
if (database instanceof DmDatabase) {
return dmQuery(false);
} else if (database instanceof OracleDatabase) {
return oracleQuery(false);
} else if (database instanceof MSSQLDatabase) {
return mssqlQuery(false);
}
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
try {
List<CachedRow> returnList =
extract(
databaseMetaData.getColumns(
((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
escapeForLike(((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), database),
escapeForLike(tableName, database),
SQL_FILTER_MATCH_ALL)
);
//
// IF MARIADB OR SQL ANYWHERE
// Query to get actual data types and then map each column to its CachedRow
//
determineActualDataTypes(returnList, tableName);
return returnList;
} catch (SQLException e) {
if (shouldReturnEmptyColumns(e)) { //view with table already dropped. Act like it has no columns.
return new ArrayList<>();
} else {
throw e;
}
}
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
if (database instanceof DmDatabase) {
return dmQuery(true);
} else if (database instanceof OracleDatabase) {
return oracleQuery(true);
} else if (database instanceof MSSQLDatabase) {
return mssqlQuery(true);
}
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
try {
List<CachedRow> returnList =
extract(databaseMetaData.getColumns(((AbstractJdbcDatabase) database)
.getJdbcCatalogName(catalogAndSchema),
escapeForLike(((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema), database),
SQL_FILTER_MATCH_ALL, SQL_FILTER_MATCH_ALL));
//
// IF MARIADB OR SQL ANYWHERE
// Query to get actual data types and then map each column to its CachedRow
//
determineActualDataTypes(returnList, null);
return returnList;
} catch (SQLException e) {
if (shouldReturnEmptyColumns(e)) {
return new ArrayList<>();
} else {
throw e;
}
}
}
//
// For MariaDB, query for the data type column so that we can correctly
// set the DATETIME(6) type if specified
//
// For SQL Anywhere, query for the scale column so we can correctly
// set the size unit
//
private void determineActualDataTypes(List<CachedRow> returnList, String tableName) throws SQLException {
//
// If not MariaDB / SQL Anywhere then just return
//
if (!(database instanceof MariaDBDatabase || database instanceof SybaseASADatabase)) {
return;
}
if (database instanceof SybaseASADatabase) {
//
// Query for actual data type for column. The actual SYSTABCOL.scale column value is
// not reported by the DatabaseMetadata.getColumns() query for CHAR-limited (in contrast
// to BYTE-limited) columns, and it is needed to capture the kind if limitation.
// The actual SYSTABCOL.column_type is not reported by the DatabaseMetadata.getColumns()
// query as the IS_GENERATEDCOLUMN columns is missing in the result set, and it is needed to
// capture the kind of column (regular or computed).
//
// See https://help.sap.com/docs/SAP_SQL_Anywhere/93079d4ba8e44920ae63ffb4def91f5b/3beaa3956c5f1014883cb0c3e3559cc9.html.
//
String selectStatement =
"SELECT table_name, column_name, scale, column_type FROM SYSTABCOL KEY JOIN SYSTAB KEY JOIN SYSUSER " +
"WHERE user_name = ? AND ? IS NULL OR table_name = ?";
Connection underlyingConnection = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
try (PreparedStatement stmt = underlyingConnection.prepareStatement(selectStatement)) {
stmt.setString(1, schemaName);
stmt.setString(2, tableName);
stmt.setString(3, tableName);
try (ResultSet columnSelectRS = stmt.executeQuery()) {
while (columnSelectRS.next()) {
String selectedTableName = columnSelectRS.getString("table_name");
String selectedColumnName = columnSelectRS.getString("column_name");
int selectedScale = columnSelectRS.getInt("scale");
String selectedColumnType = columnSelectRS.getString("column_type");
for (CachedRow row : returnList) {
String rowTableName = row.getString("TABLE_NAME");
String rowColumnName = row.getString("COLUMN_NAME");
if (rowTableName.equalsIgnoreCase(selectedTableName) &&
rowColumnName.equalsIgnoreCase(selectedColumnName)) {
int rowDataType = row.getInt("DATA_TYPE");
if (rowDataType == Types.VARCHAR || rowDataType == Types.CHAR) {
row.set("scale", selectedScale);
}
row.set("IS_GENERATEDCOLUMN", "C".equals(selectedColumnType) ? "YES" : "NO");
break;
}
}
}
}
} catch (SQLException sqle) {
throw new RuntimeException(sqle);
}
return;
}
//
// Query for actual data type for column. The actual DATA_TYPE column string is
// not returned by the DatabaseMetadata.getColumns() query, and it is needed
// to capture DATETIME(<precision>) data types.
//
StringBuilder selectStatement = new StringBuilder(
"SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = ?");
if(tableName != null) {
selectStatement.append(" AND TABLE_NAME = ?");
}
Connection underlyingConnection = ((JdbcConnection) database.getConnection()).getUnderlyingConnection();
PreparedStatement statement = underlyingConnection.prepareStatement(selectStatement.toString());
statement.setString(1, schemaName);
if (tableName != null) {
statement.setString(2, tableName);
}
try {
ResultSet columnSelectRS = statement.executeQuery(); // run the prepared query with its bound parameters; executeQuery(String) on a PreparedStatement would throw
//
// Iterate the result set from the query and match the rows
// to the rows that were returned by getColumns() in order
// to assign the actual DATA_TYPE string to the appropriate row.
//
while (columnSelectRS.next()) {
String selectedTableName = columnSelectRS.getString("TABLE_NAME");
String selectedColumnName = columnSelectRS.getString("COLUMN_NAME");
String actualDataType = columnSelectRS.getString("DATA_TYPE");
for (CachedRow row : returnList) {
String rowTableName = row.getString("TABLE_NAME");
String rowColumnName = row.getString("COLUMN_NAME");
String rowTypeName = row.getString("TYPE_NAME");
int rowDataType = row.getInt("DATA_TYPE");
if (rowTableName.equalsIgnoreCase(selectedTableName) &&
rowColumnName.equalsIgnoreCase(selectedColumnName) &&
rowTypeName.equalsIgnoreCase("datetime") &&
rowDataType == Types.OTHER &&
!rowTypeName.equalsIgnoreCase(actualDataType)) {
row.set("TYPE_NAME", actualDataType);
row.set("DATA_TYPE", Types.TIMESTAMP);
break;
}
}
}
} catch (SQLException sqle) {
//
// Do not stop
//
}
finally {
JdbcUtil.closeStatement(statement);
}
}
protected boolean shouldReturnEmptyColumns(SQLException e) {
return e.getMessage().contains("references invalid table"); //view with table already dropped. Act like it has no columns.
}
protected List<CachedRow> oracleQuery(boolean bulk) throws DatabaseException, SQLException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
boolean collectIdentityData = database.getDatabaseMajorVersion() >= OracleDatabase.ORACLE_12C_MAJOR_VERSION;
String sql = "select NULL AS TABLE_CAT, OWNER AS TABLE_SCHEM, 'NO' as IS_AUTOINCREMENT, cc.COMMENTS AS REMARKS," +
"OWNER, TABLE_NAME, COLUMN_NAME, DATA_TYPE AS DATA_TYPE_NAME, DATA_TYPE_MOD, DATA_TYPE_OWNER, " +
// note: oracle reports DATA_LENGTH=4*CHAR_LENGTH when using VARCHAR( <N> CHAR ), thus BYTEs
"DECODE (c.data_type, 'CHAR', 1, 'VARCHAR2', 12, 'NUMBER', 3, 'LONG', -1, 'DATE', " + "93" + ", 'RAW', -3, 'LONG RAW', -4, 'BLOB', 2004, 'CLOB', 2005, 'BFILE', -13, 'FLOAT', 6, 'TIMESTAMP(6)', 93, 'TIMESTAMP(6) WITH TIME ZONE', -101, 'TIMESTAMP(6) WITH LOCAL TIME ZONE', -102, 'INTERVAL YEAR(2) TO MONTH', -103, 'INTERVAL DAY(2) TO SECOND(6)', -104, 'BINARY_FLOAT', 100, 'BINARY_DOUBLE', 101, 'XMLTYPE', 2009, 1111) AS data_type, " +
"DECODE( CHAR_USED, 'C',CHAR_LENGTH, DATA_LENGTH ) as DATA_LENGTH, " +
"DATA_PRECISION, DATA_SCALE, NULLABLE, COLUMN_ID as ORDINAL_POSITION, DEFAULT_LENGTH, " +
"DATA_DEFAULT, " +
"NUM_BUCKETS, CHARACTER_SET_NAME, " +
"CHAR_COL_DECL_LENGTH, CHAR_LENGTH, " +
"CHAR_USED, VIRTUAL_COLUMN ";
if (collectIdentityData) {
sql += ", DEFAULT_ON_NULL, IDENTITY_COLUMN, ic.GENERATION_TYPE ";
}
sql += "FROM ALL_TAB_COLS c " +
"JOIN ALL_COL_COMMENTS cc USING ( OWNER, TABLE_NAME, COLUMN_NAME ) ";
if (collectIdentityData) {
sql += "LEFT JOIN ALL_TAB_IDENTITY_COLS ic USING (OWNER, TABLE_NAME, COLUMN_NAME ) ";
}
if (!bulk || getAllCatalogsStringScratchData() == null) {
sql += "WHERE OWNER='" + jdbcSchemaName + "' AND hidden_column='NO'";
} else {
sql += "WHERE OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") AND hidden_column='NO'";
}
if (!bulk) {
if (tableName != null) {
sql += " AND TABLE_NAME='" + database.escapeStringForDatabase(tableName) + "'";
}
if (columnName != null) {
sql += " AND COLUMN_NAME='" + database.escapeStringForDatabase(columnName) + "'";
}
}
sql += " AND " + ((OracleDatabase) database).getSystemTableWhereClause("TABLE_NAME");
sql += " ORDER BY OWNER, TABLE_NAME, c.COLUMN_ID";
return this.executeAndExtract(sql, database);
}
protected List<CachedRow> dmQuery(boolean bulk) throws DatabaseException, SQLException {
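// Copy of oracleQuery() with two changes for DM: the 12c identity-column lookup (ALL_TAB_IDENTITY_COLS)
// is removed, and ALL_COL_COMMENTS is joined with LEFT JOIN so columns without comments are still returned.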
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
//boolean collectIdentityData = database.getDatabaseMajorVersion() >= OracleDatabase.ORACLE_12C_MAJOR_VERSION;
String sql = "select NULL AS TABLE_CAT, OWNER AS TABLE_SCHEM, 'NO' as IS_AUTOINCREMENT, cc.COMMENTS AS REMARKS," +
"OWNER, TABLE_NAME, COLUMN_NAME, DATA_TYPE AS DATA_TYPE_NAME, DATA_TYPE_MOD, DATA_TYPE_OWNER, " +
// note: oracle reports DATA_LENGTH=4*CHAR_LENGTH when using VARCHAR( <N> CHAR ), thus BYTEs
"DECODE (c.data_type, 'CHAR', 1, 'VARCHAR2', 12, 'NUMBER', 3, 'LONG', -1, 'DATE', " + "93" + ", 'RAW', -3, 'LONG RAW', -4, 'BLOB', 2004, 'CLOB', 2005, 'BFILE', -13, 'FLOAT', 6, 'TIMESTAMP(6)', 93, 'TIMESTAMP(6) WITH TIME ZONE', -101, 'TIMESTAMP(6) WITH LOCAL TIME ZONE', -102, 'INTERVAL YEAR(2) TO MONTH', -103, 'INTERVAL DAY(2) TO SECOND(6)', -104, 'BINARY_FLOAT', 100, 'BINARY_DOUBLE', 101, 'XMLTYPE', 2009, 1111) AS data_type, " +
"DECODE( CHAR_USED, 'C',CHAR_LENGTH, DATA_LENGTH ) as DATA_LENGTH, " +
"DATA_PRECISION, DATA_SCALE, NULLABLE, COLUMN_ID as ORDINAL_POSITION, DEFAULT_LENGTH, " +
"DATA_DEFAULT, " +
"NUM_BUCKETS, CHARACTER_SET_NAME, " +
"CHAR_COL_DECL_LENGTH, CHAR_LENGTH, " +
"CHAR_USED, VIRTUAL_COLUMN ";
// if (collectIdentityData) {
// sql += ", DEFAULT_ON_NULL, IDENTITY_COLUMN, ic.GENERATION_TYPE ";
// }
sql += "FROM ALL_TAB_COLS c " +
// Fixed DM schema issue. INNER JOIN ---> LEFT JOIN
"LEFT JOIN ALL_COL_COMMENTS cc USING ( OWNER, TABLE_NAME, COLUMN_NAME ) ";
// if (collectIdentityData) {
// sql += "LEFT JOIN ALL_TAB_IDENTITY_COLS ic USING (OWNER, TABLE_NAME, COLUMN_NAME ) ";
// }
if (!bulk || getAllCatalogsStringScratchData() == null) {
sql += "WHERE OWNER='" + jdbcSchemaName + "' AND hidden_column='NO'";
} else {
sql += "WHERE OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") AND hidden_column='NO'";
}
if (!bulk) {
if (tableName != null) {
sql += " AND TABLE_NAME='" + database.escapeStringForDatabase(tableName) + "'";
}
if (columnName != null) {
sql += " AND COLUMN_NAME='" + database.escapeStringForDatabase(columnName) + "'";
}
}
sql += " AND " + ((DmDatabase) database).getSystemTableWhereClause("TABLE_NAME");
sql += " ORDER BY OWNER, TABLE_NAME, c.COLUMN_ID";
return this.executeAndExtract(sql, database);
}
protected List<CachedRow> mssqlQuery(boolean bulk) throws DatabaseException, SQLException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
String dbIdParam;
String databasePrefix;
if (databaseName == null) {
databasePrefix = "";
dbIdParam = "";
} else {
dbIdParam = ", db_id('" + databaseName + "')";
databasePrefix = "[" + databaseName + "].";
}
String sql = "select " +
"db_name(" + (databaseName == null ? "" : "db_id('" + databaseName + "')") + ") AS TABLE_CAT, " +
"object_schema_name(c.object_id" + dbIdParam + ") AS TABLE_SCHEM, " +
"object_name(c.object_id" + dbIdParam + ") AS TABLE_NAME, " +
"c.name AS COLUMN_NAME, " +
"is_filestream AS IS_FILESTREAM, " +
"is_rowguidcol AS IS_ROWGUIDCOL, " +
"CASE WHEN c.is_identity = 'true' THEN 'YES' ELSE 'NO' END as IS_AUTOINCREMENT, " +
"{REMARKS_COLUMN_PLACEHOLDER}" +
"t.name AS TYPE_NAME, " +
"dc.name as COLUMN_DEF_NAME, " +
"dc.definition as COLUMN_DEF, " +
// data type mapping from https://msdn.microsoft.com/en-us/library/ms378878(v=sql.110).aspx
"CASE t.name " +
"WHEN 'bigint' THEN " + Types.BIGINT + " " +
"WHEN 'binary' THEN " + Types.BINARY + " " +
"WHEN 'bit' THEN " + Types.BIT + " " +
"WHEN 'char' THEN " + Types.CHAR + " " +
"WHEN 'date' THEN " + Types.DATE + " " +
"WHEN 'datetime' THEN " + Types.TIMESTAMP + " " +
"WHEN 'datetime2' THEN " + Types.TIMESTAMP + " " +
"WHEN 'datetimeoffset' THEN -155 " +
"WHEN 'decimal' THEN " + Types.DECIMAL + " " +
"WHEN 'float' THEN " + Types.DOUBLE + " " +
"WHEN 'image' THEN " + Types.LONGVARBINARY + " " +
"WHEN 'int' THEN " + Types.INTEGER + " " +
"WHEN 'money' THEN " + Types.DECIMAL + " " +
"WHEN 'nchar' THEN " + Types.NCHAR + " " +
"WHEN 'ntext' THEN " + Types.LONGNVARCHAR + " " +
"WHEN 'numeric' THEN " + Types.NUMERIC + " " +
"WHEN 'nvarchar' THEN " + Types.NVARCHAR + " " +
"WHEN 'real' THEN " + Types.REAL + " " +
"WHEN 'smalldatetime' THEN " + Types.TIMESTAMP + " " +
"WHEN 'smallint' THEN " + Types.SMALLINT + " " +
"WHEN 'smallmoney' THEN " + Types.DECIMAL + " " +
"WHEN 'text' THEN " + Types.LONGVARCHAR + " " +
"WHEN 'time' THEN " + Types.TIME + " " +
"WHEN 'timestamp' THEN " + Types.BINARY + " " +
"WHEN 'tinyint' THEN " + Types.TINYINT + " " +
"WHEN 'udt' THEN " + Types.VARBINARY + " " +
"WHEN 'uniqueidentifier' THEN " + Types.CHAR + " " +
"WHEN 'varbinary' THEN " + Types.VARBINARY + " " +
"WHEN 'varbinary(max)' THEN " + Types.VARBINARY + " " +
"WHEN 'varchar' THEN " + Types.VARCHAR + " " +
"WHEN 'varchar(max)' THEN " + Types.VARCHAR + " " +
"WHEN 'xml' THEN " + Types.LONGVARCHAR + " " +
"WHEN 'LONGNVARCHAR' THEN " + Types.SQLXML + " " +
"ELSE " + Types.OTHER + " END AS DATA_TYPE, " +
"CASE WHEN c.is_nullable = 'true' THEN 1 ELSE 0 END AS NULLABLE, " +
"10 as NUM_PREC_RADIX, " +
"c.column_id as ORDINAL_POSITION, " +
"c.scale as DECIMAL_DIGITS, " +
"c.max_length as COLUMN_SIZE, " +
"c.precision as DATA_PRECISION, " +
"c.is_computed as IS_COMPUTED " +
"FROM " + databasePrefix + "sys.columns c " +
"inner join " + databasePrefix + "sys.types t on c.user_type_id=t.user_type_id " +
"{REMARKS_JOIN_PLACEHOLDER}" +
"left outer join " + databasePrefix + "sys.default_constraints dc on dc.parent_column_id = c.column_id AND dc.parent_object_id=c.object_id AND type_desc='DEFAULT_CONSTRAINT' " +
"WHERE object_schema_name(c.object_id" + dbIdParam + ")='" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "'";
if (!bulk) {
if (tableName != null) {
sql += " and object_name(c.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(tableName) + "'";
}
if (columnName != null) {
sql += " and c.name='" + database.escapeStringForDatabase(columnName) + "'";
}
}
sql += "order by object_schema_name(c.object_id" + dbIdParam + "), object_name(c.object_id" + dbIdParam + "), c.column_id";
// sys.extended_properties is added to Azure on V12: https://feedback.azure.com/forums/217321-sql-database/suggestions/6549815-add-sys-extended-properties-for-meta-data-support
if ((!((MSSQLDatabase) database).isAzureDb()) // Either NOT AzureDB (=SQL Server 2008 or higher)
|| (database.getDatabaseMajorVersion() >= 12)) { // or at least AzureDB v12
// SQL Server 2005 or later
// https://technet.microsoft.com/en-us/library/ms177541.aspx
sql = sql.replace("{REMARKS_COLUMN_PLACEHOLDER}", "CAST([ep].[value] AS [nvarchar](MAX)) AS [REMARKS], ");
sql = sql.replace("{REMARKS_JOIN_PLACEHOLDER}", "left outer join " + databasePrefix + "[sys].[extended_properties] AS [ep] ON [ep].[class] = 1 " +
"AND [ep].[major_id] = c.object_id " +
"AND [ep].[minor_id] = column_id " +
"AND [ep].[name] = 'MS_Description' ");
} else {
sql = sql.replace("{REMARKS_COLUMN_PLACEHOLDER}", "");
sql = sql.replace("{REMARKS_JOIN_PLACEHOLDER}", "");
}
List<CachedRow> rows = this.executeAndExtract(sql, database);
for (CachedRow row : rows) {
String typeName = row.getString("TYPE_NAME");
if ("nvarchar".equals(typeName) || "nchar".equals(typeName)) {
Integer size = row.getInt("COLUMN_SIZE");
if (size > 0) {
row.set("COLUMN_SIZE", size / 2);
}
} else if ((row.getInt("DATA_PRECISION") != null) && (row.getInt("DATA_PRECISION") > 0)) {
row.set("COLUMN_SIZE", row.getInt("DATA_PRECISION"));
}
}
return rows;
}
@Override
protected List<CachedRow> extract(ResultSet resultSet, boolean informixIndexTrimHint) throws SQLException {
List<CachedRow> rows = super.extract(resultSet, informixIndexTrimHint);
if ((database instanceof MSSQLDatabase) && !userDefinedTypes.isEmpty()) { //UDT types in MSSQL don't take parameters
for (CachedRow row : rows) {
String dataType = (String) row.get("TYPE_NAME");
if (userDefinedTypes.contains(dataType.toLowerCase())) {
row.set("COLUMN_SIZE", null);
row.set("DECIMAL_DIGITS ", null);
}
}
}
return rows;
}
}
private class ForeignKeysResultSetCache extends ResultSetCache.UnionResultSetExtractor {
final String catalogName;
final String schemaName;
final String tableName;
final String fkName;
private ForeignKeysResultSetCache(Database database, String catalogName, String schemaName, String tableName, String fkName) {
super(database);
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = tableName;
this.fkName = fkName;
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("FKTABLE_CAT"), row.getString("FKTABLE_SCHEM"), database, row.getString("FKTABLE_NAME"), row.getString("FK_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, tableName, fkName);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("FKTABLE_SCHEM");
}
@Override
public List<CachedRow> fastFetch() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcCatalogName = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
if (database instanceof DB2Database) {
if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
executeAndExtract(getDB2ForAs400Sql(jdbcSchemaName, tableName), database);
}
return querytDB2Luw(jdbcSchemaName, tableName);
} else if (database instanceof Db2zDatabase) {
return queryDb2Zos(catalogAndSchema, tableName);
} else {
List<String> tables = new ArrayList<>();
if (tableName == null) {
for (CachedRow row : getTables(jdbcCatalogName, jdbcSchemaName, null)) {
tables.add(row.getString("TABLE_NAME"));
}
} else {
tables.add(tableName);
}
List<CachedRow> returnList = new ArrayList<>();
for (String foundTable : tables) {
if (database instanceof OracleDatabase) {
throw new RuntimeException("Should have bulk selected");
} else {
returnList.addAll(extract(databaseMetaData.getImportedKeys(jdbcCatalogName, jdbcSchemaName, foundTable)));
}
}
return returnList;
}
}
@Override
public List<CachedRow> bulkFetch() throws SQLException, DatabaseException {
if (database instanceof DmDatabase) {
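// DM exposes the same Oracle-style dictionary views here, so getDmSql() below issues the same
// ALL_CONSTRAINTS / ALL_CONS_COLUMNS query as getOracleSql().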
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
String sql = getDmSql(jdbcSchemaName);
return executeAndExtract(sql, database);
} else if (database instanceof OracleDatabase) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
String sql = getOracleSql(jdbcSchemaName);
return executeAndExtract(sql, database);
} else if (database instanceof DB2Database) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
executeAndExtract(getDB2ForAs400Sql(jdbcSchemaName, null), database);
}
return querytDB2Luw(jdbcSchemaName, null);
} else if (database instanceof Db2zDatabase) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
return queryDb2Zos(catalogAndSchema, null);
} else if (database instanceof MSSQLDatabase) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
String sql = getMSSQLSql(jdbcSchemaName, tableName);
return executeAndExtract(sql, database);
} else {
throw new RuntimeException("Cannot bulk select");
}
}
protected String getOracleSql(String jdbcSchemaName) {
String sql = "SELECT /*+rule*/" +
" NULL AS pktable_cat, " +
" p.owner as pktable_schem, " +
" p.table_name as pktable_name, " +
" pc.column_name as pkcolumn_name, " +
" NULL as fktable_cat, " +
" f.owner as fktable_schem, " +
" f.table_name as fktable_name, " +
" fc.column_name as fkcolumn_name, " +
" fc.position as key_seq, " +
" NULL as update_rule, " +
" decode (f.delete_rule, 'CASCADE', 0, 'SET NULL', 2, 1) as delete_rule, " +
" f.constraint_name as fk_name, " +
" p.constraint_name as pk_name, " +
" decode(f.deferrable, 'DEFERRABLE', 5, 'NOT DEFERRABLE', 7, 'DEFERRED', 6) deferrability, " +
" f.validated as fk_validate " +
"FROM " +
"all_cons_columns pc " +
"INNER JOIN all_constraints p " +
"ON pc.owner = p.owner " +
"AND pc.constraint_name = p.constraint_name " +
"INNER JOIN all_constraints f " +
"ON pc.owner = f.r_owner " +
"AND pc.constraint_name = f.r_constraint_name " +
"INNER JOIN all_cons_columns fc " +
"ON fc.owner = f.owner " +
"AND fc.constraint_name = f.constraint_name " +
"AND fc.position = pc.position ";
if (getAllCatalogsStringScratchData() == null) {
sql += "WHERE f.owner = '" + jdbcSchemaName + "' ";
} else {
sql += "WHERE f.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") ";
}
sql += "AND p.constraint_type in ('P', 'U') " +
"AND f.constraint_type = 'R' " +
"AND p.table_name NOT LIKE 'BIN$%' " +
"ORDER BY fktable_schem, fktable_name, key_seq";
return sql;
}
protected String getDmSql(String jdbcSchemaName) {
String sql = "SELECT /*+rule*/" +
" NULL AS pktable_cat, " +
" p.owner as pktable_schem, " +
" p.table_name as pktable_name, " +
" pc.column_name as pkcolumn_name, " +
" NULL as fktable_cat, " +
" f.owner as fktable_schem, " +
" f.table_name as fktable_name, " +
" fc.column_name as fkcolumn_name, " +
" fc.position as key_seq, " +
" NULL as update_rule, " +
" decode (f.delete_rule, 'CASCADE', 0, 'SET NULL', 2, 1) as delete_rule, " +
" f.constraint_name as fk_name, " +
" p.constraint_name as pk_name, " +
" decode(f.deferrable, 'DEFERRABLE', 5, 'NOT DEFERRABLE', 7, 'DEFERRED', 6) deferrability, " +
" f.validated as fk_validate " +
"FROM " +
"all_cons_columns pc " +
"INNER JOIN all_constraints p " +
"ON pc.owner = p.owner " +
"AND pc.constraint_name = p.constraint_name " +
"INNER JOIN all_constraints f " +
"ON pc.owner = f.r_owner " +
"AND pc.constraint_name = f.r_constraint_name " +
"INNER JOIN all_cons_columns fc " +
"ON fc.owner = f.owner " +
"AND fc.constraint_name = f.constraint_name " +
"AND fc.position = pc.position ";
if (getAllCatalogsStringScratchData() == null) {
sql += "WHERE f.owner = '" + jdbcSchemaName + "' ";
} else {
sql += "WHERE f.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") ";
}
sql += "AND p.constraint_type in ('P', 'U') " +
"AND f.constraint_type = 'R' " +
"AND p.table_name NOT LIKE 'BIN$%' " +
"ORDER BY fktable_schem, fktable_name, key_seq";
return sql;
}
protected String getMSSQLSql(String jdbcSchemaName, String tableName) {
//comes from select object_definition(object_id('sp_fkeys'))
return "select " +
"convert(sysname,db_name()) AS PKTABLE_CAT, " +
"convert(sysname,schema_name(o1.schema_id)) AS PKTABLE_SCHEM, " +
"convert(sysname,o1.name) AS PKTABLE_NAME, " +
"convert(sysname,c1.name) AS PKCOLUMN_NAME, " +
"convert(sysname,db_name()) AS FKTABLE_CAT, " +
"convert(sysname,schema_name(o2.schema_id)) AS FKTABLE_SCHEM, " +
"convert(sysname,o2.name) AS FKTABLE_NAME, " +
"convert(sysname,c2.name) AS FKCOLUMN_NAME, " +
"isnull(convert(smallint,k.constraint_column_id), convert(smallint,0)) AS KEY_SEQ, " +
"convert(smallint, case ObjectProperty(f.object_id, 'CnstIsUpdateCascade') when 1 then 0 else 1 end) AS UPDATE_RULE, " +
"convert(smallint, case ObjectProperty(f.object_id, 'CnstIsDeleteCascade') when 1 then 0 else 1 end) AS DELETE_RULE, " +
"convert(sysname,object_name(f.object_id)) AS FK_NAME, " +
"convert(sysname,i.name) AS PK_NAME, " +
"convert(smallint, 7) AS DEFERRABILITY " +
"from " +
"sys.objects o1, " +
"sys.objects o2, " +
"sys.columns c1, " +
"sys.columns c2, " +
"sys.foreign_keys f inner join " +
"sys.foreign_key_columns k on (k.constraint_object_id = f.object_id) inner join " +
"sys.indexes i on (f.referenced_object_id = i.object_id and f.key_index_id = i.index_id) " +
"where " +
"o1.object_id = f.referenced_object_id and " +
"o2.object_id = f.parent_object_id and " +
"c1.object_id = f.referenced_object_id and " +
"c2.object_id = f.parent_object_id and " +
"c1.column_id = k.referenced_column_id and " +
"c2.column_id = k.parent_column_id and " +
"((object_schema_name(o1.object_id)='" + jdbcSchemaName + "'" +
" and convert(sysname,schema_name(o2.schema_id))='" + jdbcSchemaName + "' and " +
"convert(sysname,o2.name)='" + tableName + "' ) or ( convert(sysname,schema_name" +
"(o2.schema_id))='" + jdbcSchemaName + "' and convert(sysname,o2.name)='" + tableName +
"' )) order by 5, 6, 7, 9, 8";
}
private List<CachedRow> querytDB2Luw(String jdbcSchemaName, String tableName) throws DatabaseException, SQLException {
List<String> parameters = new ArrayList<>(2);
StringBuilder sql = new StringBuilder ("SELECT " +
" pk_col.tabschema AS pktable_cat, " +
" pk_col.tabname as pktable_name, " +
" pk_col.colname as pkcolumn_name, " +
" fk_col.tabschema as fktable_cat, " +
" fk_col.tabname as fktable_name, " +
" fk_col.colname as fkcolumn_name, " +
" fk_col.colseq as key_seq, " +
" decode (ref.updaterule, 'A', 3, 'R', 1, 1) as update_rule, " +
" decode (ref.deleterule, 'A', 3, 'C', 0, 'N', 2, 'R', 1, 1) as delete_rule, " +
" ref.constname as fk_name, " +
" ref.refkeyname as pk_name, " +
" 7 as deferrability " +
"FROM " +
"syscat.references ref " +
"join syscat.keycoluse fk_col on ref.constname=fk_col.constname and ref.tabschema=fk_col.tabschema and ref.tabname=fk_col.tabname " +
"join syscat.keycoluse pk_col on ref.refkeyname=pk_col.constname and ref.reftabschema=pk_col.tabschema and ref.reftabname=pk_col.tabname and pk_col.colseq=fk_col.colseq " +
"WHERE ref.tabschema = ? ");
parameters.add(jdbcSchemaName);
if (tableName != null) {
sql.append("and fk_col.tabname = ? ");
parameters.add(tableName);
}
sql.append("ORDER BY fk_col.colseq");
return executeAndExtract(database, sql.toString(), parameters.toArray());
}
private String getDB2ForAs400Sql(String jdbcSchemaName, String tableName) {
return "SELECT " +
"pktable_cat, " +
"pktable_name, " +
"pkcolumn_name, " +
"fktable_cat, " +
"fktable_name, " +
"fkcolumn_name, " +
"key_seq, " +
"update_rule, " +
"delete_rule, " +
"fk_name, " +
"pk_name, " +
"deferrability " +
"FROM " +
"sysibm.SQLFORKEYS " +
"WHERE " +
"FKTABLE_SCHEM = '" + jdbcSchemaName + "' " +
"AND FKTABLE_NAME = '" + tableName + "'";
}
protected List<CachedRow> queryDb2Zos(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
List<String> parameters = new ArrayList<>(2);
StringBuilder sql = new StringBuilder("SELECT " +
" ref.REFTBCREATOR AS pktable_cat, " +
" ref.REFTBNAME as pktable_name, " +
" pk_col.colname as pkcolumn_name, " +
" ref.CREATOR as fktable_cat, " +
" ref.TBNAME as fktable_name, " +
" fk_col.colname as fkcolumn_name, " +
" fk_col.colseq as key_seq, " +
" decode (ref.deleterule, 'A', 3, 'C', 0, 'N', 2, 'R', 1, 1) as delete_rule, " +
" ref.relname as fk_name, " +
" pk_col.colname as pk_name, " +
" 7 as deferrability " +
"FROM " +
"SYSIBM.SYSRELS ref " +
"join SYSIBM.SYSFOREIGNKEYS fk_col " +
"on ref.relname = fk_col.RELNAME " +
"and ref.CREATOR = fk_col.CREATOR " +
"and ref.TBNAME = fk_col.TBNAME " +
"join SYSIBM.SYSKEYCOLUSE pk_col " +
"on ref.REFTBCREATOR = pk_col.TBCREATOR " +
"and ref.REFTBNAME = pk_col.TBNAME " +
"and pk_col.colseq=fk_col.colseq " +
"WHERE ref.CREATOR = ? ");
parameters.add(((AbstractJdbcDatabase) CachingDatabaseMetaData.this.database).getJdbcSchemaName(catalogAndSchema));
if (tableName != null) {
sql.append("AND ref.TBNAME = ? ");
parameters.add(tableName);
}
sql.append("ORDER BY fk_col.colseq");
return executeAndExtract(CachingDatabaseMetaData.this.database, sql.toString(), parameters.toArray());
}
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
if (database instanceof AbstractDb2Database || database instanceof MSSQLDatabase) {
return super.shouldBulkSelect(schemaKey, resultSetCache); //can bulk and fast fetch
} else {
return database instanceof OracleDatabase; //oracle is slow, always bulk select while you are at it. Other databases need to go through all tables.
}
}
}
private class GetNotNullConstraintsResultSetCache extends ResultSetCache.SingleResultSetExtractor {
final String catalogName;
final String schemaName;
final String tableName;
private GetNotNullConstraintsResultSetCache(Database database, String catalogName, String schemaName, String tableName) {
super(database);
this.catalogName = catalogName;
this.schemaName = schemaName;
this.tableName = tableName;
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEMA"),
database, row.getString("TABLE_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, tableName);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEMA");
}
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
LiquibaseTableNamesFactory liquibaseTableNamesFactory = Scope.getCurrentScope().getSingleton(LiquibaseTableNamesFactory.class);
List<String> liquibaseTableNames = liquibaseTableNamesFactory.getLiquibaseTableNames(database);
return liquibaseTableNames.stream().noneMatch(tableName::equalsIgnoreCase);
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
if (database instanceof DmDatabase) {
return dmQuery(false);
}
if (database instanceof OracleDatabase) {
return oracleQuery(false);
}
return Collections.emptyList();
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
if (database instanceof DmDatabase) {
return dmQuery(true);
}
if (database instanceof OracleDatabase) {
return oracleQuery(true);
}
return Collections.emptyList();
}
private List<CachedRow> oracleQuery(boolean bulk) throws DatabaseException, SQLException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
String jdbcTableName = database.escapeStringForDatabase(tableName);
String sqlToSelectNotNullConstraints = "SELECT NULL AS TABLE_CAT, atc.OWNER AS TABLE_SCHEMA, atc.OWNER, atc.TABLE_NAME, " +
"atc.COLUMN_NAME, NULLABLE, ac.VALIDATED as VALIDATED, ac.SEARCH_CONDITION, ac.CONSTRAINT_NAME " +
"FROM ALL_TAB_COLS atc " +
"JOIN all_cons_columns acc ON atc.OWNER = acc.OWNER AND atc.TABLE_NAME = acc.TABLE_NAME AND atc.COLUMN_NAME = acc.COLUMN_NAME " +
"JOIN all_constraints ac ON atc.OWNER = ac.OWNER AND atc.TABLE_NAME = ac.TABLE_NAME AND acc.CONSTRAINT_NAME = ac.CONSTRAINT_NAME ";
if (!bulk || getAllCatalogsStringScratchData() == null) {
sqlToSelectNotNullConstraints += " WHERE atc.OWNER='" + jdbcSchemaName + "' AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C' and ac.search_condition is not null ";
} else {
sqlToSelectNotNullConstraints += " WHERE atc.OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") "
+ " AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C' and ac.search_condition is not null ";
}
sqlToSelectNotNullConstraints += (!bulk && tableName != null && !tableName.isEmpty()) ? " AND atc.TABLE_NAME='" + jdbcTableName + "'" : "";
return this.executeAndExtract(sqlToSelectNotNullConstraints, database);
}
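// DM variant of oracleQuery above; the not-null-constraint SQL against ALL_TAB_COLS / ALL_CONSTRAINTS is reused unchanged for DM.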
private List<CachedRow> dmQuery(boolean bulk) throws DatabaseException, SQLException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
String jdbcSchemaName = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
String jdbcTableName = database.escapeStringForDatabase(tableName);
String sqlToSelectNotNullConstraints = "SELECT NULL AS TABLE_CAT, atc.OWNER AS TABLE_SCHEMA, atc.OWNER, atc.TABLE_NAME, " +
"atc.COLUMN_NAME, NULLABLE, ac.VALIDATED as VALIDATED, ac.SEARCH_CONDITION, ac.CONSTRAINT_NAME " +
"FROM ALL_TAB_COLS atc " +
"JOIN all_cons_columns acc ON atc.OWNER = acc.OWNER AND atc.TABLE_NAME = acc.TABLE_NAME AND atc.COLUMN_NAME = acc.COLUMN_NAME " +
"JOIN all_constraints ac ON atc.OWNER = ac.OWNER AND atc.TABLE_NAME = ac.TABLE_NAME AND acc.CONSTRAINT_NAME = ac.CONSTRAINT_NAME ";
if (!bulk || getAllCatalogsStringScratchData() == null) {
sqlToSelectNotNullConstraints += " WHERE atc.OWNER='" + jdbcSchemaName + "' AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C' and ac.search_condition is not null ";
} else {
sqlToSelectNotNullConstraints += " WHERE atc.OWNER IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ") "
+ " AND atc.hidden_column='NO' AND ac.CONSTRAINT_TYPE='C' and ac.search_condition is not null ";
}
sqlToSelectNotNullConstraints += (!bulk && tableName != null && !tableName.isEmpty()) ? " AND atc.TABLE_NAME='" + jdbcTableName + "'" : "";
return this.executeAndExtract(sqlToSelectNotNullConstraints, database);
}
@Override
protected List<CachedRow> extract(ResultSet resultSet, boolean informixIndexTrimHint) throws SQLException {
List<CachedRow> cachedRowList = new ArrayList<>();
if (!(database instanceof OracleDatabase)) {
return cachedRowList;
}
resultSet.setFetchSize(database.getFetchSize());
try {
List<Map> result = (List<Map>) new RowMapperNotNullConstraintsResultSetExtractor(new ColumnMapRowMapper(database.isCaseSensitive()) {
@Override
protected Object getColumnValue(ResultSet rs, int index) throws SQLException {
Object value = super.getColumnValue(rs, index);
if (!(value instanceof String)) {
return value;
}
return value.toString().trim();
}
}).extractData(resultSet);
for (Map row : result) {
cachedRowList.add(new CachedRow(row));
}
} finally {
JdbcUtil.closeResultSet(resultSet);
}
return cachedRowList;
}
}
public List<CachedRow> getTables(final String catalogName, final String schemaName, final String table) throws DatabaseException {
return getResultSetCache("getTables").get(new ResultSetCache.SingleResultSetExtractor(database) {
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
return table == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, table);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEM");
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
if (database instanceof DmDatabase) {
return queryDm(catalogAndSchema, table);
} else if (database instanceof OracleDatabase) {
return queryOracle(catalogAndSchema, table);
} else if (database instanceof MSSQLDatabase) {
return queryMssql(catalogAndSchema, table);
} else if (database instanceof Db2zDatabase) {
return queryDb2Zos(catalogAndSchema, table);
} else if (database instanceof PostgresDatabase) {
return queryPostgres(catalogAndSchema, table);
}
String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((table == null) ?
SQL_FILTER_MATCH_ALL : escapeForLike(table, database)), new String[]{"TABLE"}));
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
if (database instanceof DmDatabase) {
return queryDm(catalogAndSchema, null);
} else if (database instanceof OracleDatabase) {
return queryOracle(catalogAndSchema, null);
} else if (database instanceof MSSQLDatabase) {
return queryMssql(catalogAndSchema, null);
} else if (database instanceof Db2zDatabase) {
return queryDb2Zos(catalogAndSchema, null);
} else if (database instanceof PostgresDatabase) {
return queryPostgres(catalogAndSchema, table);
}
String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), SQL_FILTER_MATCH_ALL, new String[]{"TABLE"}));
}
private List<CachedRow> queryMssql(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class);
String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
String dbIdParam;
String databasePrefix;
if (databaseName == null) {
databasePrefix = "";
dbIdParam = "";
} else {
dbIdParam = ", db_id('" + databaseName + "')";
databasePrefix = "[" + databaseName + "].";
}
//From select object_definition(object_id('sp_tables'))
String sql = "select " +
"db_name(" + (databaseName == null ? "" : "db_id('" + databaseName + "')") + ") AS TABLE_CAT, " +
"convert(sysname,object_schema_name(o.object_id" + dbIdParam + ")) AS TABLE_SCHEM, " +
"convert(sysname,o.name) AS TABLE_NAME, " +
"'TABLE' AS TABLE_TYPE, " +
"CAST(ep.value as varchar(max)) as REMARKS " +
"from " + databasePrefix + "sys.all_objects o " +
"left outer join sys.extended_properties ep on ep.name='MS_Description' and major_id=o.object_id and minor_id=0 " +
"where " +
"o.type in ('U') " +
"and has_perms_by_name(" + (databaseName == null ? "" : "quotename('" + databaseName + "') + '.' + ") + "quotename(object_schema_name(o.object_id" + dbIdParam + ")) + '.' + quotename(o.name), 'object', 'select') = 1 " +
"and charindex(substring(o.type,1,1),'U') <> 0 " +
"and object_schema_name(o.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(ownerName) + "'";
if (tableName != null) {
sql += " AND o.name='" + database.escapeStringForDatabase(tableName) + "' ";
}
sql += "order by 4, 1, 2, 3";
return executeAndExtract(sql, database);
}
private List<CachedRow> queryOracle(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.TABLE_NAME as TABLE_NAME, " +
"a.TEMPORARY as TEMPORARY, a.DURATION as DURATION, 'TABLE' as TABLE_TYPE, " +
"c.COMMENTS as REMARKS, A.tablespace_name as tablespace_name, CASE WHEN A.tablespace_name = " +
"(SELECT DEFAULT_TABLESPACE FROM USER_USERS) THEN 'true' ELSE null END as default_tablespace " +
"from ALL_TABLES a " +
"join ALL_TAB_COMMENTS c on a.TABLE_NAME=c.table_name and a.owner=c.owner " +
"left outer join ALL_QUEUE_TABLES q ON a.TABLE_NAME = q.QUEUE_TABLE and a.OWNER = q.OWNER " +
"WHERE q.QUEUE_TABLE is null ";
String allCatalogsString = getAllCatalogsStringScratchData();
if (tableName != null || allCatalogsString == null) {
sql += "AND a.OWNER='" + ownerName + "'";
} else {
sql += "AND a.OWNER IN ('" + ownerName + "', " + allCatalogsString + ")";
}
if (tableName != null) {
sql += " AND a.TABLE_NAME='" + tableName + "'";
}
return executeAndExtract(sql, database);
}
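// DM adaptation of queryOracle above: the Oracle AQ queue-table join (ALL_QUEUE_TABLES) is omitted, see the commented-out lines below.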
private List<CachedRow> queryDm(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.TABLE_NAME as TABLE_NAME, " +
"a.TEMPORARY as TEMPORARY, a.DURATION as DURATION, 'TABLE' as TABLE_TYPE, " +
"c.COMMENTS as REMARKS, A.tablespace_name as tablespace_name, CASE WHEN A.tablespace_name = " +
"(SELECT DEFAULT_TABLESPACE FROM USER_USERS) THEN 'true' ELSE null END as default_tablespace " +
"from ALL_TABLES a " +
"join ALL_TAB_COMMENTS c on a.TABLE_NAME=c.table_name and a.owner=c.owner " +
//"left outer join ALL_QUEUE_TABLES q ON a.TABLE_NAME = q.QUEUE_TABLE and a.OWNER = q.OWNER " +
//"WHERE q.QUEUE_TABLE is null ";
"";
String allCatalogsString = getAllCatalogsStringScratchData();
if (tableName != null || allCatalogsString == null) {
sql += "AND a.OWNER='" + ownerName + "'";
} else {
sql += "AND a.OWNER IN ('" + ownerName + "', " + allCatalogsString + ")";
}
if (tableName != null) {
sql += " AND a.TABLE_NAME='" + tableName + "'";
}
return executeAndExtract(sql, database);
}
private List<CachedRow> queryDb2Zos(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
String sql = "SELECT CREATOR AS TABLE_SCHEM, " +
"NAME AS TABLE_NAME, " +
"'TABLE' AS TABLE_TYPE, " +
"REMARKS " +
"FROM SYSIBM.SYSTABLES " +
"WHERE TYPE = 'T'";
List<String> parameters = new ArrayList<>(2);
if (ownerName != null) {
sql += " AND CREATOR = ?";
parameters.add(ownerName);
}
if (tableName != null) {
sql += " AND NAME = ?";
parameters.add(tableName);
}
return executeAndExtract(database, sql, parameters.toArray());
}
private List<CachedRow> queryPostgres(CatalogAndSchema catalogAndSchema, String tableName) throws SQLException {
String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((tableName == null) ?
SQL_FILTER_MATCH_ALL : escapeForLike(tableName, database)), new String[]{"TABLE", "PARTITIONED TABLE"}));
}
});
}
public List<CachedRow> getViews(final String catalogName, final String schemaName, String viewName) throws DatabaseException {
final String view;
if (database instanceof DB2Database) {
view = database.correctObjectName(viewName, View.class);
} else {
view = viewName;
}
return getResultSetCache("getViews").get(new ResultSetCache.SingleResultSetExtractor(database) {
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
return view == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, view);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEM");
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
if (database instanceof DmDatabase) {
return queryDm(catalogAndSchema, view);
} else if (database instanceof OracleDatabase) {
return queryOracle(catalogAndSchema, view);
} else if (database instanceof MSSQLDatabase) {
return queryMssql(catalogAndSchema, view);
}
String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), ((view == null) ? SQL_FILTER_MATCH_ALL
: escapeForLike(view, database)), new String[]{"VIEW"}));
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
if (database instanceof DmDatabase) {
return queryDm(catalogAndSchema, null);
} else if (database instanceof OracleDatabase) {
return queryOracle(catalogAndSchema, null);
} else if (database instanceof MSSQLDatabase) {
return queryMssql(catalogAndSchema, null);
}
String catalog = ((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema);
String schema = ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema);
return extract(databaseMetaData.getTables(catalog, escapeForLike(schema, database), SQL_FILTER_MATCH_ALL, new String[]{"VIEW"}));
}
private List<CachedRow> queryMssql(CatalogAndSchema catalogAndSchema, String viewName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getSchemaName(), Schema.class);
String databaseName = StringUtil.trimToNull(database.correctObjectName(catalogAndSchema.getCatalogName(), Catalog.class));
String dbIdParam = "";
String databasePrefix = "";
boolean haveDatabaseName = databaseName != null;
if (haveDatabaseName) {
dbIdParam = ", db_id('" + databaseName + "')";
databasePrefix = "[" + databaseName + "].";
}
String tableCatParam = haveDatabaseName ? "db_id('" + databaseName + "')" : "";
String permsParam = haveDatabaseName ? "quotename('" + databaseName + "') + '.' + " : "";
String sql = "select " +
"db_name(" + tableCatParam + ") AS TABLE_CAT, " +
"convert(sysname,object_schema_name(o.object_id" + dbIdParam + ")) AS TABLE_SCHEM, " +
"convert(sysname,o.name) AS TABLE_NAME, " +
"'VIEW' AS TABLE_TYPE, " +
"CAST(ep.value as varchar(max)) as REMARKS " +
"from " + databasePrefix + "sys.all_objects o " +
"left join sys.extended_properties ep on ep.name='MS_Description' and major_id=o.object_id and minor_id=0 " +
"where " +
"o.type in ('V') " +
"and has_perms_by_name(" + permsParam + "quotename(object_schema_name(o.object_id" + dbIdParam + ")) + '.' + quotename(o.name), 'object', 'select') = 1 " +
"and charindex(substring(o.type,1,1),'V') <> 0 " +
"and object_schema_name(o.object_id" + dbIdParam + ")='" + database.escapeStringForDatabase(ownerName) + "'";
if (viewName != null) {
sql += " AND o.name='" + database.escapeStringForDatabase(viewName) + "' ";
}
sql += "order by 4, 1, 2, 3";
return executeAndExtract(sql, database);
}
private List<CachedRow> queryOracle(CatalogAndSchema catalogAndSchema, String viewName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.VIEW_NAME as TABLE_NAME, 'TABLE' as TABLE_TYPE, c.COMMENTS as REMARKS, TEXT as OBJECT_BODY";
if (database.getDatabaseMajorVersion() > 10) {
sql += ", EDITIONING_VIEW";
}
sql += " from ALL_VIEWS a " +
"join ALL_TAB_COMMENTS c on a.VIEW_NAME=c.table_name and a.owner=c.owner ";
if (viewName != null || getAllCatalogsStringScratchData() == null) {
sql += "WHERE a.OWNER='" + ownerName + "'";
} else {
sql += "WHERE a.OWNER IN ('" + ownerName + "', " + getAllCatalogsStringScratchData() + ")";
}
if (viewName != null) {
sql += " AND a.VIEW_NAME='" + database.correctObjectName(viewName, View.class) + "'";
}
sql += " AND a.VIEW_NAME not in (select mv.name from all_registered_mviews mv where mv.owner=a.owner)";
return executeAndExtract(sql, database);
}
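// DM variant of queryOracle above; the materialized-view exclusion via all_registered_mviews is commented out for DM.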
private List<CachedRow> queryDm(CatalogAndSchema catalogAndSchema, String viewName) throws DatabaseException, SQLException {
String ownerName = database.correctObjectName(catalogAndSchema.getCatalogName(), Schema.class);
String sql = "SELECT null as TABLE_CAT, a.OWNER as TABLE_SCHEM, a.VIEW_NAME as TABLE_NAME, 'TABLE' as TABLE_TYPE, c.COMMENTS as REMARKS, TEXT as OBJECT_BODY";
if (database.getDatabaseMajorVersion() > 10) {
sql += ", EDITIONING_VIEW";
}
sql += " from ALL_VIEWS a " +
"join ALL_TAB_COMMENTS c on a.VIEW_NAME=c.table_name and a.owner=c.owner ";
if (viewName != null || getAllCatalogsStringScratchData() == null) {
sql += "WHERE a.OWNER='" + ownerName + "'";
} else {
sql += "WHERE a.OWNER IN ('" + ownerName + "', " + getAllCatalogsStringScratchData() + ")";
}
if (viewName != null) {
sql += " AND a.VIEW_NAME='" + database.correctObjectName(viewName, View.class) + "'";
}
//sql += " AND a.VIEW_NAME not in (select mv.name from all_registered_mviews mv where mv.owner=a.owner)";
return executeAndExtract(sql, database);
}
});
}
public List<CachedRow> getPrimaryKeys(final String catalogName, final String schemaName, final String table) throws DatabaseException {
return getResultSetCache("getPrimaryKeys").get(new ResultSetCache.SingleResultSetExtractor(database) {
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(row.getString("TABLE_CAT"), row.getString("TABLE_SCHEM"), database, row.getString("TABLE_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, table);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("TABLE_SCHEM");
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
try {
List<CachedRow> foundPks = new ArrayList<>();
if (table == null) {
List<CachedRow> tables = CachingDatabaseMetaData.this.getTables(catalogName, schemaName, null);
for (CachedRow table : tables) {
List<CachedRow> pkInfo = getPkInfo(catalogAndSchema, table.getString("TABLE_NAME"));
if (pkInfo != null) {
foundPks.addAll(pkInfo);
}
}
return foundPks;
} else {
List<CachedRow> pkInfo = getPkInfo(catalogAndSchema, table);
if (pkInfo != null) {
foundPks.addAll(pkInfo);
}
}
return foundPks;
} catch (DatabaseException e) {
throw new SQLException(e);
}
}
private List<CachedRow> getPkInfo(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException, SQLException {
List<CachedRow> pkInfo;
if (database instanceof MSSQLDatabase) {
String sql = mssqlSql(catalogAndSchema, tableName);
pkInfo = executeAndExtract(sql, database);
} else {
if (database instanceof Db2zDatabase) {
String sql = "SELECT 'NULL' AS TABLE_CAT," +
" SYSTAB.TBCREATOR AS TABLE_SCHEM, " +
"SYSTAB.TBNAME AS TABLE_NAME, " +
"COLUSE.COLNAME AS COLUMN_NAME, " +
"COLUSE.COLSEQ AS KEY_SEQ, " +
"SYSTAB.CONSTNAME AS PK_NAME " +
"FROM SYSIBM.SYSTABCONST SYSTAB " +
"JOIN SYSIBM.SYSKEYCOLUSE COLUSE " +
"ON SYSTAB.TBCREATOR = COLUSE.TBCREATOR " +
"WHERE SYSTAB.TYPE = 'P' " +
"AND SYSTAB.TBNAME = ? " +
"AND SYSTAB.TBCREATOR = ? " +
"AND SYSTAB.TBNAME=COLUSE.TBNAME " +
"AND SYSTAB.CONSTNAME=COLUSE.CONSTNAME " +
"ORDER BY COLUSE.COLNAME";
try {
return executeAndExtract(database, sql, table, ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema));
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else if (database instanceof DmDatabase) {
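// DM: same primary-key lookup as the Oracle branch below, with the dba_recyclebin/user_recyclebin join removed.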
String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name as COLUMN_NAME, c.position AS key_seq, c.constraint_name AS pk_name, k.VALIDATED as VALIDATED " +
"FROM all_cons_columns c, all_constraints k " +
//"LEFT JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
"WHERE k.constraint_type = 'P' " +
//"AND d.object_name IS NULL " +
"AND k.table_name = '" + table + "' " +
"AND k.owner = '" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "' " +
"AND k.constraint_name = c.constraint_name " +
"AND k.table_name = c.table_name " +
"AND k.owner = c.owner " +
"ORDER BY column_name";
try {
return executeAndExtract(sql, database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else if (database instanceof OracleDatabase) {
warnAboutDbaRecycleBin();
String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name as COLUMN_NAME, c.position AS key_seq, c.constraint_name AS pk_name, k.VALIDATED as VALIDATED " +
"FROM all_cons_columns c, all_constraints k " +
"LEFT JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
"WHERE k.constraint_type = 'P' " +
"AND d.object_name IS NULL " +
"AND k.table_name = '" + table + "' " +
"AND k.owner = '" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "' " +
"AND k.constraint_name = c.constraint_name " +
"AND k.table_name = c.table_name " +
"AND k.owner = c.owner " +
"ORDER BY column_name";
try {
return executeAndExtract(sql, database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else if (database instanceof CockroachDatabase) {
// This is the same as the query generated by PGJDBC's getPrimaryKeys method, except it
// also adds an `asc_or_desc` column to the result.
String sql = "SELECT " +
" result.table_cat, " +
" result.table_schem, " +
" result.table_name, " +
" result.column_name, " +
" result.key_seq, " +
" result.pk_name, " +
" CASE result.indoption[result.key_seq - 1] & 1 " +
" WHEN 1 THEN 'D' " +
" ELSE 'A' " +
" END AS asc_or_desc " +
"FROM " +
" (" +
" SELECT " +
" NULL AS table_cat, " +
" n.nspname AS table_schem, " +
" ct.relname AS table_name, " +
" a.attname AS column_name, " +
" (information_schema._pg_expandarray(i.indkey)).n " +
" AS key_seq, " +
" ci.relname AS pk_name, " +
" information_schema._pg_expandarray(i.indkey) AS keys, " +
" i.indoption, " +
" a.attnum AS a_attnum " +
" FROM " +
" pg_catalog.pg_class AS ct " +
" JOIN pg_catalog.pg_attribute AS a ON (ct.oid = a.attrelid) " +
" JOIN pg_catalog.pg_namespace AS n ON " +
" (ct.relnamespace = n.oid) " +
" JOIN pg_catalog.pg_index AS i ON (a.attrelid = i.indrelid) " +
" JOIN pg_catalog.pg_class AS ci ON (ci.oid = i.indexrelid) " +
" WHERE " +
" true " +
" AND n.nspname = '" + ((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema) + "' " +
" AND ct.relname = '" + table + "' " +
" AND i.indisprimary" +
" ) " +
" AS result " +
"WHERE " +
" result.a_attnum = (result.keys).x " +
"ORDER BY " +
" result.table_name, result.pk_name, result.key_seq";
try {
return executeAndExtract(sql, database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else {
return extract(
databaseMetaData.getPrimaryKeys(
((AbstractJdbcDatabase) database).getJdbcCatalogName(catalogAndSchema),
((AbstractJdbcDatabase) database).getJdbcSchemaName(catalogAndSchema),
table
)
);
}
}
return pkInfo;
}
private String mssqlSql(CatalogAndSchema catalogAndSchema, String tableName) throws DatabaseException {
String sql;
sql =
"SELECT " +
"DB_NAME() AS [TABLE_CAT], " +
"[s].[name] AS [TABLE_SCHEM], " +
"[t].[name] AS [TABLE_NAME], " +
"[c].[name] AS [COLUMN_NAME], " +
"CASE [ic].[is_descending_key] WHEN 0 THEN N'A' WHEN 1 THEN N'D' END AS [ASC_OR_DESC], " +
"[ic].[key_ordinal] AS [KEY_SEQ], " +
"[kc].[name] AS [PK_NAME] " +
"FROM [sys].[schemas] AS [s] " +
"INNER JOIN [sys].[tables] AS [t] " +
"ON [t].[schema_id] = [s].[schema_id] " +
"INNER JOIN [sys].[key_constraints] AS [kc] " +
"ON [kc].[parent_object_id] = [t].[object_id] " +
"INNER JOIN [sys].[indexes] AS [i] " +
"ON [i].[object_id] = [kc].[parent_object_id] " +
"AND [i].[index_id] = [kc].[unique_index_id] " +
"INNER JOIN [sys].[index_columns] AS [ic] " +
"ON [ic].[object_id] = [i].[object_id] " +
"AND [ic].[index_id] = [i].[index_id] " +
"INNER JOIN [sys].[columns] AS [c] " +
"ON [c].[object_id] = [ic].[object_id] " +
"AND [c].[column_id] = [ic].[column_id] " +
"WHERE [s].[name] = N'" + database.escapeStringForDatabase(catalogAndSchema.getSchemaName()) + "' " + // The schema name was corrected in the customized CatalogAndSchema
(tableName == null ? "" : "AND [t].[name] = N'" + database.escapeStringForDatabase(database.correctObjectName(tableName, Table.class)) + "' ") +
"AND [kc].[type] = 'PK' " +
"AND [ic].[key_ordinal] > 0 " +
"ORDER BY " +
"[ic].[key_ordinal]";
return sql;
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException {
if (database instanceof DmDatabase) {
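// DM: bulk primary-key fetch mirrors the Oracle branch below, again without the recyclebin join.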
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
//warnAboutDbaRecycleBin();
try {
String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name, c.position AS key_seq,c.constraint_name AS pk_name, k.VALIDATED as VALIDATED FROM " +
"all_cons_columns c, " +
"all_constraints k " +
//"LEFT JOIN " + (((DmDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
"WHERE k.constraint_type = 'P' ";
//"AND d.object_name IS NULL ";
if (getAllCatalogsStringScratchData() == null) {
sql += "AND k.owner='" + catalogAndSchema.getCatalogName() + "' ";
} else {
sql += "AND k.owner IN ('" + catalogAndSchema.getCatalogName() + "', " + getAllCatalogsStringScratchData() + ")";
}
sql += "AND k.constraint_name = c.constraint_name " +
"AND k.table_name = c.table_name " +
"AND k.owner = c.owner " +
"ORDER BY column_name";
return executeAndExtract(sql, database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else if (database instanceof OracleDatabase) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
warnAboutDbaRecycleBin();
try {
String sql = "SELECT NULL AS table_cat, c.owner AS table_schem, c.table_name, c.column_name, c.position AS key_seq,c.constraint_name AS pk_name, k.VALIDATED as VALIDATED FROM " +
"all_cons_columns c, " +
"all_constraints k " +
"LEFT JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=k.table_name " +
"WHERE k.constraint_type = 'P' " +
"AND d.object_name IS NULL ";
if (getAllCatalogsStringScratchData() == null) {
sql += "AND k.owner='" + catalogAndSchema.getCatalogName() + "' ";
} else {
sql += "AND k.owner IN ('" + catalogAndSchema.getCatalogName() + "', " + getAllCatalogsStringScratchData() + ")";
}
sql += "AND k.constraint_name = c.constraint_name " +
"AND k.table_name = c.table_name " +
"AND k.owner = c.owner " +
"ORDER BY column_name";
return executeAndExtract(sql, database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
} else if (database instanceof MSSQLDatabase) {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
try {
return executeAndExtract(mssqlSql(catalogAndSchema, null), database);
} catch (DatabaseException e) {
throw new SQLException(e);
}
}
return null;
}
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
if ((database instanceof OracleDatabase) || (database instanceof MSSQLDatabase)) {
return table == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
} else {
return false;
}
}
});
}
public List<CachedRow> getUniqueConstraints(final String catalogName, final String schemaName, final String tableName) throws DatabaseException {
return getResultSetCache("getUniqueConstraints").get(new ResultSetCache.SingleResultSetExtractor(database) {
@Override
protected boolean shouldBulkSelect(String schemaKey, ResultSetCache resultSetCache) {
return tableName == null || getAllCatalogsStringScratchData() != null || super.shouldBulkSelect(schemaKey, resultSetCache);
}
@Override
public boolean bulkContainsSchema(String schemaKey) {
return database instanceof OracleDatabase;
}
@Override
public String getSchemaKey(CachedRow row) {
return row.getString("CONSTRAINT_SCHEM");
}
@Override
public ResultSetCache.RowData rowKeyParameters(CachedRow row) {
return new ResultSetCache.RowData(catalogName, schemaName, database, row.getString("TABLE_NAME"));
}
@Override
public ResultSetCache.RowData wantedKeyParameters() {
return new ResultSetCache.RowData(catalogName, schemaName, database, tableName);
}
@Override
public List<CachedRow> fastFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
return queryDb(catalogAndSchema, tableName);
}
@Override
public List<CachedRow> bulkFetchQuery() throws SQLException, DatabaseException {
CatalogAndSchema catalogAndSchema = new CatalogAndSchema(catalogName, schemaName).customize(database);
return queryDb(catalogAndSchema, null);
}
private List<CachedRow> queryDb(CatalogAndSchema catalogAndSchema, String tableName) throws SQLException, DatabaseException {
String jdbcCatalogName = catalogAndSchema.getCatalogName();
String jdbcSchemaName = catalogAndSchema.getSchemaName();
Database database = getDatabase();
List<String> parameters = new ArrayList<>(3);
String sql = null;
if (database instanceof Ingres9Database) {
sql = "select CONSTRAINT_NAME, TABLE_NAME from iiconstraints where schema_name ='"
+ schemaName + "' and constraint_type='U'";
if (tableName != null) {
sql += " and table_name='" + tableName + "'";
}
} else if ((database instanceof MySQLDatabase) || (database instanceof HsqlDatabase) || (database
instanceof MariaDBDatabase)) {
sql = "select CONSTRAINT_NAME, TABLE_NAME "
+ "from " + database.getSystemSchema() + ".table_constraints "
+ "where constraint_schema='" + jdbcCatalogName + "' "
+ "and constraint_type='UNIQUE'";
if (tableName != null) {
sql += " and table_name='" + tableName + "'";
}
} else if (database instanceof PostgresDatabase) {
sql = "select CONSTRAINT_NAME, TABLE_NAME "
+ "from " + database.getSystemSchema() + ".table_constraints "
+ "where constraint_catalog='" + jdbcCatalogName + "' "
+ "and constraint_schema='" + jdbcSchemaName + "' "
+ "and constraint_type='UNIQUE'";
if (tableName != null) {
sql += " and table_name='" + tableName + "'";
}
} else if (database.getClass().getName().contains("MaxDB")) { //have to check classname as this is currently an extension
sql = "select distinct tablename AS TABLE_NAME, constraintname AS CONSTRAINT_NAME from CONSTRAINTCOLUMNS WHERE CONSTRAINTTYPE = 'UNIQUE_CONST'";
if (tableName != null) {
sql += " and tablename='" + tableName + "'";
}
} else if (database instanceof MSSQLDatabase) {
sql =
"SELECT " +
"[TC].[CONSTRAINT_NAME], " +
"[TC].[TABLE_NAME], " +
"[TC].[CONSTRAINT_CATALOG] AS INDEX_CATALOG, " +
"[TC].[CONSTRAINT_SCHEMA] AS INDEX_SCHEMA, " +
"[IDX].[TYPE_DESC], " +
"[IDX].[name] AS INDEX_NAME " +
"FROM [INFORMATION_SCHEMA].[TABLE_CONSTRAINTS] AS [TC] " +
"JOIN sys.indexes AS IDX ON IDX.name=[TC].[CONSTRAINT_NAME] AND object_schema_name(object_id)=[TC].[CONSTRAINT_SCHEMA] " +
"WHERE [TC].[CONSTRAINT_TYPE] = 'UNIQUE' " +
"AND [TC].[CONSTRAINT_CATALOG] = N'" + database.escapeStringForDatabase(jdbcCatalogName) + "' " +
"AND [TC].[CONSTRAINT_SCHEMA] = N'" + database.escapeStringForDatabase(jdbcSchemaName) + "'";
if (tableName != null) {
sql += " AND [TC].[TABLE_NAME] = N'" + database.escapeStringForDatabase(database.correctObjectName(tableName, Table.class)) + "'";
}
} else if (database instanceof DmDatabase) {
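// DM: unique-constraint lookup mirrors the Oracle branch below, without the recyclebin outer join.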
// warnAboutDbaRecycleBin();
sql = "select uc.owner AS CONSTRAINT_SCHEM, uc.constraint_name, uc.table_name,uc.status,uc.deferrable,uc.deferred,ui.tablespace_name, ui.index_name, ui.owner as INDEX_CATALOG, uc.VALIDATED as VALIDATED, ac.COLUMN_NAME as COLUMN_NAME " +
"from all_constraints uc " +
"join all_indexes ui on uc.index_name = ui.index_name and uc.owner=ui.table_owner and uc.table_name=ui.table_name " +
//"LEFT OUTER JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=ui.table_name " +
"LEFT JOIN all_cons_columns ac ON ac.OWNER = uc.OWNER AND ac.TABLE_NAME = uc.TABLE_NAME AND ac.CONSTRAINT_NAME = uc.CONSTRAINT_NAME " +
"where uc.constraint_type='U' ";
if (tableName != null || getAllCatalogsStringScratchData() == null) {
sql += "and uc.owner = '" + jdbcSchemaName + "'";
} else {
sql += "and uc.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ")";
}
//sql += "AND d.object_name IS NULL ";
if (tableName != null) {
sql += " and uc.table_name = '" + tableName + "'";
}
} else if (database instanceof OracleDatabase) {
warnAboutDbaRecycleBin();
sql = "select uc.owner AS CONSTRAINT_SCHEM, uc.constraint_name, uc.table_name,uc.status,uc.deferrable,uc.deferred,ui.tablespace_name, ui.index_name, ui.owner as INDEX_CATALOG, uc.VALIDATED as VALIDATED, ac.COLUMN_NAME as COLUMN_NAME " +
"from all_constraints uc " +
"join all_indexes ui on uc.index_name = ui.index_name and uc.owner=ui.table_owner and uc.table_name=ui.table_name " +
"LEFT OUTER JOIN " + (((OracleDatabase) database).canAccessDbaRecycleBin() ? "dba_recyclebin" : "user_recyclebin") + " d ON d.object_name=ui.table_name " +
"LEFT JOIN all_cons_columns ac ON ac.OWNER = uc.OWNER AND ac.TABLE_NAME = uc.TABLE_NAME AND ac.CONSTRAINT_NAME = uc.CONSTRAINT_NAME " +
"where uc.constraint_type='U' ";
if (tableName != null || getAllCatalogsStringScratchData() == null) {
sql += "and uc.owner = '" + jdbcSchemaName + "'";
} else {
sql += "and uc.owner IN ('" + jdbcSchemaName + "', " + getAllCatalogsStringScratchData() + ")";
}
sql += "AND d.object_name IS NULL ";
if (tableName != null) {
sql += " and uc.table_name = '" + tableName + "'";
}
} else if (database instanceof DB2Database) {
// if we are on DB2 AS400 iSeries
if (database.getDatabaseProductName().startsWith("DB2 UDB for AS/400")) {
sql = "select constraint_name as constraint_name, table_name as table_name from QSYS2.TABLE_CONSTRAINTS where table_schema='" + jdbcSchemaName + "' and constraint_type='UNIQUE'";
if (tableName != null) {
sql += " and table_name = '" + tableName + "'";
}
// DB2 z/OS
}
// here we are on DB2 UDB
else {
sql = "select distinct k.constname as constraint_name, t.tabname as TABLE_NAME "
+ "from syscat.keycoluse k "
+ "inner join syscat.tabconst t "
+ "on k.constname = t.constname "
+ "where t.tabschema = ? "
+ "and t.type = 'U'";
parameters.add(jdbcSchemaName);
if (tableName != null) {
sql += " and t.tabname = ?";
parameters.add(tableName);
}
}
} else if (database instanceof Db2zDatabase) {
sql = "select k.constname as constraint_name, t.tbname as TABLE_NAME"
+ " from SYSIBM.SYSKEYCOLUSE k"
+ " inner join SYSIBM.SYSTABCONST t"
+ " on k.constname = t.constname"
+ " and k.TBCREATOR = t.TBCREATOR"
+ " and k.TBNAME = t.TBNAME"
+ " where t.TBCREATOR = ?"
+ " and t.TYPE = 'U'";
parameters.add(jdbcSchemaName);
if (tableName != null) {
sql += " and t.TBNAME = ?";
parameters.add(tableName);
}
} else if (database instanceof FirebirdDatabase) {
sql = "SELECT TRIM(RDB$INDICES.RDB$INDEX_NAME) AS CONSTRAINT_NAME, " +
"TRIM(RDB$INDICES.RDB$RELATION_NAME) AS TABLE_NAME " +
"FROM RDB$INDICES "
+ "LEFT JOIN RDB$RELATION_CONSTRAINTS "
+ "ON RDB$RELATION_CONSTRAINTS.RDB$INDEX_NAME = RDB$INDICES.RDB$INDEX_NAME "
+ "WHERE RDB$INDICES.RDB$UNIQUE_FLAG IS NOT NULL "
+ "AND ("
+ "RDB$RELATION_CONSTRAINTS.RDB$CONSTRAINT_TYPE IS NULL "
+ "OR TRIM(RDB$RELATION_CONSTRAINTS.RDB$CONSTRAINT_TYPE)='UNIQUE') "
+ "AND NOT(RDB$INDICES.RDB$INDEX_NAME LIKE 'RDB$%')";
if (tableName != null) {
sql += " AND TRIM(RDB$INDICES.RDB$RELATION_NAME)='" + tableName + "'";
}
} else if (database instanceof DerbyDatabase) {
sql = "select c.constraintname as CONSTRAINT_NAME, tablename AS TABLE_NAME "
+ "from sys.systables t, sys.sysconstraints c, sys.sysschemas s "
+ "where s.schemaname='" + jdbcCatalogName + "' "
+ "and t.tableid = c.tableid "
+ "and t.schemaid=s.schemaid "
+ "and c.type = 'U'";
if (tableName != null) {
sql += " AND t.tablename = '" + tableName + "'";
}
} else if (database instanceof InformixDatabase) {
sql = "select unique sysindexes.idxname as CONSTRAINT_NAME, sysindexes.idxtype, systables.tabname as TABLE_NAME "
+ "from sysindexes, systables "
+ "left outer join sysconstraints on sysconstraints.tabid = systables.tabid and sysconstraints.constrtype = 'P' "
+ "where sysindexes.tabid = systables.tabid and sysindexes.idxtype = 'U' "
+ "and sysconstraints.idxname != sysindexes.idxname "
+ "and sysconstraints.tabid = sysindexes.tabid";
if (tableName != null) {
sql += " and systables.tabname = '" + database.correctObjectName(tableName, Table.class) + "'";
}
} else if (database instanceof SybaseDatabase) {
sql = "select idx.name as CONSTRAINT_NAME, tbl.name as TABLE_NAME "
+ "from sysindexes idx "
+ "inner join sysobjects tbl on tbl.id = idx.id "
+ "where idx.indid between 1 and 254 "
+ "and (idx.status & 2) = 2 "
+ "and tbl.type = 'U'";
if (tableName != null) {
sql += " and tbl.name = '" + database.correctObjectName(tableName, Table.class) + "'";
}
} else if (database instanceof SybaseASADatabase) {
sql = "select sysconstraint.constraint_name, sysconstraint.constraint_type, systable.table_name " +
"from sysconstraint, systable " +
"where sysconstraint.table_object_id = systable.object_id " +
"and sysconstraint.constraint_type = 'U'";
if (tableName != null) {
sql += " and systable.table_name = '" + tableName + "'";
}
} else {
if (database instanceof H2Database) {
try {
if (database.getDatabaseMajorVersion() >= 2) {
sql = "select CONSTRAINT_NAME, CONSTRAINT_TYPE, TABLE_NAME "
+ "from " + database.getSystemSchema() + ".table_constraints "
+ "where constraint_schema='" + jdbcSchemaName + "' "
+ "and constraint_catalog='" + jdbcCatalogName + "' "
+ "and constraint_type='UNIQUE'";
if (tableName != null) {
sql += " and table_name='" + tableName + "'";
}
}
} catch (DatabaseException e) {
Scope.getCurrentScope().getLog(getClass()).fine("Cannot determine h2 version, using default unique constraint query");
}
}
if (sql == null) {
sql = "select CONSTRAINT_NAME, CONSTRAINT_TYPE, TABLE_NAME "
+ "from " + database.getSystemSchema() + ".constraints "
+ "where constraint_schema='" + jdbcSchemaName + "' "
+ "and constraint_catalog='" + jdbcCatalogName + "' "
+ "and constraint_type='UNIQUE'";
if (tableName != null) {
sql += " and table_name='" + tableName + "'";
}
}
}
return executeAndExtract(database, database instanceof InformixDatabase, sql, parameters.toArray());
}
});
}
}
private String getAllCatalogsStringScratchData() {
return (String) getScratchData(ALL_CATALOGS_STRING_SCRATCH_KEY);
}
private String escapeForLike(String string, Database database) {
if (string == null) {
return null;
}
if (database instanceof SQLiteDatabase) {
//sqlite jdbc's queries do not support escaped patterns.
return string;
}
return string
.replace("%", "\\%")
.replace("_", "\\_");
}
}
liquibase.snapshot.jvm.ColumnSnapshotGeneratorInformix:
package liquibase.snapshot.jvm;
import liquibase.Scope;
import liquibase.database.Database;
import liquibase.database.core.InformixDatabase;
import liquibase.exception.DatabaseException;
import liquibase.executor.ExecutorService;
import liquibase.snapshot.CachedRow;
import liquibase.snapshot.SnapshotGenerator;
import liquibase.statement.core.RawSqlStatement;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.Column;
import liquibase.structure.core.DataType;
import java.util.HashMap;
import java.util.Map;
/**
* Informix-specific parts of ColumnSnapshotGenerator to account for various quirks in IBM Informix.
*/
public class ColumnSnapshotGeneratorInformix extends ColumnSnapshotGenerator {
private static final Map<Integer, String> qualifiers = new HashMap<>();
static {
qualifiers.put(0, "YEAR");
qualifiers.put(2, "MONTH");
qualifiers.put(4, "DAY");
qualifiers.put(6, "HOUR");
qualifiers.put(8, "MINUTE");
qualifiers.put(10, "SECOND");
qualifiers.put(11, "FRACTION(1)");
qualifiers.put(12, "FRACTION(2)");
qualifiers.put(13, "FRACTION(3)");
qualifiers.put(14, "FRACTION(4)");
qualifiers.put(15, "FRACTION(5)");
}
@Override
public int getPriority(Class<? extends DatabaseObject> objectType, Database database) {
if (database instanceof InformixDatabase)
return PRIORITY_DATABASE;
else
return PRIORITY_NONE; // Other DB? Let the generic handler do it.
}
@Override
public Class<? extends SnapshotGenerator>[] replaces() {
return new Class[]{ColumnSnapshotGenerator.class};
}
@Override
protected DataType readDataType(CachedRow columnMetadataResultSet, Column column, Database database) throws DatabaseException {
// For an explanation of the information encoded in the column length, please see
// https://www.ibm.com/support/knowledgecenter/SSGU8G_11.50.0/com.ibm.sqlr.doc/ids_sqr_027.htm
String typeName = columnMetadataResultSet.getString("TYPE_NAME").toUpperCase();
if ("DATETIME".equals(typeName) || "INTERVAL".equals(typeName)) {
int colLength = columnMetadataResultSet.getInt("COLUMN_SIZE");
int firstQualifierType = (colLength % 256) / 16;
int lastQualifierType = (colLength % 256) % 16;
String type = "DATETIME".equals(typeName) ? "DATETIME" : "INTERVAL";
String firstQualifier = qualifiers.get(firstQualifierType);
String lastQualifier = qualifiers.get(lastQualifierType);
if (firstQualifier == null) {
throw new DatabaseException(
String.format(
"Encountered unknown firstQualifier code (%d) for column '%s', basic date type '%s', " +
"while trying to decipher information encoded in the column length (%d)",
firstQualifierType, column.toString(), typeName, colLength)
);
}
if (lastQualifier == null) {
throw new DatabaseException(
String.format(
"Encountered unknown lastQualifier code (%d) for column '%s', basic date type '%s', " +
"while trying to decipher information encoded in the column length (%d)",
lastQualifierType, column.toString(), typeName, colLength)
);
}
DataType dataTypeMetaData = new DataType(type + " " + firstQualifier + " TO " + lastQualifier);
dataTypeMetaData.setColumnSizeUnit(DataType.ColumnSizeUnit.BYTE);
return dataTypeMetaData;
} else {
return super.readDataType(columnMetadataResultSet, column, database);
}
}
/**
* https://github.com/liquibase/liquibase/issues/1462
*
* liquibase.exception.UnexpectedLiquibaseException: liquibase.exception.DatabaseException:
* Encountered unknown firstQualifier code (1) for column 'master.databasechangelog.dateexecuted',
* basic date type 'DATETIME', while trying to decipher information encoded in the column length (25)
* @param database database
* @param defaultValue defaultValue
* @return collength
*/
private int collengthQuery(Database database, int defaultValue) {
try {
String databasechangelog = database.getDatabaseChangeLogTableName();
String query = "SELECT collength FROM syscolumns WHERE tabid = (SELECT tabid FROM systables WHERE lower(tabname) = lower('" + databasechangelog + "')) AND lower(colname) = lower('dateexecuted')";
return Scope.getCurrentScope().getSingleton(ExecutorService.class).getExecutor("jdbc", database).queryForObject(new RawSqlStatement(query), Integer.class);
} catch (Exception e) {
Scope.getCurrentScope().getLog(getClass()).info("Error query collength!", e);
}
return defaultValue;
}
}
resources.META-INF.services.liquibase.database.Database: under the resources/META-INF/services directory, add a new file named liquibase.database.Database with the following content:
liquibase.database.core.CockroachDatabase
liquibase.database.core.DB2Database
liquibase.database.core.Db2zDatabase
liquibase.database.core.DerbyDatabase
liquibase.database.core.EnterpriseDBDatabase
liquibase.database.core.FirebirdDatabase
liquibase.database.core.H2Database
liquibase.database.core.HsqlDatabase
liquibase.database.core.InformixDatabase
liquibase.database.core.Ingres9Database
liquibase.database.core.MSSQLDatabase
liquibase.database.core.MariaDBDatabase
liquibase.database.core.MockDatabase
liquibase.database.core.MySQLDatabase
liquibase.database.core.OracleDatabase
liquibase.database.core.PostgresDatabase
liquibase.database.core.SQLiteDatabase
liquibase.database.core.SybaseASADatabase
liquibase.database.core.SybaseDatabase
liquibase.database.core.UnsupportedDatabase
liquibase.database.core.SnowflakeDatabase
# Newly added: DM (Dameng) database
liquibase.database.core.DmDatabase
3. Notes
The liquibase-core dependency must be version 4.27.0. Flowable 7.0.1 pulls in liquibase-core 4.29.2 transitively; using any version other than 4.27.0 results in the following error:
The called method's class, liquibase.database.core.OracleDatabase, is available from the following locations
The liquibase-core dependency must also be declared before the flowable dependency, as illustrated in the sketch below.
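A minimal pom.xml sketch that satisfies both notes. The Flowable starter shown here (org.flowable:flowable-spring-boot-starter) is an assumption; substitute whichever Flowable dependency your project actually declares:
<!-- liquibase-core 4.27.0 is pinned and declared before Flowable -->
<dependency>
<groupId>org.liquibase</groupId>
<artifactId>liquibase-core</artifactId>
<version>4.27.0</version>
</dependency>
<dependency>
<groupId>org.flowable</groupId>
<artifactId>flowable-spring-boot-starter</artifactId>
<version>7.0.1</version>
</dependency>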