mirror of https://github.com/alibaba/druid.git
Compare commits
11 Commits
6bb0b57168
...
06924d93f2
06924d93f2
915edd2982
e51daaa38b
88d90f1434
dac24c674e
96a47b7369
2e788bde25
b5169b4264
b5e431e1a3
2f11505ce7
924cd088b7
@@ -4,7 +4,7 @@
     <parent>
         <groupId>com.alibaba</groupId>
         <artifactId>druid-parent</artifactId>
-        <version>1.2.26-SNAPSHOT</version>
+        <version>1.2.28-SNAPSHOT</version>
         <relativePath>../pom.xml</relativePath>
     </parent>
     <artifactId>druid</artifactId>
@@ -18,7 +18,7 @@ package com.alibaba.druid;
 public final class VERSION {
     public static final int MajorVersion = 1;
     public static final int MinorVersion = 2;
-    public static final int RevisionVersion = 24;
+    public static final int RevisionVersion = 28;

     public static String getVersionNumber() {
         return VERSION.MajorVersion + "." + VERSION.MinorVersion + "." + VERSION.RevisionVersion;
@@ -187,20 +187,23 @@ public abstract class DruidAbstractDataSource extends WrapperAdapter implements
     protected volatile long cachedPreparedStatementCount;
     protected volatile long cachedPreparedStatementDeleteCount;
     protected volatile long cachedPreparedStatementMissCount;
+    protected volatile long userPasswordVersion;

     private volatile FilterChainImpl filterChain;

-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> errorCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "errorCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> dupCloseCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "dupCloseCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> startTransactionCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "startTransactionCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> commitCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "commitCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> rollbackCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "rollbackCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> cachedPreparedStatementHitCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementHitCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> preparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "preparedStatementCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> closedPreparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "closedPreparedStatementCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> cachedPreparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> cachedPreparedStatementDeleteCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementDeleteCount");
-    static final AtomicLongFieldUpdater<DruidAbstractDataSource> cachedPreparedStatementMissCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementMissCount");
+    static final AtomicLongFieldUpdater<DruidAbstractDataSource>
+            errorCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "errorCount"),
+            dupCloseCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "dupCloseCount"),
+            startTransactionCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "startTransactionCount"),
+            commitCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "commitCount"),
+            rollbackCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "rollbackCount"),
+            cachedPreparedStatementHitCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementHitCount"),
+            preparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "preparedStatementCount"),
+            closedPreparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "closedPreparedStatementCount"),
+            cachedPreparedStatementCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementCount"),
+            cachedPreparedStatementDeleteCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementDeleteCount"),
+            cachedPreparedStatementMissCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "cachedPreparedStatementMissCount"),
+            userPasswordVersionUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "userPasswordVersion");
     protected static final AtomicReferenceFieldUpdater<DruidAbstractDataSource, FilterChainImpl> filterChainUpdater
             = AtomicReferenceFieldUpdater.newUpdater(DruidAbstractDataSource.class, FilterChainImpl.class, "filterChain");
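For readers unfamiliar with the pattern, the consolidated declaration above relies on java.util.concurrent.atomic.AtomicLongFieldUpdater: one shared updater mutates a volatile long field atomically, avoiding a per-instance AtomicLong allocation for every counter. A minimal sketch of that pattern; the Counter class and field names are illustrative only, not part of this change:

import java.util.concurrent.atomic.AtomicLongFieldUpdater;

class Counter {
    // The target field must be a non-static volatile long visible to the updater.
    volatile long errorCount;

    // One shared updater serves every Counter instance.
    static final AtomicLongFieldUpdater<Counter> ERROR_COUNT =
            AtomicLongFieldUpdater.newUpdater(Counter.class, "errorCount");

    void onError() {
        ERROR_COUNT.incrementAndGet(this); // atomic increment of this instance's field
    }

    long errors() {
        return ERROR_COUNT.get(this);
    }
}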
@@ -256,7 +259,6 @@ public abstract class DruidAbstractDataSource extends WrapperAdapter implements
    static final AtomicLongFieldUpdater<DruidAbstractDataSource> destroyCountUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "destroyCount");
    static final AtomicLongFieldUpdater<DruidAbstractDataSource> createStartNanosUpdater = AtomicLongFieldUpdater.newUpdater(DruidAbstractDataSource.class, "createStartNanos");

    private Boolean useUnfairLock = true;
    private boolean useLocalSessionState = true;
    private boolean keepConnectionUnderlyingTransactionIsolation;
@@ -382,8 +384,6 @@ public abstract class DruidAbstractDataSource extends WrapperAdapter implements
                this.lock = new ReentrantLock(!useUnfairLock);
                this.notEmpty = this.lock.newCondition();
                this.empty = this.lock.newCondition();

                this.useUnfairLock = useUnfairLock;
            }
        } finally {
            lock.unlock();
@@ -519,6 +519,14 @@ public abstract class DruidAbstractDataSource extends WrapperAdapter implements
        cachedPreparedStatementMissCountUpdater.incrementAndGet(this);
    }

+   protected void incrementUserPasswordVersion() {
+       userPasswordVersionUpdater.incrementAndGet(this);
+   }
+
+   protected long getUserPasswordVersion() {
+       return userPasswordVersionUpdater.get(this);
+   }
+
    public long getCachedPreparedStatementMissCount() {
        return cachedPreparedStatementMissCount;
    }
@@ -85,6 +85,7 @@ public final class DruidConnectionHolder {
    final ReentrantLock lock = new ReentrantLock();
    protected String initSchema;
    protected Socket socket;
+   protected final long userPasswordVersion;

    volatile FilterChainImpl filterChain;
@@ -116,6 +117,7 @@ public final class DruidConnectionHolder {
        this.createNanoSpan = connectNanoSpan;
        this.variables = variables;
        this.globalVariables = globalVariables;
+       this.userPasswordVersion = dataSource.getUserPasswordVersion();

        this.connectTimeMillis = System.currentTimeMillis();
        this.lastActiveTimeMillis = connectTimeMillis;
@@ -468,4 +470,7 @@ public final class DruidConnectionHolder {
        return buf.toString();
    }

+   public long getUserPasswordVersion() {
+       return userPasswordVersion;
+   }
}
@@ -234,8 +234,8 @@ public class DruidDataSource extends DruidAbstractDataSource
        lock.lock();
        try {
            urlUserPasswordChanged = (url != null && !this.jdbcUrl.equals(url))
-                   || (username != null && !this.username.equals(username))
-                   || (password != null && !this.password.equals(password));
+                   || (username != null && !username.equals(this.username))
+                   || (password != null && !password.equals(this.password));

            String connectUser = username != null ? username : this.username;
            if (username != null) {
@@ -266,19 +266,22 @@ public class DruidDataSource extends DruidAbstractDataSource

        lock.lock();
        try {
-           if (url != null && !this.jdbcUrl.equals(url)) {
-               this.jdbcUrl = url; // direct set url, ignore init check
-               LOG.info("jdbcUrl changed");
-           }
+           if (urlUserPasswordChanged) {
+               if (url != null && !url.equals(this.jdbcUrl)) {
+                   this.jdbcUrl = url; // direct set url, ignore init check
+                   LOG.info("jdbcUrl changed");
+               }

-           if (username != null && !this.username.equals(username)) {
-               this.username = username; // direct set, ignore init check
-               LOG.info("username changed");
-           }
+               if (username != null && !username.equals(this.username)) {
+                   this.username = username; // direct set, ignore init check
+                   LOG.info("username changed");
+               }

-           if (password != null && !this.password.equals(password)) {
-               this.password = password; // direct set, ignore init check
-               LOG.info("password changed");
+               if (password != null && !password.equals(this.password)) {
+                   this.password = password; // direct set, ignore init check
+                   LOG.info("password changed");
+               }
+               incrementUserPasswordVersion();
            }

            {
@@ -305,32 +308,46 @@ public class DruidDataSource extends DruidAbstractDataSource
            }

            DruidDataSourceUtils.configFromProperties(this, properties);

            if (urlUserPasswordChanged) {
                for (int i = poolingCount - 1; i >= 0; i--) {
                    DruidConnectionHolder connection = connections[i];
                    JdbcUtils.close(connection.conn);
                    destroyCountUpdater.incrementAndGet(this);
                    connections[i] = null;
                }
                poolingCount = 0;
                emptySignal();
            }
        } finally {
            lock.unlock();
        }

        int minIdle = this.minIdle;
        for (int i = 0; i < minIdle; ++i) {
            // check need fill
            lock.lock();
        int replaceCount = 0;
        // replace older version urlUserPassword Connection
        while ((hasOlderVersionUrlUserPasswordConnection())) {
            try {
                if (activeCount + poolingCount >= minIdle) {
                PhysicalConnectionInfo phyConnInfo = createPhysicalConnection();

                boolean result = false;
                lock.lock();
                try {
                    for (int i = poolingCount - 1; i >= 0; i--) {
                        if (connections[i].getUserPasswordVersion() < userPasswordVersion) {
                            connections[i] = new DruidConnectionHolder(DruidDataSource.this, phyConnInfo);
                            result = true;
                            replaceCount++;
                            break;
                        }
                    }
                } finally {
                    lock.unlock();
                }

                if (!result) {
                    JdbcUtils.close(phyConnInfo.getPhysicalConnection());
                    LOG.info("replace older version urlUserPassword failed.");
                    break;
                }
            } finally {
                lock.unlock();
            } catch (SQLException e) {
                LOG.error("fill init connection error", e);
            }
        }

        if (replaceCount > 0) {
            LOG.info("replace older version urlUserPassword Connection : " + replaceCount);
        }

        while ((isLowWaterLevel())) {
            try {
                PhysicalConnectionInfo physicalConnection = createPhysicalConnection();
@@ -345,6 +362,30 @@ public class DruidDataSource extends DruidAbstractDataSource
        }
    }

+   private boolean hasOlderVersionUrlUserPasswordConnection() {
+       lock.lock();
+       try {
+           long userPasswordVersion = this.userPasswordVersion;
+           for (int i = 0; i < poolingCount; i++) {
+               if (connections[i].getUserPasswordVersion() < userPasswordVersion) {
+                   return true;
+               }
+           }
+       } finally {
+           lock.unlock();
+       }
+       return false;
+   }
+
+   private boolean isLowWaterLevel() {
+       lock.lock();
+       try {
+           return activeCount + poolingCount < minIdle;
+       } finally {
+           lock.unlock();
+       }
+   }
+
    public boolean isKillWhenSocketReadTimeout() {
        return killWhenSocketReadTimeout;
    }
@@ -1915,7 +1956,8 @@ public class DruidDataSource extends DruidAbstractDataSource
            return;
        }

-       if (phyMaxUseCount > 0 && holder.useCount >= phyMaxUseCount) {
+       if ((phyMaxUseCount > 0 && holder.useCount >= phyMaxUseCount)
+               || holder.userPasswordVersion < getUserPasswordVersion()) {
            discardConnection(holder);
            return;
        }
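Taken together, the changes above let the JDBC url, username, and password be rotated on a running pool: configFromProperties bumps userPasswordVersion, idle connections carrying an older version are replaced, and checked-out connections are discarded when they are recycled. A minimal usage sketch that mirrors the UserPasswordVersionTest added below; the mock url and the u0/p0, u1/p1 values are placeholders, not part of the patch:

import com.alibaba.druid.pool.DruidDataSource;

import java.util.Properties;

public class CredentialRotationDemo {
    public static void main(String[] args) throws Exception {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl("jdbc:mock:demo");
        dataSource.setUsername("u0");
        dataSource.setPassword("p0");
        dataSource.setMinIdle(3);
        dataSource.init();

        // Rotate credentials at runtime. This bumps userPasswordVersion, so idle
        // connections built with u0/p0 are replaced and busy ones are discarded
        // the next time they are recycled.
        Properties properties = new Properties();
        properties.put("druid.username", "u1");
        properties.put("druid.password", "p1");
        dataSource.configFromProperties(properties);

        dataSource.close();
    }
}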
@@ -1975,6 +2017,7 @@ public class DruidDataSource extends DruidAbstractDataSource
            }
        }

+       boolean full = false;
        lock.lock();
        try {
            if (holder.active) {
@@ -1985,13 +2028,20 @@ public class DruidDataSource extends DruidAbstractDataSource

            result = putLast(holder, currentTimeMillis);
            recycleCount++;
+           if (!result) {
+               full = poolingCount + activeCount >= maxActive;
+           }
        } finally {
            lock.unlock();
        }

        if (!result) {
            JdbcUtils.close(holder.conn);
-           LOG.info("connection recycle failed.");
+           String msg = "connection recycle failed.";
+           if (full) {
+               msg += " pool is full";
+           }
+           LOG.info(msg);
        }
    } catch (Throwable e) {
        holder.clearStatementCache();
@@ -2442,7 +2492,7 @@ public class DruidDataSource extends DruidAbstractDataSource
    }

    protected boolean put(PhysicalConnectionInfo physicalConnectionInfo) {
-       DruidConnectionHolder holder = null;
+       DruidConnectionHolder holder;
        try {
            holder = new DruidConnectionHolder(DruidDataSource.this, physicalConnectionInfo);
        } catch (SQLException ex) {
@@ -33,12 +33,14 @@ public class SQLAlterTableAlterColumn extends SQLObjectImpl implements SQLAlterT
    private SQLName after;
    private SQLDataType dataType;
    private boolean toFirst;
+   private SQLExpr using;

    @Override
    protected void accept0(SQLASTVisitor visitor) {
        if (visitor.visit(this)) {
            acceptChild(visitor, column);
            acceptChild(visitor, setDefault);
+           acceptChild(visitor, using);
        }
        visitor.endVisit(this);
    }
@@ -124,4 +126,16 @@ public class SQLAlterTableAlterColumn extends SQLObjectImpl implements SQLAlterT
        }
        this.dataType = x;
    }
+
+   public SQLExpr getUsing() {
+       return using;
+   }
+
+   public void setUsing(SQLExpr using) {
+       if (using != null) {
+           using.setParent(this);
+       }
+       this.using = using;
+   }
+
}
@@ -787,6 +787,18 @@ public class PGSQLStatementParser extends SQLStatementParser {
                lexer.nextToken();
                accept(Token.NULL);
                alterColumn.setSetNotNull(true);
+           } else if (lexer.identifierEquals("DATA")) {
+               // alter column ... set data type ...
+               lexer.nextToken();
+               accept(Token.TYPE);
+               SQLDataType dataType = this.exprParser.parseDataType();
+               alterColumn.setDataType(dataType);
+               // using ...
+               if (lexer.token() == USING) {
+                   lexer.nextToken();
+                   SQLExpr usingExpr = this.exprParser.expr();
+                   alterColumn.setUsing(usingExpr);
+               }
            } else {
                accept(Token.DEFAULT);
                SQLExpr defaultValue = this.exprParser.expr();
@@ -2995,6 +2995,15 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
        return false;
    }

+   public boolean visit(SQLAlterTableAlterColumn x) {
+       super.visit(x);
+       if (x.getUsing() != null) {
+           print0(ucase ? " USING " : " using ");
+           x.getUsing().accept(this);
+       }
+       return false;
+   }
+
    @Override
    protected void printTableOptionsPrefix(SQLCreateTableStatement x) {
        println();
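With the parser and output-visitor changes in place, the USING expression survives a parse/format round trip. A short sketch of driving this through the public SQLUtils API; the table and column names are illustrative, and the tests added below cover the same ground:

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAlterColumn;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableStatement;

public class AlterColumnUsingDemo {
    public static void main(String[] args) {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE integer USING my_column::integer";

        // Parse with the PostgreSQL dialect and pick out the ALTER COLUMN item.
        SQLStatement stmt = SQLUtils.parseStatements(sql, DbType.postgresql).get(0);
        SQLAlterTableAlterColumn item =
                (SQLAlterTableAlterColumn) ((SQLAlterTableStatement) stmt).getItems().get(0);

        // The USING expression is now kept on the AST and re-emitted by PGOutputVisitor.
        System.out.println(item.getUsing()); // my_column::integer
        System.out.println(stmt);            // formatted ALTER TABLE ... USING ...
    }
}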
@@ -0,0 +1,97 @@
package com.alibaba.druid.bvt.pool;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidPooledConnection;
import junit.framework.TestCase;
import org.junit.Assert;

import java.sql.Connection;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Scenario: url/username/password are changed at runtime and pooled connections
 * created with the old credentials are replaced (userPasswordVersion).
 *
 * @author wenshao [szujobs@hotmail.com]
 */
public class UserPasswordVersionTest extends TestCase {
    private DruidDataSource dataSource;

    protected void setUp() throws Exception {
        dataSource = new DruidDataSource();
        dataSource.setUrl("jdbc:mock:xxx");
        dataSource.setUsername("u0");
        dataSource.setPassword("p0");
        dataSource.setTestOnBorrow(false);
        dataSource.setMaxActive(30);
        dataSource.setMaxWait(30);
        dataSource.setInitialSize(1);
        dataSource.setMinIdle(3);
        dataSource.init();
    }

    protected void tearDown() throws Exception {
        dataSource.close();
    }

    public void test_maxWait() throws Exception {
        ExecutorService executor = Executors.newFixedThreadPool(10);
        final CountDownLatch latch0 = new CountDownLatch(1);
        executor.submit(
                () -> {
                    try {
                        DruidPooledConnection[] connections = new DruidPooledConnection[10];
                        for (int i = 0; i < connections.length; i++) {
                            connections[i] = dataSource.getConnection();
                        }
                        for (int i = 0; i < connections.length; i++) {
                            connections[i].close();
                        }
                        assertEquals(connections.length, dataSource.getPoolingCount());
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        latch0.countDown();
                    }
                }
        );
        latch0.await();

        DruidPooledConnection conn = dataSource.getConnection();
        assertEquals(0, conn.getConnectionHolder().getUserPasswordVersion());

        final CountDownLatch latch1 = new CountDownLatch(1);
        executor.submit(() -> {
            try {
                Properties properties = new Properties();
                properties.put("druid.username", "u1");
                properties.put("druid.password", "p1");
                dataSource.configFromProperties(properties);
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                latch1.countDown();
            }
        });
        latch1.await();

        assertEquals(9, dataSource.getPoolingCount());
        assertEquals(1, dataSource.getActiveCount());

        conn.close();

        assertEquals(9, dataSource.getPoolingCount());
        assertEquals(0, dataSource.getActiveCount());

        DruidPooledConnection conn1 = dataSource.getConnection();
        assertEquals(1, conn1.getConnectionHolder().getUserPasswordVersion());
        conn1.close();

        assertEquals(9, dataSource.getPoolingCount());
        assertEquals(0, dataSource.getActiveCount());
    }
}
@@ -0,0 +1,257 @@
package com.alibaba.druid.sql.dialect.postgresql.parser;

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLCastExpr;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAlterColumn;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableStatement;
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGSchemaStatVisitor;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.stat.TableStat;
import org.junit.Assert;
import org.junit.Test;

import java.util.List;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

/**
 * @author gaattc
 * @since 1.2.24
 */
public class PGSQLStatementParserTest {

    /**
     * ALTER COLUMN ... SET DATA TYPE ...
     */
    @Test
    public void testAlterColumnSetDataType() {
        String sql = "alter table if exists products alter column price set data type decimal(12,2)";

        PGSQLStatementParser parser = new PGSQLStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();

        assertEquals(1, statementList.size());
        SQLAlterTableStatement stmt = (SQLAlterTableStatement) statementList.get(0);
        assertEquals(SQLAlterTableStatement.class, stmt.getClass());

        assertEquals("products", stmt.getTableName());
        assertTrue(stmt.isIfExists());

        SQLAlterTableAlterColumn alterColumnItem = (SQLAlterTableAlterColumn) stmt.getItems().get(0);
        assertNotNull(alterColumnItem);

        assertEquals("price", alterColumnItem.getColumn().getName().getSimpleName());
        assertNotNull(alterColumnItem.getDataType());
        assertEquals("decimal", alterColumnItem.getDataType().getName().toLowerCase());
        assertEquals(2, alterColumnItem.getDataType().getArguments().size());
        assertEquals("12", alterColumnItem.getDataType().getArguments().get(0).toString());
        assertEquals("2", alterColumnItem.getDataType().getArguments().get(1).toString());

        String outputSql = stmt.toString();
        assertEquals("ALTER TABLE IF EXISTS products\n\tALTER COLUMN price SET DATA TYPE decimal(12, 2)", outputSql);
    }

    /**
     * ALTER COLUMN ... SET DEFAULT ...
     */
    @Test
    public void testAlterColumnSetDefault() {
        String sql = "ALTER TABLE products ALTER COLUMN price SET DEFAULT 7.77";

        PGSQLStatementParser parser = new PGSQLStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLAlterTableStatement stmt = (SQLAlterTableStatement) statementList.get(0);

        SQLAlterTableAlterColumn alterColumnItem = (SQLAlterTableAlterColumn) stmt.getItems().get(0);

        assertEquals("price", alterColumnItem.getColumn().getName().getSimpleName());
        assertNotNull(alterColumnItem.getSetDefault());
        assertEquals("7.77", alterColumnItem.getSetDefault().toString());
        assertNull(alterColumnItem.getDataType());
    }

    /**
     * ALTER COLUMN ... SET NOT NULL
     */
    @Test
    public void testAlterColumnSetNotNull() {
        String sql = "ALTER TABLE products ALTER COLUMN price SET NOT NULL";

        PGSQLStatementParser parser = new PGSQLStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLAlterTableStatement stmt = (SQLAlterTableStatement) statementList.get(0);

        SQLAlterTableAlterColumn alterColumnItem = (SQLAlterTableAlterColumn) stmt.getItems().get(0);

        assertEquals("price", alterColumnItem.getColumn().getName().getSimpleName());
        assertTrue(alterColumnItem.isSetNotNull());
        assertNull(alterColumnItem.getDataType());
    }

    /**
     * DROP
     */
    @Test
    public void testAlterColumnDropDefault() {
        String sql = "ALTER TABLE products ALTER COLUMN price DROP DEFAULT";

        PGSQLStatementParser parser = new PGSQLStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();
        SQLAlterTableStatement stmt = (SQLAlterTableStatement) statementList.get(0);

        SQLAlterTableAlterColumn alterColumnItem = (SQLAlterTableAlterColumn) stmt.getItems().get(0);

        assertEquals("price", alterColumnItem.getColumn().getName().getSimpleName());
        assertTrue(alterColumnItem.isDropDefault());
        assertNull(alterColumnItem.getDataType());
    }

    /**
     * Core feature under test: ALTER COLUMN ... SET DATA TYPE ... USING ...
     */
    @Test
    public void testAlterColumnSetDataTypeWithUsing() {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE integer USING my_column::integer";

        SQLAlterTableStatement stmt = parseSingleAlterStatement(sql);

        // verify the table name
        assertEquals("my_table", stmt.getTableName());

        SQLAlterTableAlterColumn item = (SQLAlterTableAlterColumn) stmt.getItems().get(0);

        // verify the column name
        assertEquals("my_column", item.getColumn().getName().getSimpleName());

        // verify the new data type
        assertEquals("integer", item.getDataType().getName().toLowerCase());

        // verify the USING clause
        assertNotNull("USING clause should be parsed", item.getUsing());
        assertTrue("USING expression should be a CAST expression", item.getUsing() instanceof SQLCastExpr);
        assertEquals("my_column::integer", SQLUtils.toPGString(item.getUsing()));
    }

    /**
     * Edge case: a schema-qualified type name in SET DATA TYPE ... USING ...
     */
    @Test
    public void testAlterColumnWithSchemaQualifiedType() {
        String sql = "ALTER TABLE public.users ALTER COLUMN user_status SET DATA TYPE custom_schema.user_enum USING user_status::text::custom_schema.user_enum";

        SQLAlterTableStatement stmt = parseSingleAlterStatement(sql);

        // verify the schema-qualified table name
        assertEquals("public.users", stmt.getTableSource().toString());

        SQLAlterTableAlterColumn item = (SQLAlterTableAlterColumn) stmt.getItems().get(0);

        // verify the data type
        assertEquals("custom_schema.user_enum", item.getDataType().toString());

        // verify the USING clause
        assertNotNull(item.getUsing());
        assertEquals("custom_schema.user_enum(user_status::text)", SQLUtils.toPGString(item.getUsing()));
    }

    /**
     * Round trip through the visitor: the parsed AST must be rendered back
     * to the original SQL or an equivalent PG SQL.
     */
    @Test
    public void testVisitorOutputCompleteness() {
        String originalSql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE integer USING my_column::integer";
        SQLStatement stmt = parseSingleAlterStatement(originalSql);

        // format the SQL via SQLUtils, which uses the output visitor internally
        String formattedSql = SQLUtils.toSQLString(stmt, DbType.postgresql, new SQLUtils.FormatOption(true, true));

        String expectedFormattedSql =
                "ALTER TABLE my_table\n" +
                        "\tALTER COLUMN my_column SET DATA TYPE integer USING my_column::integer";

        assertEquals(expectedFormattedSql, formattedSql);
    }

    /**
     * The schema statistics visitor should account for the USING clause.
     */
    @Test
    public void testSchemaStatVisitorWithUsing() {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE integer USING old_column::integer";

        // SQLUtils.parseStatements keeps the parsing boilerplate short
        SQLStatement stmt = SQLUtils.parseStatements(sql, DbType.postgresql).get(0);

        PGSchemaStatVisitor visitor = new PGSchemaStatVisitor();
        stmt.accept(visitor);

        // the table is recognized
        assertTrue("The table 'my_table' should be present in the stats.",
                visitor.containsTable("my_table"));

        // the ALTER COLUMN target is recognized
        TableStat.Column myColumnStat = visitor.getColumn("my_table", "my_column");
        assertNotNull("The altered column 'my_column' should be identified by the visitor.", myColumnStat);

        // the column referenced in the USING clause is recognized
        TableStat.Column oldColumnStat = visitor.getColumn("my_table", "old_column");
        assertNotNull("The column 'old_column' from the USING clause should be identified by the visitor.", oldColumnStat);
    }

    /**
     * Invalid syntax: 'SET DATA' not followed by TYPE
     */
    @Test(expected = ParserException.class)
    public void testInvalidSyntax_SetDataWithoutType() {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA my_column";
        // parsing should fail at 'my_column' because the parser expects 'TYPE'
        parseSingleAlterStatement(sql);
    }

    /**
     * Invalid syntax: 'SET DATA TYPE' with no type definition
     */
    @Test(expected = ParserException.class)
    public void testInvalidSyntax_SetDataTypeWithoutDefinition() {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE";
        // the statement ends without a data type, so parsing should fail
        parseSingleAlterStatement(sql);
    }

    /**
     * Invalid syntax: 'USING' with no following expression
     */
    @Test(expected = ParserException.class)
    public void testInvalidSyntax_UsingWithoutExpression() {
        String sql = "ALTER TABLE my_table ALTER COLUMN my_column SET DATA TYPE integer USING";
        // the statement ends without a USING expression, so parsing should fail
        parseSingleAlterStatement(sql);
    }

    /**
     * Helper that parses a single ALTER statement and returns its AST.
     *
     * @param sql the SQL text
     * @return the SQLAlterTableStatement
     */
    private SQLAlterTableStatement parseSingleAlterStatement(String sql) {
        PGSQLStatementParser parser = new PGSQLStatementParser(sql);
        List<SQLStatement> statementList = parser.parseStatementList();

        if (statementList.size() != 1) {
            Assert.fail("Expected a single statement, but got " + statementList.size());
        }

        SQLStatement stmt = statementList.get(0);
        if (!(stmt instanceof SQLAlterTableStatement)) {
            Assert.fail("Expected SQLAlterTableStatement, but got " + stmt.getClass().getName());
        }
        return (SQLAlterTableStatement) stmt;
    }

}
@@ -34,7 +34,7 @@
        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
-           <version>1.2.26-SNAPSHOT</version>
+           <version>1.2.28-SNAPSHOT</version>
        </dependency>
        <!-- Spring and Spring Boot dependencies -->
        <dependency>
@@ -5,7 +5,7 @@
    <parent>
        <groupId>com.alibaba</groupId>
        <artifactId>druid-parent</artifactId>
-       <version>1.2.26-SNAPSHOT</version>
+       <version>1.2.28-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>druid-spring-boot-3-starter</artifactId>
@@ -53,44 +53,52 @@
        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>2.0.17</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-autoconfigure</artifactId>
            <version>${springboot3.version}</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-jdbc</artifactId>
            <version>6.1.21</version>
            <optional>true</optional>
        </dependency>
        <!-- @ConfigurationProperties annotation processing (metadata for IDEs) -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
            <version>${springboot3.version}</version>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <version>${springboot3.version}</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <version>${springboot3.version}</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-data-jpa</artifactId>
            <version>${springboot3.version}</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>com.h2database</groupId>
            <artifactId>h2</artifactId>
            <version>2.3.232</version>
            <scope>test</scope>
        </dependency>
    </dependencies>
@@ -119,8 +127,8 @@
            <plugin>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
-                   <source>9</source>
-                   <target>9</target>
+                   <source>17</source>
+                   <target>17</target>
                    <annotationProcessorPaths>
                        <path>
                            <groupId>org.springframework.boot</groupId>
@@ -5,7 +5,7 @@
    <parent>
        <groupId>com.alibaba</groupId>
        <artifactId>druid-parent</artifactId>
-       <version>1.2.26-SNAPSHOT</version>
+       <version>1.2.28-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <artifactId>druid-spring-boot-starter</artifactId>
@@ -4,7 +4,7 @@
    <parent>
        <groupId>com.alibaba</groupId>
        <artifactId>druid-parent</artifactId>
-       <version>1.2.26-SNAPSHOT</version>
+       <version>1.2.28-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>
    <groupId>com.alibaba</groupId>
pom.xml
@@ -6,7 +6,7 @@
    <groupId>com.alibaba</groupId>
    <artifactId>druid-parent</artifactId>
-   <version>1.2.26-SNAPSHOT</version>
+   <version>1.2.28-SNAPSHOT</version>
    <name>${project.artifactId}</name>
    <description>A JDBC datasource implementation.</description>
    <packaging>pom</packaging>
@@ -80,10 +80,6 @@
    </developers>

    <distributionManagement>
-       <repository>
-           <id>central</id>
-           <url>https://central.sonatype.org/service/local/staging/deploy/maven2/</url>
-       </repository>
        <snapshotRepository>
            <id>central</id>
            <url>https://central.sonatype.com/repository/maven-snapshots/</url>
@@ -176,6 +172,15 @@
                </execution>
            </executions>
        </plugin>
+       <plugin>
+           <groupId>org.sonatype.central</groupId>
+           <artifactId>central-publishing-maven-plugin</artifactId>
+           <version>0.8.0</version>
+           <extensions>true</extensions>
+           <configuration>
+               <publishingServerId>central</publishingServerId>
+           </configuration>
+       </plugin>
    </plugins>
    <pluginManagement>
        <plugins>
@@ -201,7 +206,7 @@
        </plugin>
        <plugin>
            <artifactId>maven-javadoc-plugin</artifactId>
-           <version>3.4.1</version>
+           <version>3.8.0</version>
        </plugin>
        <plugin>
            <artifactId>maven-surefire-plugin</artifactId>