Compare commits

...

8 Commits

Author SHA1 Message Date
changlu 8409d8f5db Merge df7909f716 into a71fa6add0 2025-10-06 05:59:14 +00:00
wenshao a71fa6add0 improved odps sql parser 2025-09-22 13:30:03 +08:00
wenshao 0b375e47ed fix template variant 2025-09-15 19:34:17 +08:00
林枸 fa3e37fa05 Fix load DialectFeature performance issue and Dialect init performance issue. 2025-09-15 17:58:52 +08:00
wenshao 16c0a1d74f fix Lexer#loadKeywords performance 2025-09-15 15:08:34 +08:00
wenshao 77fa5e84a2 fix SQLDialect performance 2025-09-15 11:35:19 +08:00
Shaojin Wen 81ebdebf67 Checkstyle 2509 (#6525)
* fix test checkstyle

* fix testcase

* fix testcase

* fix checkstyle
2025-09-14 02:27:49 +08:00
changlu df7909f716 [master][visitor][fix SQLTableAliasCollectVisitor collecting table aliases] 2025-03-25 16:54:03 +08:00
3276 changed files with 9169 additions and 16946 deletions

View File

@@ -38,7 +38,7 @@ public class MySqlMockExecuteHandlerImpl implements MockExecuteHandler {
@Override
public ResultSet executeQuery(MockStatementBase statement, String sql) throws SQLException {
SQLStatementParser parser = new MySqlStatementParser(sql);
List<SQLStatement> stmtList = parser.parseStatementList(); //
List<SQLStatement> stmtList = parser.parseStatementList();
if (stmtList.size() > 1) {
throw new SQLException("not support multi-statment. " + sql);

View File

@@ -2868,7 +2868,7 @@ public class DruidDataSource extends DruidAbstractDataSource
if (timeBetweenEvictionRunsMillis > 0) {
Thread.sleep(timeBetweenEvictionRunsMillis);
} else {
Thread.sleep(1000); //
Thread.sleep(1000);
}
if (Thread.interrupted()) {

View File

@@ -5,11 +5,14 @@ import com.alibaba.druid.util.FnvHash;
import com.alibaba.druid.util.Utils;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
/**
* @since 1.2.25
*/
public class SQLDialect {
private static final Map<DbType, SQLDialect> DIALECTS = new ConcurrentHashMap<>();
private int quoteChars;
private final DbType dbType;
private final Keyword keywords;
@@ -73,6 +76,13 @@ public class SQLDialect {
}
public static SQLDialect of(DbType dbType) {
if (dbType == null) {
return null;
}
return DIALECTS.computeIfAbsent(dbType, SQLDialect::create);
}
private static SQLDialect create(DbType dbType) {
String dir = "META-INF/druid/parser/".concat(dbType.name().toLowerCase());
Properties props = Utils.loadProperties(dir.concat("/dialect.properties"));
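
The two hunks above are the heart of the "fix SQLDialect performance" commit: dialects are now interned in a ConcurrentHashMap keyed by DbType, so the dialect.properties file under META-INF/druid/parser/ is parsed at most once per dialect instead of on every lookup. A minimal sketch of the caching pattern, with a stand-in payload in place of the real properties parsing:

import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

// CachedDialect stands in for SQLDialect; the enum and the empty Properties
// are simplifications of the real DbType and dialect.properties loading.
final class CachedDialect {
    enum DbType { mysql, oracle, odps }

    private static final Map<DbType, CachedDialect> DIALECTS = new ConcurrentHashMap<>();
    private final Properties props;

    private CachedDialect(Properties props) {
        this.props = props;
    }

    static CachedDialect of(DbType dbType) {
        if (dbType == null) {
            return null;
        }
        // computeIfAbsent invokes create() at most once per DbType, even with
        // concurrent callers; every later lookup is a single map read.
        return DIALECTS.computeIfAbsent(dbType, CachedDialect::create);
    }

    private static CachedDialect create(DbType dbType) {
        Properties props = new Properties(); // the real code reads dialect.properties here
        return new CachedDialect(props);
    }
}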

View File

@@ -38,7 +38,7 @@ import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveSchemaStatVisitor;
import com.alibaba.druid.sql.dialect.hologres.visitor.HologresOutputVisitor;
import com.alibaba.druid.sql.dialect.impala.visitor.ImpalaOutputVisitor;
import com.alibaba.druid.sql.dialect.infomix.visitor.InformixOutputVisitor;
import com.alibaba.druid.sql.dialect.informix.visitor.InformixOutputVisitor;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlObject;
import com.alibaba.druid.sql.dialect.mysql.ast.clause.MySqlSelectIntoStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlInsertStatement;

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.athena;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Athena {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.athena);
}
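
Each dialect in this diff gains a small holder class like the one above. Because DIALECT is a static final field, SQLDialect.of(...) runs once when the holder class is first loaded, and every parser or visitor that references it afterwards shares that instance. An illustrative usage sketch (the wrapper class is not part of the diff):

import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.dialect.athena.Athena;

class AthenaDialectUsage {
    static SQLDialect dialect() {
        return Athena.DIALECT; // resolved once, at class-load time
    }
}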

View File

@@ -2,16 +2,17 @@ package com.alibaba.druid.sql.dialect.athena.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.athena.Athena;
import com.alibaba.druid.sql.dialect.athena.ast.stmt.AthenaCreateTableStatement;
import com.alibaba.druid.sql.dialect.presto.visitor.PrestoOutputVisitor;
public class AthenaOutputVisitor extends PrestoOutputVisitor implements AthenaASTVisitor {
public AthenaOutputVisitor(StringBuilder appender) {
super(appender, DbType.athena);
super(appender, DbType.athena, Athena.DIALECT);
}
public AthenaOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.athena, parameterized);
super(appender, DbType.athena, Athena.DIALECT, parameterized);
}
protected void printCreateTable(SQLCreateTableStatement x, boolean printSelect) {
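
The constructor change above repeats across nearly every output visitor in this diff: rather than each subclass mutating dbType after calling super, or the base class looking the dialect up per instance, the per-dialect singleton is threaded down the constructor chain. A sketch of the assumed base-class shape (DbType and SQLDialect are the real druid types; the visitor class itself is illustrative):

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;

class DialectAwareVisitor {
    protected final StringBuilder appender;
    protected DbType dbType;
    protected SQLDialect dialect;

    DialectAwareVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
        this.appender = appender;
        this.dbType = dbType;
        this.dialect = dialect; // no per-instance SQLDialect.of() lookup needed
    }
}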

View File

@@ -10,8 +10,9 @@ import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class BigQueryLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
static final Keywords BIGQUERY_KEYWORDS;
static final DialectFeature BIGQUERY_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<String, Token>();
// map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -113,7 +114,13 @@ public class BigQueryLexer extends Lexer {
map.put("RAISE", Token.RAISE);
map.put("ELSEIF", Token.ELSEIF);
return new Keywords(map);
BIGQUERY_KEYWORDS = new Keywords(map);
BIGQUERY_FEATURE.configFeature(SQLDateExpr, GroupByAll, InRestSpecificOperation);
}
@Override
protected Keywords loadKeywords() {
return BIGQUERY_KEYWORDS;
}
public BigQueryLexer(String input, SQLParserFeature... features) {
@@ -299,8 +306,7 @@ public class BigQueryLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(SQLDateExpr, GroupByAll, InRestSpecificOperation);
this.dialectFeature = BIGQUERY_FEATURE;
}
@Override
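
These hunks show the shape of the "fix Lexer#loadKeywords performance" commit. Previously every lexer instance filled a fresh HashMap with the dialect's keywords; now the table is built once in a static initializer and loadKeywords() returns the shared object. A simplified before/after sketch (plain maps stand in for the Keywords and Token types):

import java.util.HashMap;
import java.util.Map;

// Before: each construction re-populated its own keyword map.
class OldStyleLexer {
    protected Map<String, Integer> loadKeywords() {
        Map<String, Integer> map = new HashMap<>();
        map.put("SELECT", 1); // ...dozens of entries, repeated per instance
        return map;
    }
}

// After: the map is populated once, when the JVM initializes the class,
// and loadKeywords() hands out the shared table by reference.
class NewStyleLexer {
    static final Map<String, Integer> KEYWORDS;
    static {
        Map<String, Integer> map = new HashMap<>();
        map.put("SELECT", 1);
        KEYWORDS = map;
    }

    protected Map<String, Integer> loadKeywords() {
        return KEYWORDS;
    }
}

The same rewrite is applied below to BlinkLexer, CKLexer, DB2Lexer, DorisLexer, GaussDbLexer, H2Lexer, HiveLexer, MySqlLexer, OracleLexer, OscarLexer, PhoenixLexer, PGLexer, PrestoLexer, RedshiftLexer, SparkLexer, SQLServerLexer, and StarRocksLexer.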

View File

@@ -4,6 +4,7 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.bigquery.BQ;
import com.alibaba.druid.sql.dialect.bigquery.ast.*;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.alibaba.druid.util.FnvHash;
@@ -13,11 +14,11 @@ import java.util.List;
public class BigQueryOutputVisitor extends SQLASTOutputVisitor
implements BigQueryVisitor {
public BigQueryOutputVisitor(StringBuilder appender) {
super(appender, DbType.bigquery);
super(appender, DbType.bigquery, BQ.DIALECT);
}
public BigQueryOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.bigquery, parameterized);
super(appender, DbType.bigquery, BQ.DIALECT, parameterized);
}
protected void printPartitionedBy(SQLCreateTableStatement x) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.blink;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Blink {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.blink);
}

View File

@@ -10,9 +10,9 @@ import java.util.HashMap;
import java.util.Map;
public class BlinkLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords BLINK_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -27,7 +27,12 @@ public class BlinkLexer extends Lexer {
map.put("IF", Token.IF);
map.put("PERIOD", Token.PERIOD);
return new Keywords(map);
BLINK_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return BLINK_KEYWORDS;
}
public BlinkLexer(String input) {

View File

@@ -18,6 +18,7 @@ package com.alibaba.druid.sql.dialect.blink.vsitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLTableElement;
import com.alibaba.druid.sql.dialect.blink.Blink;
import com.alibaba.druid.sql.dialect.blink.ast.BlinkCreateTableStatement;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
@@ -25,7 +26,7 @@ import java.util.List;
public class BlinkOutputVisitor extends SQLASTOutputVisitor implements BlinkVisitor {
public BlinkOutputVisitor(StringBuilder appender) {
super(appender, DbType.blink);
super(appender, DbType.blink, Blink.DIALECT);
}
public BlinkOutputVisitor(StringBuilder appender, DbType dbType) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.clickhouse;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class CK {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.clickhouse);
}

View File

@@ -1,6 +1,7 @@
package com.alibaba.druid.sql.dialect.clickhouse.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.DialectFeature;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
@@ -12,9 +13,10 @@ import java.util.Map;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
public class CKLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords CK_KEYWORDS;
static final DialectFeature CK_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -41,7 +43,19 @@ public class CKLexer extends Lexer {
map.put("CODEC", Token.CODEC);
map.remove("ANY");
return new Keywords(map);
CK_KEYWORDS = new Keywords(map);
CK_FEATURE.configFeature(
AsofJoin,
GlobalJoin,
JoinRightTableAlias,
ParseLimitBy,
TableAliasAsof
);
}
@Override
protected Keywords loadKeywords() {
return CK_KEYWORDS;
}
public CKLexer(String input, SQLParserFeature... features) {
@@ -55,13 +69,6 @@ public class CKLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
AsofJoin,
GlobalJoin,
JoinRightTableAlias,
ParseLimitBy,
TableAliasAsof
);
this.dialectFeature = CK_FEATURE;
}
}
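
The DialectFeature half of the refactor follows the same idea: initDialectFeature() no longer configures flags on a per-instance object but assigns one shared static DialectFeature that was configured in the static block. A sketch under the assumption that DialectFeature is a mutable flag set, which is also why instances must now treat the shared object as read-only:

import java.util.EnumSet;

// FeatureSet stands in for com.alibaba.druid.sql.parser.DialectFeature,
// whose internal representation is assumed here.
class FeatureSet {
    enum Flag { ASOF_JOIN, GLOBAL_JOIN, PARSE_LIMIT_BY }

    private final EnumSet<Flag> flags = EnumSet.noneOf(Flag.class);

    void config(Flag... fs) {
        for (Flag f : fs) {
            flags.add(f);
        }
    }
}

class SketchLexer {
    static final FeatureSet FEATURE = new FeatureSet();
    static {
        FEATURE.config(FeatureSet.Flag.ASOF_JOIN, FeatureSet.Flag.GLOBAL_JOIN);
    }

    FeatureSet dialectFeature;

    protected void initDialectFeature() {
        // Assignment replaces per-instance configuration; mutating FEATURE
        // from one lexer would now be visible to every lexer of this class.
        this.dialectFeature = FEATURE;
    }
}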

View File

@@ -3,6 +3,7 @@ package com.alibaba.druid.sql.dialect.clickhouse.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.clickhouse.CK;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKAlterTableUpdateStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKSelectQueryBlock;
@@ -17,7 +18,7 @@ import java.util.List;
public class CKOutputVisitor extends SQLASTOutputVisitor implements CKASTVisitor {
public CKOutputVisitor(StringBuilder appender) {
super(appender, DbType.clickhouse);
super(appender, DbType.clickhouse, CK.DIALECT);
}
public CKOutputVisitor(StringBuilder appender, DbType dbType) {
@@ -25,7 +26,7 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKASTVisitor
}
public CKOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.clickhouse, parameterized);
super(appender, DbType.clickhouse, CK.DIALECT, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.databricks;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class DataBricks {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.databricks);
}

View File

@@ -2,12 +2,13 @@ package com.alibaba.druid.sql.dialect.databricks.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.databricks.DataBricks;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.spark.visitor.SparkOutputASTVisitor;
public class DatabricksOutputASTVisitor extends SparkOutputASTVisitor implements DatabricksASTASTVisitor {
public DatabricksOutputASTVisitor(StringBuilder appender) {
super(appender, DbType.databricks);
super(appender, DbType.databricks, DataBricks.DIALECT);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.db2;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Db2 {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.db2);
}

View File

@@ -15,6 +15,7 @@
*/
package com.alibaba.druid.sql.dialect.db2.parser;
import com.alibaba.druid.sql.parser.DialectFeature;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
@@ -26,9 +27,10 @@ import java.util.Map;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.ParseAssignItemSkip;
public class DB2Lexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords DB2_KEYWORDS;
static final DialectFeature DB2_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -48,7 +50,13 @@ public class DB2Lexer extends Lexer {
map.put("RESTRICT", Token.RESTRICT);
map.put("CASCADE", Token.CASCADE);
return new Keywords(map);
DB2_KEYWORDS = new Keywords(map);
DB2_FEATURE.configFeature(ParseAssignItemSkip);
}
@Override
protected Keywords loadKeywords() {
return DB2_KEYWORDS;
}
public DB2Lexer(String input) {
@@ -64,7 +72,6 @@ public class DB2Lexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(ParseAssignItemSkip);
this.dialectFeature = DB2_FEATURE;
}
}

View File

@@ -22,16 +22,17 @@ import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.dialect.db2.Db2;
import com.alibaba.druid.sql.dialect.db2.ast.stmt.*;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
public class DB2OutputVisitor extends SQLASTOutputVisitor implements DB2ASTVisitor {
public DB2OutputVisitor(StringBuilder appender) {
super(appender, DbType.db2);
super(appender, DbType.db2, Db2.DIALECT);
}
public DB2OutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.db2, parameterized);
super(appender, DbType.db2, Db2.DIALECT, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.doris;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Doris {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.doris);
}

View File

@@ -11,9 +11,9 @@ import java.util.Map;
public class DorisLexer
extends StarRocksLexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords DORIS_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -45,7 +45,12 @@ public class DorisLexer
map.put("RLIKE", Token.RLIKE);
map.put("OVERWRITE", Token.OVERWRITE);
return new Keywords(map);
DORIS_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return DORIS_KEYWORDS;
}
public DorisLexer(String input, SQLParserFeature... features) {

View File

@@ -9,6 +9,7 @@ import com.alibaba.druid.sql.ast.SQLUnpivot;
import com.alibaba.druid.sql.ast.statement.SQLInsertStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLTableSampling;
import com.alibaba.druid.sql.dialect.doris.Doris;
import com.alibaba.druid.sql.dialect.doris.ast.DorisExprTableSource;
import com.alibaba.druid.sql.dialect.starrocks.visitor.StarRocksOutputVisitor;
@@ -16,18 +17,11 @@ import java.util.List;
public class DorisOutputVisitor extends StarRocksOutputVisitor implements DorisASTVisitor {
public DorisOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.doris;
}
public DorisOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
dbType = DbType.doris;
super(appender, DbType.doris, Doris.DIALECT);
}
public DorisOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.doris;
super(appender, DbType.doris, Doris.DIALECT, parameterized);
}
public void printSqlSetQuantifier(SQLSelectQueryBlock x) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.gaussdb;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class GaussDb {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.gaussdb);
}

View File

@@ -10,9 +10,9 @@ import java.util.HashMap;
import java.util.Map;
public class GaussDbLexer extends PGLexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords GAUSSDB_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.put("DISTRIBUTE", Token.DISTRIBUTE);
map.put("SET", Token.SET);
map.put("PARTITION", Token.PARTITION);
@@ -21,9 +21,14 @@ public class GaussDbLexer extends PGLexer {
map.put("KEY", Token.KEY);
map.put("OVERWRITE", Token.OVERWRITE);
map.put("LOCAL", Token.LOCAL);
map.putAll(super.loadKeywords().getKeywords());
map.putAll(PGLexer.PG_KEYWORDS.getKeywords());
map.remove("LANGUAGE"); // GaussDB does not consider it as a reserved keyword
return new Keywords(map);
GAUSSDB_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return GAUSSDB_KEYWORDS;
}
public GaussDbLexer(String input, SQLParserFeature... features) {

View File

@@ -8,6 +8,7 @@ import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLInsertStatement;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.dialect.gaussdb.GaussDb;
import com.alibaba.druid.sql.dialect.gaussdb.ast.GaussDbDistributeBy;
import com.alibaba.druid.sql.dialect.gaussdb.ast.GaussDbPartitionValue;
import com.alibaba.druid.sql.dialect.gaussdb.ast.stmt.GaussDbCreateTableStatement;
@@ -21,13 +22,11 @@ import java.util.stream.Collectors;
public class GaussDbOutputVisitor extends PGOutputVisitor implements GaussDbASTVisitor {
public GaussDbOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.gaussdb;
super(appender, DbType.gaussdb, GaussDb.DIALECT, parameterized);
}
public GaussDbOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.gaussdb;
super(appender, DbType.gaussdb, GaussDb.DIALECT);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.h2;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class H2 {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.h2);
}

View File

@@ -25,9 +25,9 @@ import java.util.HashMap;
import java.util.Map;
public class H2Lexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords H2_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -41,7 +41,12 @@ public class H2Lexer extends Lexer {
map.put("LIMIT", Token.LIMIT);
map.put("IF", Token.IF);
return new Keywords(map);
H2_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return H2_KEYWORDS;
}
public H2Lexer(String input) {

View File

@@ -23,6 +23,7 @@ import com.alibaba.druid.sql.ast.expr.SQLHexExpr;
import com.alibaba.druid.sql.ast.expr.SQLMethodInvokeExpr;
import com.alibaba.druid.sql.ast.expr.SQLQueryExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.h2.H2;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlAlterTableModifyColumn;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlTableIndex;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
@@ -35,7 +36,7 @@ public class H2OutputVisitor extends SQLASTOutputVisitor implements H2ASTVisitor
private static final Log LOG = LogFactory.getLog(H2OutputVisitor.class);
public H2OutputVisitor(StringBuilder appender) {
super(appender, DbType.h2);
super(appender, DbType.h2, H2.DIALECT);
}
public H2OutputVisitor(StringBuilder appender, DbType dbType) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.hive;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Hive {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.hive);
}

View File

@@ -16,6 +16,7 @@
package com.alibaba.druid.sql.dialect.hive.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.DialectFeature;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.NotAllowCommentException;
@@ -33,9 +34,10 @@ import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class HiveLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
public static final Keywords HIVE_KEYWORDS;
public static final DialectFeature HIVE_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -60,7 +62,30 @@ public class HiveLexer extends Lexer {
map.put("DIV", Token.DIV);
map.put("QUALIFY", Token.QUALIFY);
return new Keywords(map);
HIVE_KEYWORDS = new Keywords(map);
HIVE_FEATURE.configFeature(
ScanSQLTypeWithFrom,
NextTokenColon,
ScanAliasU,
JoinRightTableFrom,
GroupByAll,
SQLDateExpr,
ParseAssignItemRparenCommaSetReturn,
TableAliasLock,
TableAliasPartition,
AsSkip,
AsSequence,
AsDatabase,
AsDefault
);
HIVE_FEATURE.unconfigFeature(
PrimaryBangBangSupport
);
}
@Override
protected Keywords loadKeywords() {
return HIVE_KEYWORDS;
}
public HiveLexer(String input, DbType dbType) {
@@ -189,12 +214,16 @@ public class HiveLexer extends Lexer {
break;
case 'u':
if ((features & SQLParserFeature.SupportUnicodeCodePoint.mask) != 0) {
char c1 = charAt(++pos);
char c2 = charAt(++pos);
char c3 = charAt(++pos);
char c4 = charAt(++pos);
int codePointSize = 0;
for (int i = 0; i < 4; i++, codePointSize++) {
char c = charAt(pos + 1 + i);
if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'))) {
break;
}
}
int intVal = Integer.parseInt(new String(new char[]{c1, c2, c3, c4}), 16);
int intVal = Integer.parseInt(text.substring(pos + 1, pos + 1 + codePointSize), 16);
pos += codePointSize;
putChar((char) intVal);
} else {
@@ -390,24 +419,6 @@ public class HiveLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanSQLTypeWithFrom,
NextTokenColon,
ScanAliasU,
JoinRightTableFrom,
GroupByAll,
SQLDateExpr,
ParseAssignItemRparenCommaSetReturn,
TableAliasLock,
TableAliasPartition,
AsSkip,
AsSequence,
AsDatabase,
AsDefault
);
this.dialectFeature.unconfigFeature(
PrimaryBangBangSupport
);
this.dialectFeature = HIVE_FEATURE;
}
}
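
Besides the keyword and feature refactor, this file fixes the \uXXXX escape scan: the old code unconditionally consumed four characters and parsed them as hex, whereas the new loop accepts one to four hex digits and stops at the first non-hex character. A free-standing sketch of the new scan; the buffer access and the zero-digit guard are assumptions, since the diff itself expects at least one hex digit after the escape:

class UnicodeEscapeSketch {
    // Variable-length unicode escape scan over a plain char[] buffer.
    static int scanUnicodeEscape(char[] text, int pos, StringBuilder out) {
        int codePointSize = 0;
        for (int i = 0; i < 4; i++, codePointSize++) {
            char c = pos + 1 + i < text.length ? text[pos + 1 + i] : '\0';
            if (!((c >= '0' && c <= '9') || (c >= 'a' && c <= 'f') || (c >= 'A' && c <= 'F'))) {
                break; // stop instead of blindly consuming four characters
            }
        }
        if (codePointSize == 0) {
            out.append('u'); // defensive guard, not present in the diff
            return pos;
        }
        int intVal = Integer.parseInt(new String(text, pos + 1, codePointSize), 16);
        out.append((char) intVal);
        return pos + codePointSize; // caller resumes after the consumed digits
    }
}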

View File

@@ -16,6 +16,7 @@
package com.alibaba.druid.sql.dialect.hive.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.SQLAdhocTableSource;
import com.alibaba.druid.sql.ast.SQLCommentHint;
import com.alibaba.druid.sql.ast.SQLExpr;
@@ -25,6 +26,7 @@ import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.hive.Hive;
import com.alibaba.druid.sql.dialect.hive.ast.HiveAddJarStatement;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsert;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsertStatement;
@@ -40,15 +42,19 @@ import java.util.Map;
public class HiveOutputVisitor extends SQLASTOutputVisitor implements HiveASTVisitor {
public HiveOutputVisitor(StringBuilder appender) {
super(appender, DbType.hive);
super(appender, DbType.hive, Hive.DIALECT);
}
public HiveOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
public HiveOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public HiveOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.hive, parameterized);
super(appender, DbType.hive, Hive.DIALECT, parameterized);
}
public HiveOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
super(appender, DbType.hive, Hive.DIALECT, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.hologres;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Hologres {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.hologres);
}

View File

@@ -2,16 +2,10 @@ package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGLexer;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.SQLParserFeature;
public class HologresLexer
extends PGLexer {
@Override
protected Keywords loadKeywords() {
return super.loadKeywords();
}
public HologresLexer(String input, SQLParserFeature... features) {
super(input, features);
dbType = DbType.hologres;

View File

@@ -6,6 +6,7 @@ import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLTableSource;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKSelectQueryBlock;
import com.alibaba.druid.sql.dialect.hologres.Hologres;
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGOutputVisitor;
import com.alibaba.druid.sql.parser.CharTypes;
import com.alibaba.druid.sql.visitor.VisitorFeature;
@@ -14,11 +15,11 @@ import java.util.List;
public class HologresOutputVisitor extends PGOutputVisitor {
public HologresOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.hologres, parameterized);
super(appender, DbType.hologres, Hologres.DIALECT, parameterized);
}
public HologresOutputVisitor(StringBuilder appender) {
super(appender, DbType.hologres);
super(appender, DbType.hologres, Hologres.DIALECT);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.impala;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Impala {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.impala);
}

View File

@@ -6,15 +6,20 @@ import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class ImpalaLexer extends HiveLexer {
static final Keywords IMPALA_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>(HiveLexer.HIVE_KEYWORDS.getKeywords());
map.put("UPSERT", Token.UPSERT);
IMPALA_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
Keywords keywords = super.loadKeywords();
Map<String, Token> map = keywords.getKeywords();
map.put("UPSERT", Token.UPSERT);
return new Keywords(map);
return IMPALA_KEYWORDS;
}
public ImpalaLexer(String input) {
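
Note the subtlety this hunk addresses: the old loadKeywords() called super.loadKeywords() and put UPSERT into the map it got back. With keyword tables now shared statics, mutating the parent's table in place would leak UPSERT into every HiveLexer, so the new code copies HIVE_KEYWORDS into a fresh HashMap before extending it. A sketch of the copy-then-extend rule with stand-in tables:

import java.util.HashMap;
import java.util.Map;

class KeywordDerivationSketch {
    static final Map<String, Integer> PARENT_KEYWORDS = new HashMap<>();
    static final Map<String, Integer> CHILD_KEYWORDS;
    static {
        PARENT_KEYWORDS.put("SELECT", 1);
        Map<String, Integer> map = new HashMap<>(PARENT_KEYWORDS); // defensive copy
        map.put("UPSERT", 2); // child-only keyword; the parent table is untouched
        CHILD_KEYWORDS = map;
    }
}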

View File

@@ -4,6 +4,7 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.impala.Impala;
import com.alibaba.druid.sql.dialect.impala.ast.ImpalaSQLPartitionValue;
import com.alibaba.druid.sql.dialect.impala.stmt.ImpalaCreateTableStatement;
import com.alibaba.druid.sql.dialect.impala.stmt.ImpalaInsertStatement;
@@ -12,13 +13,11 @@ import java.util.List;
public class ImpalaOutputVisitor extends HiveOutputVisitor implements ImpalaASTVisitor {
public ImpalaOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.impala;
super(appender, DbType.impala, Impala.DIALECT);
}
public ImpalaOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.impala;
super(appender, DbType.impala, Impala.DIALECT, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.informix;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Informix {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.informix);
}

View File

@@ -1,4 +1,4 @@
package com.alibaba.druid.sql.dialect.infomix.parser;
package com.alibaba.druid.sql.dialect.informix.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;

View File

@@ -1,4 +1,4 @@
package com.alibaba.druid.sql.dialect.infomix.parser;
package com.alibaba.druid.sql.dialect.informix.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.*;

View File

@@ -1,9 +1,10 @@
package com.alibaba.druid.sql.dialect.infomix.visitor;
package com.alibaba.druid.sql.dialect.informix.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLLimit;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.dialect.informix.Informix;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
public class InformixOutputVisitor extends SQLASTOutputVisitor {
@@ -12,7 +13,7 @@ public class InformixOutputVisitor extends SQLASTOutputVisitor {
}
public InformixOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.informix, parameterized);
super(appender, DbType.informix, Informix.DIALECT, parameterized);
}
protected void printSelectListBefore(SQLSelectQueryBlock x) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.mysql;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public final class MySQL {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.mysql);
}

View File

@@ -29,10 +29,10 @@ import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class MySqlLexer extends Lexer {
public static SymbolTable quoteTable = new SymbolTable(8192);
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
public static DialectFeature MYSQL_FEATURE = new DialectFeature();
static final Keywords MYSQL_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -67,7 +67,32 @@ public class MySqlLexer extends Lexer {
map.put("RLIKE", Token.RLIKE);
map.put("FULLTEXT", Token.FULLTEXT);
return new Keywords(map);
MYSQL_KEYWORDS = new Keywords(map);
MYSQL_FEATURE.configFeature(
NextTokenPrefixN,
ScanString2PutDoubleBackslash,
JoinRightTableWith,
PostNaturalJoin,
MultipleJoinOn,
GroupByPostDesc,
GroupByItemOrder,
SQLDateExpr,
PrimaryLbraceOdbcEscape,
ParseSelectItemPrefixX,
ParseStatementListUpdatePlanCache,
ParseStatementListRollbackReturn,
ParseStatementListCommitReturn,
ParseDropTableTables,
AsSequence
);
MYSQL_FEATURE.unconfigFeature(
AdditiveRestPipesAsConcat
);
}
@Override
protected Keywords loadKeywords() {
return MYSQL_KEYWORDS;
}
public MySqlLexer(char[] input, int inputLength, boolean skipComment) {
@@ -954,26 +979,6 @@ public class MySqlLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
NextTokenPrefixN,
ScanString2PutDoubleBackslash,
JoinRightTableWith,
PostNaturalJoin,
MultipleJoinOn,
GroupByPostDesc,
GroupByItemOrder,
SQLDateExpr,
PrimaryLbraceOdbcEscape,
ParseSelectItemPrefixX,
ParseStatementListUpdatePlanCache,
ParseStatementListRollbackReturn,
ParseStatementListCommitReturn,
ParseDropTableTables,
AsSequence
);
this.dialectFeature.unconfigFeature(
AdditiveRestPipesAsConcat
);
this.dialectFeature = MYSQL_FEATURE;
}
}

View File

@@ -19,6 +19,7 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.mysql.MySQL;
import com.alibaba.druid.sql.dialect.mysql.ast.*;
import com.alibaba.druid.sql.dialect.mysql.ast.clause.*;
import com.alibaba.druid.sql.dialect.mysql.ast.clause.ConditionValue.ConditionType;
@@ -44,11 +45,11 @@ public class MySqlOutputVisitor extends SQLASTOutputVisitor implements MySqlASTV
}
public MySqlOutputVisitor(StringBuilder appender) {
super(appender, DbType.mysql);
super(appender, DbType.mysql, MySQL.DIALECT);
}
public MySqlOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.mysql, parameterized);
super(appender, DbType.mysql, MySQL.DIALECT, parameterized);
try {
configFromProperty();

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.odps;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Odps {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.odps);
}

View File

@@ -643,6 +643,7 @@ public class OdpsExprParser extends HiveExprParser {
if (expr instanceof SQLIdentifierExpr
&& ((SQLIdentifierExpr) expr).nameHashCode64() == FnvHash.Constants.NEW) {
SQLIdentifierExpr ident = (SQLIdentifierExpr) expr;
Lexer.SavePoint savePoint = lexer.markOut();
OdpsNewExpr newExpr = new OdpsNewExpr();
if (lexer.token() == Token.IDENTIFIER) { //.GSON
@@ -730,6 +731,9 @@ public class OdpsExprParser extends HiveExprParser {
} else {
expr = newExpr;
}
} else if (lexer.token() != Token.LPAREN) {
lexer.reset(savePoint);
return ident;
} else {
accept(Token.LPAREN);
this.exprList(newExpr.getArguments(), newExpr);
@@ -835,4 +839,8 @@ public class OdpsExprParser extends HiveExprParser {
return name;
}
protected SQLExpr relationalRestVariant(SQLExpr expr) {
return expr;
}
}
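
These OdpsExprParser hunks carry the "improved odps sql parser" commit: before committing to an OdpsNewExpr, the parser records a lexer savepoint, and when NEW turns out not to be followed by a parenthesized argument list it rewinds and returns the plain identifier instead of mis-parsing. A sketch of the backtracking step; markOut(), reset() and Token.LPAREN come from the diff, while the helper signature is illustrative:

import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.Token;

class NewExprBacktrackingSketch {
    static SQLExpr parseAfterNew(Lexer lexer, SQLIdentifierExpr ident) {
        Lexer.SavePoint savePoint = lexer.markOut(); // remember the position
        if (lexer.token() != Token.LPAREN) {
            lexer.reset(savePoint); // undo any lookahead: NEW was just a name
            return ident;
        }
        // ...accept LPAREN and build the OdpsNewExpr as in the diff...
        return ident; // placeholder for the constructed OdpsNewExpr
    }
}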

View File

@@ -28,23 +28,10 @@ import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
public class OdpsLexer extends HiveLexer {
public OdpsLexer(String input, SQLParserFeature... features) {
super(input);
init();
dbType = DbType.odps;
this.skipComment = true;
this.keepComments = false;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords ODPS_KEYWORDS;
static final DialectFeature ODPS_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -65,7 +52,76 @@ public class OdpsLexer extends HiveLexer {
map.put("MATCHED", Token.MATCHED);
map.put("；", Token.SEMI);
return new Keywords(map);
ODPS_KEYWORDS = new Keywords(map);
ODPS_FEATURE.configFeature(
ScanSQLTypeBlockComment,
ScanSQLTypeWithSemi,
ScanSQLTypeWithFunction,
ScanSQLTypeWithBegin,
ScanSQLTypeWithAt,
ScanVariableAt,
ScanVariableMoveToSemi,
ScanVariableSkipIdentifiers,
ScanNumberCommonProcess,
ScanHiveCommentDoubleSpace,
QueryRestSemi,
JoinAt,
UDJ,
TwoConsecutiveUnion,
RewriteGroupByCubeRollupToFunction,
PrimaryTwoConsecutiveSet,
ParseAllIdentifier,
PrimaryRestCommaAfterLparen,
InRestSpecificOperation,
ParseAssignItemEqSemiReturn,
ParseAssignItemEqeq,
ParseStatementListLparenContinue,
ParseRevokeFromUser,
ParseCreateSql,
TableAliasConnectWhere,
TableAliasTable,
TableAliasBetween,
TableAliasRest,
AliasLiteralFloat,
ScanSQLTypeWithFrom,
NextTokenColon,
ScanAliasU,
JoinRightTableFrom,
GroupByAll,
SQLDateExpr,
ParseAssignItemRparenCommaSetReturn,
TableAliasLock,
TableAliasPartition,
AsSkip,
AsSequence,
AsDatabase,
AsDefault
);
ODPS_FEATURE.unconfigFeature(
ParseStatementListSelectUnsupportedSyntax,
ScanNumberPrefixB,
ScanAliasU,
AcceptUnion,
PrimaryBangBangSupport
);
}
public OdpsLexer(String input, SQLParserFeature... features) {
super(input);
init();
dbType = DbType.odps;
this.skipComment = true;
this.keepComments = false;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected Keywords loadKeywords() {
return ODPS_KEYWORDS;
}
private void init() {
@@ -242,43 +298,6 @@ public class OdpsLexer extends HiveLexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanSQLTypeBlockComment,
ScanSQLTypeWithSemi,
ScanSQLTypeWithFunction,
ScanSQLTypeWithBegin,
ScanSQLTypeWithAt,
ScanVariableAt,
ScanVariableMoveToSemi,
ScanVariableSkipIdentifiers,
ScanNumberCommonProcess,
ScanHiveCommentDoubleSpace,
QueryRestSemi,
JoinAt,
UDJ,
TwoConsecutiveUnion,
RewriteGroupByCubeRollupToFunction,
PrimaryTwoConsecutiveSet,
ParseAllIdentifier,
PrimaryRestCommaAfterLparen,
InRestSpecificOperation,
ParseAssignItemEqSemiReturn,
ParseAssignItemEqeq,
ParseStatementListLparenContinue,
ParseRevokeFromUser,
ParseCreateSql,
TableAliasConnectWhere,
TableAliasTable,
TableAliasBetween,
TableAliasRest,
AliasLiteralFloat
);
this.dialectFeature.unconfigFeature(
ParseStatementListSelectUnsupportedSyntax,
ScanNumberPrefixB,
ScanAliasU,
AcceptUnion
);
this.dialectFeature = ODPS_FEATURE;
}
}

View File

@@ -23,6 +23,7 @@ import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.odps.Odps;
import com.alibaba.druid.sql.dialect.odps.ast.*;
import com.alibaba.druid.sql.visitor.VisitorFeature;
import com.alibaba.druid.util.FnvHash;
@@ -62,7 +63,7 @@ public class OdpsOutputVisitor extends HiveOutputVisitor implements OdpsASTVisit
}
public OdpsOutputVisitor(StringBuilder appender) {
super(appender, DbType.odps);
super(appender, DbType.odps, Odps.DIALECT);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.oracle;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Oracle {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.oracle);
}

View File

@@ -27,8 +27,9 @@ import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
public class OracleLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
static final Keywords ORACLE_KEYWORDS;
static final DialectFeature ORACLE_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("BEGIN", Token.BEGIN);
@@ -117,7 +118,20 @@ public class OracleLexer extends Lexer {
map.put("（", Token.LPAREN);
map.put("）", Token.RPAREN);
return new Keywords(map);
ORACLE_KEYWORDS = new Keywords(map);
ORACLE_FEATURE.configFeature(
ScanSQLTypeWithBegin,
SQLDateExpr,
PrimaryVariantColon,
CreateTableBodySupplemental,
AsCommaFrom
);
ORACLE_FEATURE.unconfigFeature(SQLTimestampExpr);
}
@Override
protected Keywords loadKeywords() {
return ORACLE_KEYWORDS;
}
public OracleLexer(char[] input, int inputLength, boolean skipComment) {
@@ -398,14 +412,6 @@ public class OracleLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanSQLTypeWithBegin,
SQLDateExpr,
PrimaryVariantColon,
CreateTableBodySupplemental,
AsCommaFrom
);
this.dialectFeature.unconfigFeature(SQLTimestampExpr);
this.dialectFeature = ORACLE_FEATURE;
}
}

View File

@@ -21,6 +21,7 @@ import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource.JoinType;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.dialect.oracle.Oracle;
import com.alibaba.druid.sql.dialect.oracle.ast.OracleDataTypeIntervalDay;
import com.alibaba.druid.sql.dialect.oracle.ast.OracleDataTypeIntervalYear;
import com.alibaba.druid.sql.dialect.oracle.ast.OraclePartitionSingle;
@@ -54,7 +55,7 @@ public class OracleOutputVisitor extends SQLASTOutputVisitor implements OracleAS
}
public OracleOutputVisitor(StringBuilder appender, boolean printPostSemi) {
super(appender, DbType.oracle);
super(appender, DbType.oracle, Oracle.DIALECT);
this.printPostSemi = printPostSemi;
}

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.oscar;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Oscar {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.oscar);
}

View File

@@ -25,9 +25,9 @@ import static com.alibaba.druid.sql.parser.CharTypes.isIdentifierChar;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class OscarLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords OSCAR_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -79,7 +79,12 @@ public class OscarLexer extends Lexer {
map.put("TEMPORARY", Token.TEMPORARY);
map.put("TEMP", Token.TEMP);
return new Keywords(map);
OSCAR_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return OSCAR_KEYWORDS;
}
public OscarLexer(String input, SQLParserFeature... features) {

View File

@@ -27,6 +27,7 @@ import com.alibaba.druid.sql.dialect.oracle.ast.stmt.*;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleFunctionDataType;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleProcedureDataType;
import com.alibaba.druid.sql.dialect.oracle.visitor.OracleASTVisitor;
import com.alibaba.druid.sql.dialect.oscar.Oscar;
import com.alibaba.druid.sql.dialect.oscar.ast.stmt.*;
import com.alibaba.druid.sql.dialect.oscar.ast.stmt.OscarSelectQueryBlock.FetchClause;
import com.alibaba.druid.sql.dialect.oscar.ast.stmt.OscarSelectQueryBlock.ForClause;
@@ -38,11 +39,11 @@ import java.util.Set;
public class OscarOutputVisitor extends SQLASTOutputVisitor implements OscarASTVisitor, OracleASTVisitor {
public OscarOutputVisitor(StringBuilder appender) {
super(appender, DbType.oscar);
super(appender, DbType.oscar, Oscar.DIALECT);
}
public OscarOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.oscar, parameterized);
super(appender, DbType.oscar, Oscar.DIALECT, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.phoenix;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Phoenix {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.phoenix);
}

View File

@@ -28,9 +28,9 @@ import java.util.Map;
* Created by wenshao on 16/9/13.
*/
public class PhoenixLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
public static final Keywords PHOENIX_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -47,7 +47,12 @@ public class PhoenixLexer extends Lexer {
map.put("MATCHED", Token.MATCHED);
map.put("UPSERT", Token.UPSERT);
return new Keywords(map);
PHOENIX_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return PHOENIX_KEYWORDS;
}
public PhoenixLexer(String input, SQLParserFeature... features) {

View File

@@ -16,6 +16,7 @@
package com.alibaba.druid.sql.dialect.phoenix.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.phoenix.Phoenix;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
/**
@@ -23,10 +24,10 @@ import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
*/
public class PhoenixOutputVisitor extends SQLASTOutputVisitor implements PhoenixASTVisitor {
public PhoenixOutputVisitor(StringBuilder appender) {
super(appender, DbType.phoenix);
super(appender, DbType.phoenix, Phoenix.DIALECT);
}
public PhoenixOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.phoenix, parameterized);
super(appender, DbType.phoenix, Phoenix.DIALECT, parameterized);
}
}

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.postgresql;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class PG {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.postgresql);
}

View File

@@ -27,9 +27,10 @@ import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class PGLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
public static final Keywords PG_KEYWORDS;
public static final DialectFeature PG_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -74,7 +75,17 @@ public class PGLexer extends Lexer {
map.put("INTERVAL", Token.INTERVAL);
map.put("LANGUAGE", Token.LANGUAGE);
return new Keywords(map);
PG_KEYWORDS = new Keywords(map);
PG_FEATURE.configFeature(
ScanVariableGreaterThan,
SQLDateExpr,
ParseStatementListWhen
);
}
@Override
protected Keywords loadKeywords() {
return PG_KEYWORDS;
}
public PGLexer(String input, SQLParserFeature... features) {
@@ -258,11 +269,6 @@ public class PGLexer extends Lexer {
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanVariableGreaterThan,
SQLDateExpr,
ParseStatementListWhen
);
this.dialectFeature = PG_FEATURE;
}
}

View File

@@ -16,6 +16,7 @@
package com.alibaba.druid.sql.dialect.postgresql.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
@@ -27,6 +28,7 @@ import com.alibaba.druid.sql.dialect.oracle.ast.stmt.*;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleFunctionDataType;
import com.alibaba.druid.sql.dialect.oracle.parser.OracleProcedureDataType;
import com.alibaba.druid.sql.dialect.oracle.visitor.OracleASTVisitor;
import com.alibaba.druid.sql.dialect.postgresql.PG;
import com.alibaba.druid.sql.dialect.postgresql.ast.expr.*;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.*;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.PGSelectQueryBlock.FetchClause;
@@ -43,16 +45,16 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
public PGOutputVisitor(StringBuilder appender) {
super(appender, DbType.postgresql);
}
public PGOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
public PGOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public PGOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.postgresql, parameterized);
super(appender, DbType.postgresql, PG.DIALECT, parameterized);
}
public PGOutputVisitor(StringBuilder appender, DbType dbType, boolean parameterized) {
super(appender, DbType.postgresql, parameterized);
public PGOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
super(appender, dbType, dialect, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.presto;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Presto {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.presto);
}

View File

@@ -1,6 +1,7 @@
package com.alibaba.druid.sql.dialect.presto.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.DialectFeature;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
@@ -13,9 +14,10 @@ import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.AsDataba
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.SQLDateExpr;
public class PrestoLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords PRESTO_KEYWORDS;
static final DialectFeature PRESTO_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -34,7 +36,16 @@ public class PrestoLexer extends Lexer {
map.put("IF", Token.IF);
return new Keywords(map);
PRESTO_KEYWORDS = new Keywords(map);
PRESTO_FEATURE.configFeature(
SQLDateExpr,
AsDatabase
);
}
@Override
protected Keywords loadKeywords() {
return PRESTO_KEYWORDS;
}
public PrestoLexer(String input, DbType dbType, SQLParserFeature... features) {
@@ -49,10 +60,6 @@ public class PrestoLexer extends Lexer {
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
SQLDateExpr,
AsDatabase
);
this.dialectFeature = PRESTO_FEATURE;
}
}

View File

@@ -16,11 +16,13 @@
package com.alibaba.druid.sql.dialect.presto.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLLimit;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLDecimalExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.presto.Presto;
import com.alibaba.druid.sql.dialect.presto.ast.PrestoColumnWith;
import com.alibaba.druid.sql.dialect.presto.ast.PrestoDateTimeExpr;
import com.alibaba.druid.sql.dialect.presto.ast.stmt.PrestoAlterFunctionStatement;
@@ -44,15 +46,19 @@ public class PrestoOutputVisitor extends SQLASTOutputVisitor implements PrestoAS
}
public PrestoOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
super(appender, dbType, Presto.DIALECT);
}
public PrestoOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public PrestoOutputVisitor(StringBuilder appender, boolean parameterized) {
this(appender, DbType.presto, parameterized);
this(appender, DbType.presto, Presto.DIALECT, parameterized);
}
public PrestoOutputVisitor(StringBuilder appender, DbType dbType, boolean parameterized) {
super(appender, dbType, parameterized);
public PrestoOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
super(appender, dbType, dialect, parameterized);
}
@Override

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.redshift;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Redshift {
public static final SQLDialect dialect = SQLDialect.of(DbType.redshift);
}

View File

@@ -11,9 +11,9 @@ import java.util.Map;
public class RedshiftLexer
extends PGLexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords REDSHIFT_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -73,7 +73,12 @@ public class RedshiftLexer
map.put("SORTKEY", Token.SORTKEY);
map.put("BACKUP", Token.BACKUP);
return new Keywords(map);
REDSHIFT_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return REDSHIFT_KEYWORDS;
}
public RedshiftLexer(String input, SQLParserFeature... features) {

View File

@@ -8,17 +8,16 @@ import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGOutputVisitor;
import com.alibaba.druid.sql.dialect.redshift.Redshift;
import com.alibaba.druid.sql.dialect.redshift.stmt.*;
public class RedshiftOutputVisitor extends PGOutputVisitor implements RedshiftASTVisitor {
public RedshiftOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.hologres;
super(appender, DbType.redshift, Redshift.dialect, parameterized);
}
public RedshiftOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.hologres;
super(appender, DbType.redshift, Redshift.dialect);
}
public boolean visit(RedshiftSelectQueryBlock x) {

View File

@@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.spark;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Spark {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.spark);
}

View File

@@ -8,8 +8,8 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.spark.visitor.SparkASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
@@ -22,7 +22,7 @@ import java.util.Map;
* @author peiheng.qph
* @version $Id: AntsparkCreateTableStatement.java, v 0.1 2018-09-14 15:02 peiheng.qph Exp $
*/
public class SparkCreateTableStatement extends SQLCreateTableStatement {
public class SparkCreateTableStatement extends HiveCreateTableStatement {
protected List<SQLAssignItem> mappedBy = new ArrayList<SQLAssignItem>(1);
protected List<SQLExpr> skewedBy = new ArrayList<SQLExpr>();
protected List<SQLExpr> skewedByOn = new ArrayList<SQLExpr>();

View File

@@ -6,6 +6,7 @@ package com.alibaba.druid.sql.dialect.spark.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.hive.parser.HiveLexer;
import com.alibaba.druid.sql.parser.DialectFeature;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
@@ -13,15 +14,19 @@ import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.NextTokenColon;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.ScanAliasU;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.ScanSQLTypeWithFrom;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
/**
* @author peiheng.qph
* @version $Id: AntsparkLexer.java, v 0.1 2018-09-14 15:04 peiheng.qph Exp $
*/
public class SparkLexer extends HiveLexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords SPARK_KEYWORDS;
static final DialectFeature SPARK_FEATURE = new DialectFeature();
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@@ -47,7 +52,33 @@ public class SparkLexer extends HiveLexer {
map.put("QUALIFY", Token.QUALIFY);
map.put("OR", Token.OR);
return new Keywords(map);
SPARK_KEYWORDS = new Keywords(map);
SPARK_FEATURE.configFeature(
QueryTable,
ParseSelectItemPrefixX,
JoinRightTableFrom,
ScanSQLTypeWithFrom,
NextTokenColon,
ScanAliasU,
JoinRightTableFrom,
GroupByAll,
SQLDateExpr,
ParseAssignItemRparenCommaSetReturn,
TableAliasLock,
TableAliasPartition,
AsSkip,
AsSequence,
AsDatabase,
AsDefault
);
SPARK_FEATURE.unconfigFeature(
PrimaryBangBangSupport
);
}
@Override
protected Keywords loadKeywords() {
return SPARK_KEYWORDS;
}
public SparkLexer(String input) {
@@ -66,11 +97,6 @@ public class SparkLexer extends HiveLexer {
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
QueryTable,
ParseSelectItemPrefixX,
JoinRightTableFrom
);
this.dialectFeature = SPARK_FEATURE;
}
}

View File

@@ -5,12 +5,14 @@
package com.alibaba.druid.sql.dialect.spark.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.expr.SQLHexExpr;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.spark.Spark;
import com.alibaba.druid.sql.dialect.spark.ast.SparkCreateTableStatement;
import com.alibaba.druid.sql.dialect.spark.ast.stmt.SparkCacheTableStatement;
import com.alibaba.druid.sql.dialect.spark.ast.stmt.SparkCreateScanStatement;
@@ -24,12 +26,12 @@ import java.util.Map.Entry;
* @version $Id: SparkOutputVisitor.java, v 0.1 2018-09-17 10:40 peiheng.qph Exp $
*/
public class SparkOutputASTVisitor extends HiveOutputVisitor implements SparkASTVisitor {
public SparkOutputASTVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
public SparkOutputASTVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public SparkOutputASTVisitor(StringBuilder appender) {
super(appender);
super(appender, DbType.spark, Spark.DIALECT);
}
// add using statement

View File

@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.sqlserver;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class SqlServer {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.sqlserver);
}
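
This holder class, like its siblings for StarRocks, SuperSql, Synapse, and TeraData below, resolves the dialect once per process so visitors can pass a shared instance instead of re-resolving it on every construction. A hedged sketch of the same idiom for any other dialect (the holder name is hypothetical):

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;

// Hypothetical holder mirroring the new per-dialect classes in this diff.
public final class MyDialectHolder {
    public static final SQLDialect DIALECT = SQLDialect.of(DbType.mysql);

    private MyDialectHolder() {
    }
}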

View File

@ -25,9 +25,9 @@ import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
import static com.alibaba.druid.sql.parser.Token.IDENTIFIER;
public class SQLServerLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords SQLSERVER_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -48,7 +48,12 @@ public class SQLServerLexer extends Lexer {
map.put("MATCHED", Token.MATCHED);
map.put("OPTION", Token.OPTION);
return new Keywords(map);
SQLSERVER_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return SQLSERVER_KEYWORDS;
}
public SQLServerLexer(char[] input, int inputLength, boolean skipComment) {

View File

@ -16,10 +16,12 @@
package com.alibaba.druid.sql.dialect.sqlserver.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLSequenceExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.sqlserver.SqlServer;
import com.alibaba.druid.sql.dialect.sqlserver.ast.SQLServerOutput;
import com.alibaba.druid.sql.dialect.sqlserver.ast.SQLServerSelectQueryBlock;
import com.alibaba.druid.sql.dialect.sqlserver.ast.expr.SQLServerObjectReferenceExpr;
@ -33,13 +35,20 @@ import java.util.List;
public class SQLServerOutputVisitor extends SQLASTOutputVisitor implements SQLServerASTVisitor {
public SQLServerOutputVisitor(StringBuilder appender) {
super(appender, DbType.sqlserver);
super(appender, DbType.sqlserver, SqlServer.DIALECT);
}
public SQLServerOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public SQLServerOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.sqlserver, parameterized);
super(appender, DbType.sqlserver, SqlServer.DIALECT, parameterized);
}
public SQLServerOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
super(appender, dbType, dialect, parameterized);
}
public boolean visit(SQLServerSelectQueryBlock x) {
print0(ucase ? "SELECT " : "select ");

View File

@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.starrocks;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class StarRocks {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.starrocks);
}

View File

@ -10,9 +10,9 @@ import java.util.HashMap;
import java.util.Map;
public class StarRocksLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords STARROCKS_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -37,7 +37,12 @@ public class StarRocksLexer extends Lexer {
map.put("RLIKE", Token.RLIKE);
map.put("USING", Token.USING);
map.put("OVERWRITE", Token.OVERWRITE);
return new Keywords(map);
STARROCKS_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return STARROCKS_KEYWORDS;
}
public StarRocksLexer(String input) {

View File

@ -1,6 +1,7 @@
package com.alibaba.druid.sql.dialect.starrocks.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
import com.alibaba.druid.sql.ast.DistributedByType;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
@ -15,6 +16,7 @@ import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLNotNullConstraint;
import com.alibaba.druid.sql.ast.statement.SQLNullConstraint;
import com.alibaba.druid.sql.ast.statement.SQLPrimaryKeyImpl;
import com.alibaba.druid.sql.dialect.starrocks.StarRocks;
import com.alibaba.druid.sql.dialect.starrocks.ast.StarRocksAggregateKey;
import com.alibaba.druid.sql.dialect.starrocks.ast.StarRocksDuplicateKey;
import com.alibaba.druid.sql.dialect.starrocks.ast.StarRocksIndexDefinition;
@ -33,15 +35,19 @@ public class StarRocksOutputVisitor extends SQLASTOutputVisitor implements StarR
}
public StarRocksOutputVisitor(StringBuilder appender) {
super(appender, DbType.starrocks);
super(appender, DbType.starrocks, StarRocks.DIALECT);
}
public StarRocksOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
public StarRocksOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
super(appender, dbType, dialect);
}
public StarRocksOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.starrocks, parameterized);
super(appender, DbType.starrocks, StarRocks.DIALECT, parameterized);
}
public StarRocksOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
super(appender, dbType, dialect, parameterized);
}
@Override

View File

@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.supersql;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class SuperSql {
public static final SQLDialect DIALECT = SQLDialect.of(DbType.supersql);
}

View File

@ -10,9 +10,9 @@ import java.util.HashMap;
import java.util.Map;
public class SuperSqlLexer extends PrestoLexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords SUPERSQL_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("FETCH", Token.FETCH);
map.put("FIRST", Token.FIRST);
@ -32,8 +32,14 @@ public class SuperSqlLexer extends PrestoLexer {
map.put("PARTITIONED", Token.PARTITIONED);
map.put("RLIKE", Token.RLIKE);
return new Keywords(map);
SUPERSQL_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return SUPERSQL_KEYWORDS;
}
public SuperSqlLexer(String input, SQLParserFeature... features) {
super(input, features);
this.dbType = DbType.supersql;

View File

@ -3,14 +3,15 @@ package com.alibaba.druid.sql.dialect.supersql.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLInsertStatement;
import com.alibaba.druid.sql.dialect.presto.visitor.PrestoOutputVisitor;
import com.alibaba.druid.sql.dialect.supersql.SuperSql;
public class SuperSqlOutputVisitor extends PrestoOutputVisitor implements SuperSqlASTVisitor {
public SuperSqlOutputVisitor(StringBuilder appender) {
super(appender, DbType.supersql);
super(appender, DbType.supersql, SuperSql.DIALECT);
}
public SuperSqlOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, DbType.supersql, parameterized);
super(appender, DbType.supersql, SuperSql.DIALECT, parameterized);
}
@Override

View File

@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.synapse;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class Synapse {
public static final SQLDialect dialect = SQLDialect.of(DbType.synapse);
}

View File

@ -2,17 +2,16 @@ package com.alibaba.druid.sql.dialect.synapse.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.sqlserver.visitor.SQLServerOutputVisitor;
import com.alibaba.druid.sql.dialect.synapse.Synapse;
import com.alibaba.druid.sql.dialect.synapse.ast.stmt.SynapseCreateTableStatement;
public class SynapseOutputVisitor extends SQLServerOutputVisitor implements SynapseASTVisitor {
public SynapseOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.synapse;
super(appender, DbType.synapse, Synapse.dialect);
}
public SynapseOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.synapse;
super(appender, DbType.synapse, Synapse.dialect, parameterized);
}
@Override

View File

@ -0,0 +1,8 @@
package com.alibaba.druid.sql.dialect.teradata;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLDialect;
public class TeraData {
public static final SQLDialect dialect = SQLDialect.of(DbType.teradata);
}

View File

@ -10,9 +10,9 @@ import java.util.HashMap;
import java.util.Map;
public class TDLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
static final Keywords TERADATA_KEYWORDS;
static {
Map<String, Token> map = new HashMap<>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -20,7 +20,12 @@ public class TDLexer extends Lexer {
map.put("TOP", Token.TOP);
map.put("QUALIFY", Token.QUALIFY);
return new Keywords(map);
TERADATA_KEYWORDS = new Keywords(map);
}
@Override
protected Keywords loadKeywords() {
return TERADATA_KEYWORDS;
}
public TDLexer(String input, SQLParserFeature... features) {

View File

@ -7,6 +7,7 @@ import com.alibaba.druid.sql.ast.SQLTop;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLPrimaryKeyImpl;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.dialect.teradata.TeraData;
import com.alibaba.druid.sql.dialect.teradata.ast.TDCreateTableStatement;
import com.alibaba.druid.sql.dialect.teradata.ast.TDDateDataType;
import com.alibaba.druid.sql.dialect.teradata.ast.TDNormalize;
@ -15,7 +16,7 @@ import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
public class TDOutputVisitor extends SQLASTOutputVisitor implements TDASTVisitor {
public TDOutputVisitor(StringBuilder appender) {
super(appender, DbType.teradata);
super(appender, DbType.teradata, TeraData.dialect);
}
protected void printSelectListBefore(SQLSelectQueryBlock x) {

View File

@ -39,6 +39,7 @@ import static com.alibaba.druid.sql.parser.Token.*;
* @author wenshao [szujobs@hotmail.com]
*/
public class Lexer {
protected static final DialectFeature FEATURE = new DialectFeature();
protected static SymbolTable symbols_l2 = new SymbolTable(512);
protected int features; //SQLParserFeature.of(SQLParserFeature.EnableSQLBinaryOpExprGroup);
@ -265,7 +266,7 @@ public class Lexer {
}
protected void initDialectFeature() {
this.dialectFeature = new DialectFeature();
this.dialectFeature = FEATURE;
}
public Lexer(char[] input, int inputLength, boolean skipComment) {
@ -2278,7 +2279,6 @@ public class Lexer {
if (ch != ':' && ch != '#' && ch != '$' && !(ch == '@' && dialectFeatureEnabled(ScanVariableAt))) {
throw new ParserException("illegal variable. " + info());
}
boolean templateParameter = false;
mark = pos;
bufPos = 1;
char ch;
@ -2296,14 +2296,13 @@ public class Lexer {
boolean ident = false;
for (; ; ) {
ch = charAt(++pos);
if (isEOF() || (templateParameter && (ch == ';' || ch == '' || ch == '\r'))) {
if (isEOF() || ch == ';' || ch == '' || ch == '\r' || ch == '\n') {
pos--;
bufPos--;
break;
}
if (ch == '}' && !ident) {
templateParameter = false;
if (isIdentifierChar(charAt(pos + 1))) {
bufPos++;
ident = true;
@ -2314,7 +2313,6 @@ public class Lexer {
if (ident && ch == '$') {
if (charAt(pos + 1) == '{') {
templateParameter = true;
bufPos++;
ident = false;
continue;

View File

@ -3532,13 +3532,11 @@ public class SQLExprParser extends SQLParser {
expr = new SQLBinaryOpExpr(expr, operator, rightExp, dbType);
} else if (token == Token.VARIANT) {
String value = lexer.stringVal();
lexer.nextToken();
SQLExpr variantExpr = new SQLVariantRefExpr(value);
if (lexer.token == Token.IN) {
variantExpr = inRest(variantExpr);
SQLExpr expr1 = relationalRestVariant(expr);
if (expr1 == expr) {
break;
}
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Blank, variantExpr, dbType);
expr = expr1;
} else {
break;
}
@ -3627,13 +3625,11 @@ public class SQLExprParser extends SQLParser {
expr = new SQLBinaryOpExpr(expr, op, rightExp, dbType);
} else if (lexer.token == Token.VARIANT) {
String value = lexer.stringVal();
lexer.nextToken();
SQLExpr variantExpr = new SQLVariantRefExpr(value);
if (lexer.token == Token.IN) {
variantExpr = inRest(variantExpr);
SQLExpr expr1 = relationalRestVariant(expr);
if (expr == expr1) {
break;
}
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Blank, variantExpr, dbType);
expr = expr1;
} else {
break;
}
@ -3994,10 +3990,7 @@ public class SQLExprParser extends SQLParser {
}
break;
case VARIANT:
rightExp = new SQLVariantRefExpr(lexer.stringVal);
expr = new SQLBinaryOpExpr(expr, SQLBinaryOperator.Blank, rightExp, dbType);
lexer.nextToken();
return expr;
return relationalRestVariant(expr);
default:
return expr;
}
@ -4033,6 +4026,16 @@ public class SQLExprParser extends SQLParser {
return expr;
}
protected SQLExpr relationalRestVariant(SQLExpr expr) {
String value = lexer.stringVal();
lexer.nextToken();
SQLExpr variantExpr = new SQLVariantRefExpr(value);
if (lexer.token == Token.IN) {
variantExpr = inRest(variantExpr);
}
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Blank, variantExpr, dbType);
}
public SQLExpr notRationalRest(SQLExpr expr, boolean global) {
switch (lexer.token) {
case LIKE:
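
The duplicated VARIANT handling in relationalRest is now funneled through relationalRestVariant; callers treat an unchanged return value as "no match" and break out of the loop, while the base implementation always wraps the variant into a Blank-operator SQLBinaryOpExpr, applying inRest when IN follows. A hedged usage sketch (assumes druid-core on the classpath; the ODPS dialect and the ${extraFilter} placeholder are illustrative only):

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;

public class VariantRestDemo {
    public static void main(String[] args) {
        // The trailing template variant is folded into the WHERE expression
        // as a Blank-operator binary expression rather than a parse error.
        SQLStatement stmt = SQLUtils.parseSingleStatement(
                "select * from t where id > 0 ${extraFilter}", DbType.odps);
        System.out.println(SQLUtils.toSQLString(stmt, DbType.odps));
    }
}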

View File

@ -57,7 +57,7 @@ import com.alibaba.druid.sql.dialect.hologres.parser.HologresStatementParser;
import com.alibaba.druid.sql.dialect.impala.parser.ImpalaExprParser;
import com.alibaba.druid.sql.dialect.impala.parser.ImpalaLexer;
import com.alibaba.druid.sql.dialect.impala.parser.ImpalaStatementParser;
import com.alibaba.druid.sql.dialect.infomix.parser.InformixStatementParser;
import com.alibaba.druid.sql.dialect.informix.parser.InformixStatementParser;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlSelectQueryBlock;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlExprParser;
import com.alibaba.druid.sql.dialect.mysql.parser.MySqlLexer;
@ -730,7 +730,7 @@ public class SQLParserUtils {
set = true;
}
if (lexer.identifierEquals("ADD") && (dbType == DbType.hive || dbType == DbType.odps)) {
if (lexer.identifierEquals("ADD") && (dbType == DbType.hive || dbType == DbType.odps || dbType == DbType.spark)) {
lexer.nextToken();
if (lexer.identifierEquals("JAR")) {
lexer.nextPath();
@ -902,7 +902,7 @@ public class SQLParserUtils {
}
prePos = lexer.pos;
if (lexer.identifierEquals("ADD") && (dbType == DbType.hive || dbType == DbType.odps)) {
if (lexer.identifierEquals("ADD") && (dbType == DbType.hive || dbType == DbType.odps || dbType == DbType.spark)) {
lexer.nextToken();
if (lexer.identifierEquals("JAR")) {
lexer.nextPath();
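
Both hunks extend the special-cased ADD JAR handling during statement splitting from hive and odps to spark, so a jar path is consumed with nextPath() instead of being tokenized as SQL. A hedged sketch of the effect (SQLParserUtils.split(String, DbType) is assumed to be the splitting entry point in this version):

import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.SQLParserUtils;

import java.util.List;

public class SplitDemo {
    public static void main(String[] args) {
        // With the change, the jar path survives splitting as one statement
        // for spark, as it already did for hive and odps.
        List<String> parts = SQLParserUtils.split(
                "add jar hdfs:///libs/udf.jar;\nselect 1;", DbType.spark);
        System.out.println(parts);
    }
}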

View File

@ -1340,6 +1340,7 @@ public class SQLSelectParser extends SQLParser {
return tableSrc;
}
protected SQLExprTableSource getTableSource() {
return new SQLExprTableSource();
}
@ -1539,6 +1540,13 @@ public class SQLSelectParser extends SQLParser {
tableSource.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token == Token.HINT && dbType == DbType.odps) {
List<SQLCommentHint> hints = this.exprParser.parseHints();
for (SQLCommentHint hint : hints) {
tableSource.addAfterComment(hint.getText());
}
}
if (tableSource.getAlias() == null || tableSource.getAlias().length() == 0) {
Token token = lexer.token;
long hash;

View File

@ -4923,6 +4923,9 @@ public class SQLStatementParser extends SQLParser {
}
lexer.nextToken();
if (lexer.token == WHERE) {
break;
}
}
}

View File

@ -29,7 +29,7 @@ import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKOutputVisitor;
import com.alibaba.druid.sql.dialect.db2.visitor.DB2OutputVisitor;
import com.alibaba.druid.sql.dialect.h2.visitor.H2OutputVisitor;
import com.alibaba.druid.sql.dialect.hologres.visitor.HologresOutputVisitor;
import com.alibaba.druid.sql.dialect.infomix.visitor.InformixOutputVisitor;
import com.alibaba.druid.sql.dialect.informix.visitor.InformixOutputVisitor;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlInsertStatement;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitor;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlOutputVisitor;

View File

@ -120,19 +120,29 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
}
public SQLASTOutputVisitor(StringBuilder appender, DbType dbType) {
this(appender, dbType, SQLDialect.of(dbType));
}
public SQLASTOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect) {
this.appender = appender;
this.dbType = dbType;
this.dialect = SQLDialect.of(dbType);
this.dialect = dialect;
}
public SQLASTOutputVisitor(StringBuilder appender, boolean parameterized) {
this.appender = appender;
this.dialect = SQLDialect.of(dbType);
this.config(VisitorFeature.OutputParameterized, parameterized);
}
public SQLASTOutputVisitor(StringBuilder appender, DbType dbType, boolean parameterized) {
this(appender, dbType, SQLDialect.of(dbType), parameterized);
}
public SQLASTOutputVisitor(StringBuilder appender, DbType dbType, SQLDialect dialect, boolean parameterized) {
this.appender = appender;
this.dbType = dbType;
this.dialect = SQLDialect.of(dbType);
this.dialect = dialect;
this.config(VisitorFeature.OutputParameterized, parameterized);
}
@ -1043,7 +1053,7 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
boolean printOpSpace = true;
if (relational) {
if (dbType == DbType.hive && x.getParent() instanceof SQLMethodInvokeExpr) {
if ((DbType.hive == dbType || DbType.spark == dbType) && x.getParent() instanceof SQLMethodInvokeExpr) {
print(' ');
} else {
println();
@ -5281,7 +5291,7 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
public boolean visit(SQLAlterTableAddColumn x) {
print0(ucase ? "ADD" : "add");
if (DbType.odps == dbType || DbType.hive == dbType) {
if (DbType.odps == dbType || DbType.hive == dbType || DbType.spark == dbType) {
print0(ucase ? " COLUMNS" : " columns");
}
@ -5512,7 +5522,7 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
|| dbType == DbType.mariadb) {
println();
print0(ucase ? "WITH (" : "with (");
} else if (dbType == DbType.hive || dbType == DbType.presto || dbType == DbType.trino || dbType == DbType.supersql) {
} else if (dbType == DbType.hive || dbType == DbType.presto || dbType == DbType.trino || dbType == DbType.supersql || dbType == DbType.spark) {
println();
print0(ucase ? "WITH DBPROPERTIES (" : "with dbproperties (");
} else {
@ -6103,7 +6113,7 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
public boolean visit(SQLAlterTableAlterColumn x) {
if (DbType.odps == dbType) {
print0(ucase ? "CHANGE COLUMN" : "change column");
} else if (DbType.hive == dbType) {
} else if (DbType.hive == dbType || DbType.spark == dbType) {
print0(ucase ? "CHANGE" : "change");
} else {
print0(ucase ? "ALTER COLUMN" : "alter column");
@ -6324,7 +6334,7 @@ public class SQLASTOutputVisitor extends SQLASTVisitorAdapter implements Paramet
SQLAlterTableItem item = x.getItems().get(i);
if (i != 0) {
SQLAlterTableItem former = x.getItems().get(i - 1);
if ((this.dbType == DbType.hive || this.dbType == DbType.odps)
if ((this.dbType == DbType.hive || this.dbType == DbType.odps || this.dbType == DbType.spark)
&& former instanceof SQLAlterTableAddPartition
&& item instanceof SQLAlterTableAddPartition) {
// ignore comma

View File

@ -82,7 +82,7 @@ public class SQLTableAliasCollectVisitor extends SQLASTVisitorAdapter {
public boolean visit(SQLExprTableSource x) {
String alias = x.getAlias();
if (alias == null) {
if (alias != null) {
SQLExpr expr = x.getExpr();
if (expr instanceof SQLName) {
long hashCode64 = ((SQLName) expr).nameHashCode64();

View File

@ -444,7 +444,7 @@ public class TableStat {
private boolean having;
private boolean join;
private boolean primaryKey; // for ddl
private boolean unique; //
private boolean unique;
private boolean update;
private Map<String, Object> attributes = new HashMap<String, Object>();
private transient String fullName;
@ -631,7 +631,7 @@ public class TableStat {
DropIndex(256), //
CreateIndex(512), //
Replace(1024),
DESC(2048); //
DESC(2048);
public final int mark;

View File

@ -813,7 +813,7 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
public List<MonitorApp> listApp(String domain) throws SQLException {
List<MonitorApp> list = new ArrayList<MonitorApp>();
String sql = "select id, domain, app from druid_app " //
String sql = "select id, domain, app from druid_app "
+ " where domain = ?";
Connection conn = null;
PreparedStatement stmt = null;
@ -838,7 +838,7 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
}
public MonitorApp findApp(String domain, String app) throws SQLException {
String sql = "select id, domain, app from druid_app " //
String sql = "select id, domain, app from druid_app "
+ " where domain = ? and app = ?";
Connection conn = null;
PreparedStatement stmt = null;
@ -876,7 +876,7 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
public List<MonitorCluster> listCluster(String domain, String app) throws SQLException {
List<MonitorCluster> list = new ArrayList<MonitorCluster>();
String sql = "select id, domain, app, cluster from druid_cluster " //
String sql = "select id, domain, app, cluster from druid_cluster "
+ " where domain = ?";
if (app != null) {
@ -919,7 +919,7 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
}
public MonitorCluster findCluster(String domain, String app, String cluster) throws SQLException {
String sql = "select id, domain, app, cluster from druid_cluster " //
String sql = "select id, domain, app, cluster from druid_cluster "
+ " where domain = ? and app = ? and cluster = ?";
Connection conn = null;
PreparedStatement stmt = null;
@ -960,19 +960,19 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
Date startTime, long pid) throws SQLException {
MonitorInstance monitorInst = findInst(domain, app, cluster, host);
if (monitorInst == null) {
String sql = "insert into druid_inst (domain, app, cluster, host, ip, lastActiveTime, lastPID) " //
String sql = "insert into druid_inst (domain, app, cluster, host, ip, lastActiveTime, lastPID) "
+ " values (?, ?, ?, ?, ?, ?, ?)";
JdbcUtils.execute(dataSource, sql, domain, app, cluster, host, ip, startTime, pid);
} else {
String sql = "update druid_inst set ip = ?, lastActiveTime = ?, lastPID = ? " //
String sql = "update druid_inst set ip = ?, lastActiveTime = ?, lastPID = ? "
+ " where domain = ? and app = ? and cluster = ? and host = ? ";
JdbcUtils.execute(dataSource, sql, ip, startTime, pid, domain, app, cluster, host);
}
}
public MonitorInstance findInst(String domain, String app, String cluster, String host) throws SQLException {
String sql = "select id, domain, app, cluster, host, ip, lastActiveTime, lastPID from druid_inst " //
+ " where domain = ? and app = ? and cluster = ? and host = ? " //
String sql = "select id, domain, app, cluster, host, ip, lastActiveTime, lastPID from druid_inst "
+ " where domain = ? and app = ? and cluster = ? and host = ? "
+ " limit 1";
Connection conn = null;
PreparedStatement stmt = null;
@ -1002,7 +1002,7 @@ public class MonitorDaoJdbcImpl implements MonitorDao {
public List<MonitorInstance> listInst(String domain, String app, String cluster) throws SQLException {
List<MonitorInstance> list = new ArrayList<MonitorInstance>();
String sql = "select id, domain, app, cluster, host, ip, lastActiveTime, lastPID from druid_inst " //
String sql = "select id, domain, app, cluster, host, ip, lastActiveTime, lastPID from druid_inst "
+ "where domain = ?";
if (app != null) {

View File

@ -880,7 +880,7 @@ public final class JdbcUtils implements JdbcConstants {
StringBuilder sql = new StringBuilder() //
.append("insert into ") //
.append(tableName) //
.append("("); //
.append("(");
int nameCount = 0;
for (String name : names) {

View File

@ -0,0 +1 @@
quote=`

View File

@ -0,0 +1,151 @@
ADD
ALL
ALTER
ANALYZE
AND
ARRAY
AS
ASC
BETWEEN
BIGINT
BITMAP
BOTH
BY
CASE
CHAR
CHARACTER
CHECK
COLLATE
COLUMN
COMPACTION
CONVERT
CREATE
CROSS
CUBE
CURRENT_DATE
CURRENT_TIME
CURRENT_TIMESTAMP
CURRENT_USER
CURRENT_ROLE
DATABASE
DATABASES
DECIMAL
DECIMALV2
DECIMAL32
DECIMAL64
DECIMAL128
DEFAULT
DELETE
DENSE_RANK
DESC
DESCRIBE
DISTINCT
DOUBLE
DROP
DUAL
DEFERRED
ELSE
EXCEPT
EXISTS
EXPLAIN
FALSE
FIRST_VALUE
FLOAT
FOR
FORCE
FROM
FULL
FUNCTION
GRANT
GROUP
GROUPS
GROUPING
GROUPING_ID
HAVING
HLL
HOST
IF
IGNORE
IN
INDEX
INFILE
INNER
INSERT
INT
INTEGER
INTERSECT
INTO
IS
IMMEDIATE
JOIN
JSON
KEY
KEYS
KILL
LAG
LARGEINT
LAST_VALUE
LATERAL
LEAD
LEFT
LIKE
LIMIT
LOAD
LOCALTIME
LOCALTIMESTAMP
MAXVALUE
MINUS
MOD
NTILE
NOT
NULL
ON
OR
ORDER
OUTER
OUTFILE
OVER
PARTITION
PERCENTILE
PRIMARY
PROCEDURE
QUALIFY
RANGE
RANK
READ
REGEXP
RELEASE
RENAME
REPLACE
REVOKE
RIGHT
RLIKE
ROW
ROWS
ROW_NUMBER
SCHEMA
SCHEMAS
SELECT
SET
SET_VAR
SHOW
SMALLINT
SYSTEM
TABLE
TERMINATED
TEXT
THEN
TINYINT
TO
TRUE
UNION
UNIQUE
UNSIGNED
UPDATE
USE
USING
VALUES
VARCHAR
WHEN
WHERE
WITH

View File

@ -0,0 +1,160 @@
ALL
ALTER
AND
ARRAY
AS
AUTHORIZATION
BETWEEN
BIGINT
BINARY
BOOLEAN
BOTH
BY
CASE
CAST
CHAR
COLUMN
CONF
CREATE
CROSS
CUBE
CURRENT
CURRENT_DATE
CURRENT_TIMESTAMP
CURSOR
DATABASE
DATE
DECIMAL
DELETE
DESCRIBE
DISTINCT
DOUBLE
DROP
ELSE
END
EXCHANGE
EXISTS
EXTENDED
EXTERNAL
FALSE
FETCH
FLOAT
FOLLOWING
FOR
FROM
FULL
FUNCTION
GRANT
GROUP
GROUPING
HAVING
IF
IMPORT
IN
INNER
INSERT
INT
INTERSECT
INTERVAL
INTO
IS
JOIN
LATERAL
LEFT
LESS
LIKE
LOCAL
MACRO
MAP
MORE
NONE
NOT
NULL
OF
ON
OR
ORDER
OUT
OUTER
OVER
PARTIALSCAN
PARTITION
PERCENT
PRECEDING
PRESERVE
PROCEDURE
RANGE
READS
REDUCE
REVOKE
RIGHT
ROLLUP
ROW
ROWS
SELECT
SET
SMALLINT
TABLE
TABLESAMPLE
THEN
TIMESTAMP
TO
TRANSFORM
TRIGGER
TRUE
TRUNCATE
UNBOUNDED
UNION
UNIQUEJOIN
UPDATE
USER
USING
UTC_TMESTAMP
VALUES
VARCHAR
WHEN
WHERE
WINDOW
WITH
COMMIT
ONLY
REGEXP
RLIKE
ROLLBACK
START
CACHE
CONSTRAINT
FOREIGN
PRIMARY
REFERENCES
EXCEPT
EXTRACT
FLOOR
GROUPING
INTEGER
MINUS
PRECISION
MERGE
ANY
APPLICATION
DEC
NUMERIC
SYNC
TIME
TIMESTAMPLOCALTZ
UNIQUE
COMPACTIONID
CONNECTOR
CONNECTORS
CONVERT
DDL
FORCE
LEADING
OLDER
PKFK_JOIN
PREPARE
QUALIFY
REAL
SOME
THAN
TRAILING

View File

@ -0,0 +1,23 @@
TINYINT
SMALLINT
MEDIUMINT
INT
INTEGER
BIGINT
DECIMAL
NUMERIC
FLOAT
DOUBLE
DATE
DATETIME
TIMESTAMP
INTERVAL
STRING
VARCHAR
CHAR
BOOLEAN
BINARY
ARRAY
MAP
STRUCT
UNIONTYPE

View File

@ -0,0 +1 @@
quote=`
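
The new resource files above move per-dialect configuration (quote character, reserved words, built-in type names) out of lexer code and onto the classpath. A minimal sketch of reading one such file with plain JDK APIs (the META-INF/druid/parser/<dialect> path is an assumption here; the diff shows file contents but not their locations):

import java.io.InputStream;
import java.util.Properties;

public class DialectPropsDemo {
    public static void main(String[] args) throws Exception {
        String path = "META-INF/druid/parser/spark/dialect.properties"; // assumed path
        Properties props = new Properties();
        try (InputStream in = DialectPropsDemo.class.getClassLoader()
                .getResourceAsStream(path)) {
            if (in != null) {
                props.load(in);
            }
        }
        System.out.println(props.getProperty("quote", "\"")); // expect `
    }
}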

Some files were not shown because too many files have changed in this diff.