Merge branch 'alibaba:master' into master

This commit is contained in:
AdolphLv 2024-08-13 09:03:41 +08:00 committed by GitHub
commit 00fe8a503b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
413 changed files with 10384 additions and 16050 deletions

View File

@ -80,6 +80,8 @@ public enum DbType {
bigquery(1L << 48),
impala(1L << 49),
ingres(0),
cloudscape(0),
timesten(0),
@ -136,7 +138,7 @@ public enum DbType {
}
public static boolean isPostgreSQLDbStyle(DbType dbType) {
return dbType == DbType.postgresql || dbType == DbType.edb || dbType == DbType.greenplum;
return dbType == DbType.postgresql || dbType == DbType.edb || dbType == DbType.greenplum || dbType == DbType.hologres;
}
public final boolean equals(String other) {
return this == of(other);

View File

@ -1,998 +0,0 @@
/*
* Copyright 1999-2017 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.sql;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLDataTypeImpl;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.oracle.ast.clause.OracleWithSubqueryEntry;
import com.alibaba.druid.sql.dialect.oracle.ast.expr.OracleSysdateExpr;
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleSelectJoin;
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleSelectQueryBlock;
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleSelectSubqueryTableSource;
import com.alibaba.druid.sql.dialect.oracle.ast.stmt.OracleSelectTableReference;
import com.alibaba.druid.util.FnvHash;
import java.util.List;
public class SQLTransformUtils {
public static SQLExpr transformDecode(SQLMethodInvokeExpr x) {
    // Rewrites an Oracle DECODE(expr, search1, result1, ..., [default]) call
    // into a standard CASE expression. The 4-argument form is instead rewritten
    // in place into an if(condition, thenValue, elseValue) call (x is mutated
    // and returned). Throws IllegalArgumentException if x is not DECODE.
    if (x == null) {
        return null;
    }
    if (FnvHash.Constants.DECODE != x.methodNameHashCode64()) {
        throw new IllegalArgumentException(x.getMethodName());
    }
    List<SQLExpr> arguments = x.getArguments();
    SQLCaseExpr caseExpr = new SQLCaseExpr();
    caseExpr.setParent(x.getParent());
    caseExpr.setValueExpr(arguments.get(0));
    if (arguments.size() == 4) {
        // DECODE(a, b, v1, v2) -> if(a = b, v1, v2); DECODE treats NULL as
        // equal to NULL, so a NULL search value becomes an IS NULL test.
        SQLExpr param1 = arguments.get(1);
        x.setMethodName("if");
        SQLBinaryOpExpr condition;
        if (param1 instanceof SQLNullExpr) {
            condition = new SQLBinaryOpExpr(arguments.get(0), SQLBinaryOperator.Is, param1);
        } else {
            condition = new SQLBinaryOpExpr(arguments.get(0), SQLBinaryOperator.Equality, param1);
        }
        condition.setParent(x);
        // Collapse the argument list to (condition, thenValue, elseValue).
        arguments.set(0, condition);
        arguments.set(1, arguments.get(2));
        arguments.set(2, arguments.get(3));
        arguments.remove(3);
        return x;
    }
    // General form: consume (search, result) pairs as CASE WHEN items.
    for (int i = 1; i + 1 < arguments.size(); i += 2) {
        SQLCaseExpr.Item item = new SQLCaseExpr.Item();
        SQLExpr conditionExpr = arguments.get(i);
        item.setConditionExpr(conditionExpr);
        SQLExpr valueExpr = arguments.get(i + 1);
        if (valueExpr instanceof SQLMethodInvokeExpr) {
            SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) valueExpr;
            if (FnvHash.Constants.DECODE == methodInvokeExpr.methodNameHashCode64()) {
                // Nested DECODE results are transformed recursively.
                valueExpr = transformDecode(methodInvokeExpr);
            }
        }
        item.setValueExpr(valueExpr);
        caseExpr.addItem(item);
    }
    // An even argument count means a trailing default -> CASE ... ELSE default.
    if (arguments.size() % 2 == 0) {
        SQLExpr defaultExpr = arguments.get(arguments.size() - 1);
        if (defaultExpr instanceof SQLMethodInvokeExpr) {
            SQLMethodInvokeExpr methodInvokeExpr = (SQLMethodInvokeExpr) defaultExpr;
            if (FnvHash.Constants.DECODE == methodInvokeExpr.methodNameHashCode64()) {
                defaultExpr = transformDecode(methodInvokeExpr);
            }
        }
        caseExpr.setElseExpr(defaultExpr);
    }
    caseExpr.setParent(x.getParent());
    return caseExpr;
}
public static SQLDataType transformOracleToMySql(SQLDataType x) {
    // Maps an Oracle column data type onto the closest MySQL equivalent.
    // Returns a new node (re-parented like x) when a mapping applies,
    // otherwise x itself (possibly mutated, e.g. for BLOB/CLOB argument lists).
    final String name = x.getName();
    final long nameHash = x.nameHashCode64();
    if (name == null) {
        return x;
    }
    List<SQLExpr> argumentns = x.getArguments();
    SQLDataType dataType;
    if (nameHash == FnvHash.Constants.UROWID) {
        // UROWID(n) -> varchar(n); default 4000 (Oracle's UROWID maximum).
        int len = 4000;
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLIntegerExpr) {
                len = ((SQLIntegerExpr) arg0).getNumber().intValue();
            }
        }
        dataType = new SQLDataTypeImpl("varchar", len);
    } else if (nameHash == FnvHash.Constants.ROWID) {
        dataType = new SQLDataTypeImpl("char", 10);
    } else if (nameHash == FnvHash.Constants.BOOLEAN) {
        dataType = new SQLDataTypeImpl("tinyint");
    } else if (nameHash == FnvHash.Constants.INTEGER) {
        dataType = new SQLDataTypeImpl("int");
    } else if (nameHash == FnvHash.Constants.FLOAT
            || nameHash == FnvHash.Constants.BINARY_FLOAT) {
        dataType = new SQLDataTypeImpl("float");
    } else if (nameHash == FnvHash.Constants.REAL
            || nameHash == FnvHash.Constants.BINARY_DOUBLE
            || nameHash == FnvHash.Constants.DOUBLE_PRECISION) {
        dataType = new SQLDataTypeImpl("double");
    } else if (nameHash == FnvHash.Constants.NUMBER) {
        if (argumentns.isEmpty()) {
            // Unconstrained NUMBER -> decimal(38) (Oracle's maximum precision).
            dataType = new SQLDataTypeImpl("decimal", 38);
        } else {
            SQLExpr arg0 = argumentns.get(0);
            int precision, scale = 0;
            if (arg0 instanceof SQLAllColumnExpr) {
                // NUMBER(*) - wildcard precision; starts at 9, may grow below.
                precision = 9;
            } else {
                precision = ((SQLIntegerExpr) arg0).getNumber().intValue();
            }
            if (argumentns.size() > 1) {
                scale = ((SQLIntegerExpr) argumentns.get(1)).getNumber().intValue();
            }
            // MySQL requires precision >= scale; widen precision if needed.
            if (scale > precision) {
                if (arg0 instanceof SQLAllColumnExpr) {
                    precision = 19;
                    if (scale > precision) {
                        precision = scale;
                    }
                } else {
                    precision = scale;
                }
            }
            if (scale == 0) {
                // Integral NUMBER: pick the smallest MySQL integer type that
                // can hold the declared number of decimal digits.
                if (precision < 3) {
                    dataType = new SQLDataTypeImpl("tinyint");
                } else if (precision < 5) {
                    dataType = new SQLDataTypeImpl("smallint");
                } else if (precision < 9) {
                    dataType = new SQLDataTypeImpl("int");
                } else if (precision <= 20) {
                    dataType = new SQLDataTypeImpl("bigint");
                } else {
                    dataType = new SQLDataTypeImpl("decimal", precision);
                }
            } else {
                dataType = new SQLDataTypeImpl("decimal", precision, scale);
            }
        }
    } else if (nameHash == FnvHash.Constants.DEC
            || nameHash == FnvHash.Constants.DECIMAL) {
        dataType = x.clone();
        dataType.setName("decimal");
        int precision = 0;
        if (argumentns.size() > 0) {
            precision = ((SQLIntegerExpr) argumentns.get(0)).getNumber().intValue();
        }
        int scale = 0;
        if (argumentns.size() > 1) {
            scale = ((SQLIntegerExpr) argumentns.get(1)).getNumber().intValue();
            // Clamp scale down to precision when the declaration is invalid.
            if (precision < scale) {
                ((SQLIntegerExpr) dataType.getArguments().get(1)).setNumber(precision);
            }
        }
        // if (precision == 38 && scale == 0 && x.getParent() instanceof SQLCastExpr) {
        // dataType.getArguments().clear();
        // dataType.setName("int");
        // }
        /////////////////////////////////
    } else if (nameHash == FnvHash.Constants.RAW) {
        // RAW(n): binary up to 255 bytes, varbinary beyond; bare RAW -> binary.
        int len;
        if (argumentns.isEmpty()) {
            len = -1;
        } else if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
        if (len == -1) {
            dataType = new SQLDataTypeImpl("binary");
        } else if (len <= 255) {
            dataType = new SQLDataTypeImpl("binary", len);
        } else {
            dataType = new SQLDataTypeImpl("varbinary", len);
        }
    } else if (nameHash == FnvHash.Constants.CHAR
            || nameHash == FnvHash.Constants.CHARACTER) {
        // CHAR(n): MySQL char caps at 255, longer values become varchar.
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            if (len <= 255) {
                dataType = new SQLCharacterDataType("char", len);
            } else {
                dataType = new SQLCharacterDataType("varchar", len);
            }
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType("char");
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.NCHAR) {
        // Same 255 cutoff as CHAR, but in the national character set.
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            if (len <= 255) {
                dataType = new SQLCharacterDataType("nchar", len);
            } else {
                dataType = new SQLCharacterDataType("nvarchar", len);
            }
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType("nchar");
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.VARCHAR2) {
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType("varchar", len);
        } else {
            dataType = new SQLCharacterDataType("varchar");
        }
    } else if (nameHash == FnvHash.Constants.NVARCHAR2) {
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType("nvarchar", len);
        } else {
            dataType = new SQLCharacterDataType("nvarchar");
        }
    } else if (nameHash == FnvHash.Constants.BFILE) {
        // MySQL has no external-file type; stored as a 255-char locator string.
        dataType = new SQLCharacterDataType("varchar", 255);
    } else if (nameHash == FnvHash.Constants.DATE
            || nameHash == FnvHash.Constants.TIMESTAMP) {
        // Oracle DATE carries time-of-day, so both map to MySQL datetime;
        // fractional-second precision is clamped to MySQL's maximum of 6.
        int len = -1;
        if (argumentns.size() > 0) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
        }
        if (len >= 0) {
            if (len > 6) {
                len = 6;
            }
            dataType = new SQLDataTypeImpl("datetime", len);
        } else {
            dataType = new SQLDataTypeImpl("datetime");
        }
    } else if (nameHash == FnvHash.Constants.BLOB
            || nameHash == FnvHash.Constants.LONG_RAW) {
        // NOTE: clears x's argument list even though a new node is returned.
        argumentns.clear();
        dataType = new SQLDataTypeImpl("LONGBLOB");
    } else if (nameHash == FnvHash.Constants.CLOB
            || nameHash == FnvHash.Constants.NCLOB
            || nameHash == FnvHash.Constants.LONG
            || nameHash == FnvHash.Constants.XMLTYPE) {
        argumentns.clear();
        dataType = new SQLCharacterDataType("LONGTEXT");
    } else {
        // Unknown type: pass through unchanged.
        dataType = x;
    }
    if (dataType != x) {
        dataType.setParent(x.getParent());
    }
    return dataType;
}
public static SQLDataType transformOracleToAliyunAds(SQLDataType x) {
    // Maps an Oracle column data type onto the reduced type system of
    // Aliyun ADS. Returns a new node (re-parented like x) when a mapping
    // applies, otherwise x itself.
    final String typeName = x.getName().toLowerCase();
    SQLDataType result;
    switch (typeName) {
        // Every character/LOB/raw family collapses to a plain varchar.
        case "varchar2":
        case "varchar":
        case "char":
        case "nchar":
        case "nvarchar":
        case "nvarchar2":
        case "clob":
        case "nclob":
        case "blob":
        case "long":
        case "long raw":
        case "raw":
            result = new SQLCharacterDataType("varchar");
            break;
        case "number":
        case "decimal":
        case "dec":
        case "numeric": {
            // Exact numerics: integral declarations become bigint,
            // anything with a non-zero scale becomes double.
            int scale = 0;
            List<SQLExpr> args = x.getArguments();
            if (args.size() > 1) {
                scale = ((SQLIntegerExpr) args.get(1)).getNumber().intValue();
            }
            result = scale == 0
                    ? new SQLDataTypeImpl("bigint")
                    : new SQLDataTypeImpl("double");
            break;
        }
        case "date":
        case "datetime":
        case "timestamp":
            result = new SQLDataTypeImpl("timestamp");
            break;
        case "float":
        case "binary_float":
            result = new SQLDataTypeImpl("float");
            break;
        case "double":
        case "binary_double":
            result = new SQLDataTypeImpl("double");
            break;
        default:
            // Unknown type: pass through unchanged.
            result = x;
            break;
    }
    if (result != x) {
        result.setParent(x.getParent());
    }
    return result;
}
public static SQLDataType transformOracleToPostgresql(SQLDataType x) {
    // Maps an Oracle column data type onto the closest PostgreSQL equivalent.
    // Returns a new node (re-parented like x) when a mapping applies; some
    // branches instead mutate and return x itself (CHAR <= 2000, NCHAR,
    // TIMESTAMP).
    final String name = x.getName();
    final long nameHash = x.nameHashCode64();
    if (name == null) {
        return x;
    }
    List<SQLExpr> argumentns = x.getArguments();
    SQLDataType dataType;
    if (nameHash == FnvHash.Constants.UROWID) {
        // UROWID(n) -> varchar(n); default 4000 (Oracle's UROWID maximum).
        int len = 4000;
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLIntegerExpr) {
                len = ((SQLIntegerExpr) arg0).getNumber().intValue();
            }
        }
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.VARCHAR, len);
    } else if (nameHash == FnvHash.Constants.ROWID) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.CHAR, 10);
    } else if (nameHash == FnvHash.Constants.BOOLEAN || nameHash == FnvHash.Constants.SMALLINT) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.SMALLINT);
    } else if (nameHash == FnvHash.Constants.INTEGER
            || nameHash == FnvHash.Constants.INT) {
        // NOTE(review): Oracle INTEGER is NUMBER(38), hence decimal(38) here
        // rather than a native int type - confirm this is intentional.
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.DECIMAL, 38);
    } else if (nameHash == FnvHash.Constants.BINARY_FLOAT) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.REAL);
    } else if (nameHash == FnvHash.Constants.BINARY_DOUBLE
            || nameHash == FnvHash.Constants.FLOAT
            || nameHash == FnvHash.Constants.DOUBLE
            || nameHash == FnvHash.Constants.REAL
            || nameHash == FnvHash.Constants.DOUBLE_PRECISION) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.DOUBLE_PRECISION);
    } else if (nameHash == FnvHash.Constants.NUMBER) {
        if (argumentns.isEmpty()) {
            // Unconstrained NUMBER -> decimal(38) (Oracle's maximum precision).
            dataType = new SQLDataTypeImpl(SQLDataType.Constants.DECIMAL, 38);
        } else {
            SQLExpr arg0 = argumentns.get(0);
            int precision, scale = 0;
            if (arg0 instanceof SQLAllColumnExpr) {
                // NUMBER(*): scale -1 marks "floating" below unless an explicit
                // scale argument overwrites it.
                precision = 19;
                scale = -1;
            } else {
                precision = ((SQLIntegerExpr) arg0).getNumber().intValue();
            }
            if (argumentns.size() > 1) {
                scale = ((SQLIntegerExpr) argumentns.get(1)).getNumber().intValue();
            }
            // PostgreSQL requires precision >= scale; widen precision if needed.
            if (scale > precision) {
                if (arg0 instanceof SQLAllColumnExpr) {
                    precision = 19;
                    if (scale > precision) {
                        precision = scale;
                    }
                } else {
                    precision = scale;
                }
            }
            if (scale == 0) {
                // Integral NUMBER: smallest integer type holding the digits.
                if (precision < 5) {
                    dataType = new SQLDataTypeImpl(SQLDataType.Constants.SMALLINT);
                } else if (precision < 9) {
                    dataType = new SQLDataTypeImpl(SQLDataType.Constants.INT);
                } else if (precision <= 20) {
                    dataType = new SQLDataTypeImpl(SQLDataType.Constants.BIGINT);
                } else {
                    dataType = new SQLDataTypeImpl(SQLDataType.Constants.DECIMAL, precision);
                }
            } else if (scale == -1) {
                // NUMBER(*) with no scale: treated as floating point.
                dataType = new SQLDataTypeImpl(SQLDataType.Constants.DOUBLE_PRECISION);
            } else {
                dataType = new SQLDataTypeImpl(SQLDataType.Constants.DECIMAL, precision, scale);
            }
        }
    } else if (nameHash == FnvHash.Constants.DEC
            || nameHash == FnvHash.Constants.DECIMAL) {
        dataType = x.clone();
        dataType.setName(SQLDataType.Constants.DECIMAL);
        int precision = 0;
        if (argumentns.size() > 0) {
            precision = ((SQLIntegerExpr) argumentns.get(0)).getNumber().intValue();
        }
        int scale = 0;
        if (argumentns.size() > 1) {
            scale = ((SQLIntegerExpr) argumentns.get(1)).getNumber().intValue();
            // Clamp scale down to precision when the declaration is invalid.
            if (precision < scale) {
                ((SQLIntegerExpr) dataType.getArguments().get(1)).setNumber(precision);
            }
        }
    } else if (nameHash == FnvHash.Constants.CHARACTER) {
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR, len);
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR);
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.CHAR) {
        // CHAR(n): keep as char up to Oracle's 2000-byte limit, else text.
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            if (len <= 2000) {
                dataType = x;
                dataType.setName(SQLDataType.Constants.CHAR);
            } else {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
            }
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR);
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.NCHAR) {
        // no changed
        dataType = x;
        dataType.setName(SQLDataType.Constants.NCHAR);
    } else if (nameHash == FnvHash.Constants.VARCHAR
            || nameHash == FnvHash.Constants.VARCHAR2) {
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else if (arg0 instanceof SQLVariantRefExpr) {
                // Bind-variable length (e.g. in dynamic DDL): assume 2000.
                len = 2000;
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            // varchar up to Oracle's 4000-byte limit, text beyond.
            if (len <= 4000) {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, len);
            } else {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
            }
        } else {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR);
        }
    } else if (nameHash == FnvHash.Constants.NVARCHAR
            || nameHash == FnvHash.Constants.NVARCHAR2
            || nameHash == FnvHash.Constants.NCHAR_VARYING) {
        // PostgreSQL has no national varchar; plain varchar is used.
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, len);
        } else {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR);
        }
    } else if (nameHash == FnvHash.Constants.BFILE) {
        // No external-file type; stored as a 255-char locator string.
        dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, 255);
    } else if (nameHash == FnvHash.Constants.DATE) {
        // Oracle DATE has second resolution -> timestamp(0).
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP, 0);
    } else if (nameHash == FnvHash.Constants.TIMESTAMP) {
        // Keep TIMESTAMP but strip Oracle's WITH LOCAL TIME ZONE qualifier.
        x.setName(SQLDataType.Constants.TIMESTAMP);
        if (x.isWithLocalTimeZone()) {
            x.setWithLocalTimeZone(false);
            x.setWithTimeZone(null);
        }
        dataType = x;
    } else if (nameHash == FnvHash.Constants.DATETIME) {
        int len = -1;
        if (argumentns.size() > 0) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
        }
        if (len > 0) {
            dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP, len);
        } else {
            dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP);
        }
    } else if (nameHash == FnvHash.Constants.BLOB
            || nameHash == FnvHash.Constants.LONG_RAW
            || nameHash == FnvHash.Constants.RAW) {
        // Binary types all map to bytea; length arguments are dropped.
        argumentns.clear();
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.BYTEA);
    } else if (nameHash == FnvHash.Constants.CLOB
            || nameHash == FnvHash.Constants.NCLOB
            || nameHash == FnvHash.Constants.LONG) {
        argumentns.clear();
        dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
    } else if (nameHash == FnvHash.Constants.XMLTYPE) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.XML);
    } else {
        // Unknown type: pass through unchanged.
        dataType = x;
    }
    if (dataType != x) {
        dataType.setParent(x.getParent());
    }
    return dataType;
}
public static SQLExpr transformOracleToPostgresql(SQLMethodInvokeExpr x) {
    // Rewrites Oracle-specific function calls into PostgreSQL equivalents.
    // Returns a replacement expression (parented like x) when a rewrite
    // applies, otherwise x itself. Fix: the USERENV branch was duplicated
    // verbatim; the second, unreachable copy has been removed.
    final long nameHashCode64 = x.methodNameHashCode64();
    List<SQLExpr> parameters = x.getArguments();
    if (nameHashCode64 == FnvHash.Constants.SYS_GUID) {
        // SYS_GUID() -> uuid_generate_v4() (uuid-ossp extension).
        SQLMethodInvokeExpr uuid_generate_v4 = new SQLMethodInvokeExpr("uuid_generate_v4");
        uuid_generate_v4.setParent(x.getParent());
        return uuid_generate_v4;
    }
    if (nameHashCode64 == FnvHash.Constants.TRUNC) {
        // TRUNC(SYSDATE) / TRUNC(CURRENT_TIMESTAMP) -> CURRENT_TIMESTAMP(0).
        if (parameters.size() == 1) {
            SQLExpr param0 = parameters.get(0);
            if (param0 instanceof OracleSysdateExpr
                    || (param0 instanceof SQLIdentifierExpr
                    && ((SQLIdentifierExpr) param0).nameHashCode64() == FnvHash.Constants.CURRENT_TIMESTAMP)) {
                SQLMethodInvokeExpr current_timestamp = new SQLMethodInvokeExpr("CURRENT_TIMESTAMP");
                current_timestamp.addArgument(new SQLIntegerExpr(0));
                current_timestamp.setParent(x.getParent());
                return current_timestamp;
            }
        }
    }
    if (nameHashCode64 == FnvHash.Constants.CURRENT_TIMESTAMP) {
        // In a column default, give CURRENT_TIMESTAMP the same fractional
        // precision as the column's TIMESTAMP(n) type (0 when unspecified).
        if (parameters.isEmpty() && x.getParent() instanceof SQLColumnDefinition) {
            SQLDataType dataType = ((SQLColumnDefinition) x.getParent()).getDataType();
            if (dataType.nameHashCode64() == FnvHash.Constants.TIMESTAMP
                    && dataType.getArguments().size() == 1) {
                x.addArgument(dataType.getArguments().get(0).clone());
            } else {
                x.addArgument(new SQLIntegerExpr(0));
            }
            return x;
        }
    }
    if (nameHashCode64 == FnvHash.Constants.SYSTIMESTAMP) {
        // Normalize the method-name casing; arguments are preserved by clone().
        SQLMethodInvokeExpr xx = x.clone();
        xx.setMethodName("SYSTIMESTAMP");
        xx.setParent(x.getParent());
        return xx;
    }
    if (nameHashCode64 == FnvHash.Constants.LOCALTIMESTAMP) {
        SQLMethodInvokeExpr xx = x.clone();
        xx.setMethodName("LOCALTIMESTAMP");
        xx.setParent(x.getParent());
        return xx;
    }
    if (nameHashCode64 == FnvHash.Constants.USERENV) {
        // USERENV('SESSIONID') -> get_session_id().
        if (x.getArguments().size() == 1) {
            SQLExpr param0 = x.getArguments().get(0);
            if (param0 instanceof SQLCharExpr) {
                String text = ((SQLCharExpr) param0).getText();
                if ("SESSIONID".equalsIgnoreCase(text)) {
                    SQLMethodInvokeExpr xx = new SQLMethodInvokeExpr();
                    xx.setMethodName("get_session_id");
                    xx.setParent(x.getParent());
                    return xx;
                }
            }
        }
    }
    if (nameHashCode64 == FnvHash.Constants.NUMTODSINTERVAL) {
        // NUMTODSINTERVAL(n, 'DAY') -> INTERVAL 'n DAYS'.
        if (x.getArguments().size() == 2) {
            SQLExpr param0 = x.getArguments().get(0);
            SQLExpr param1 = x.getArguments().get(1);
            if (param0 instanceof SQLIntegerExpr && param1 instanceof SQLCharExpr) {
                String text = ((SQLCharExpr) param1).getText();
                if ("DAY".equalsIgnoreCase(text)) {
                    SQLIntervalExpr intervalExpr = new SQLIntervalExpr();
                    intervalExpr.setValue(new SQLCharExpr(param0.toString() + " DAYS"));
                    intervalExpr.setParent(x.getParent());
                    return intervalExpr;
                }
            }
        }
    }
    // No rewrite applies.
    return x;
}
public static SQLTableSource transformOracleToPostgresql(SQLTableSource x) {
    // Converts Oracle-dialect table sources into their dialect-neutral AST
    // counterparts via cloneTo; unrecognized sources are returned as-is.
    if (x instanceof OracleSelectTableReference) {
        SQLExprTableSource converted = new SQLExprTableSource();
        ((OracleSelectTableReference) x).cloneTo(converted);
        converted.setParent(x.getParent());
        return converted;
    } else if (x instanceof OracleSelectJoin) {
        SQLJoinTableSource converted = new SQLJoinTableSource();
        ((OracleSelectJoin) x).cloneTo(converted);
        // Recurse into both sides so nested Oracle sources are converted too.
        converted.setLeft(transformOracleToPostgresql(converted.getLeft()));
        converted.setRight(transformOracleToPostgresql(converted.getRight()));
        converted.setParent(x.getParent());
        return converted;
    } else if (x instanceof OracleSelectSubqueryTableSource) {
        SQLSubqueryTableSource converted = new SQLSubqueryTableSource();
        ((OracleSelectSubqueryTableSource) x).cloneTo(converted);
        converted.setParent(x.getParent());
        return converted;
    } else if (x instanceof OracleWithSubqueryEntry) {
        SQLWithSubqueryClause.Entry converted = new SQLWithSubqueryClause.Entry();
        ((OracleWithSubqueryEntry) x).cloneTo(converted);
        converted.setParent(x.getParent());
        return converted;
    }
    return x;
}
public static SQLSelectQueryBlock transformOracleToPostgresql(SQLSelectQueryBlock x) {
    // Converts an Oracle query block into a dialect-neutral one, also
    // converting its FROM clause; non-Oracle blocks pass through unchanged.
    if (!(x instanceof OracleSelectQueryBlock)) {
        return x;
    }
    SQLSelectQueryBlock converted = new SQLSelectQueryBlock();
    ((OracleSelectQueryBlock) x).cloneTo(converted);
    converted.setFrom(transformOracleToPostgresql(converted.getFrom()));
    converted.setParent(x.getParent());
    return converted;
}
public static SQLDataType transformOracleToPPAS(SQLDataType x) {
    // Maps an Oracle column data type onto its PPAS (EnterpriseDB Postgres
    // Advanced Server) equivalent. Largely mirrors
    // transformOracleToPostgresql(SQLDataType), except NUMBER is kept (with
    // NUMBER(*) rewritten to precision 38) since PPAS supports it natively.
    final String name = x.getName();
    final long nameHash = x.nameHashCode64();
    if (name == null) {
        return x;
    }
    List<SQLExpr> argumentns = x.getArguments();
    SQLDataType dataType;
    if (nameHash == FnvHash.Constants.UROWID) {
        // UROWID(n) -> varchar(n); default 4000 (Oracle's UROWID maximum).
        int len = 4000;
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLIntegerExpr) {
                len = ((SQLIntegerExpr) arg0).getNumber().intValue();
            }
        }
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.VARCHAR, len);
    } else if (nameHash == FnvHash.Constants.ROWID) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.CHAR, 10);
    } else if (nameHash == FnvHash.Constants.BOOLEAN || nameHash == FnvHash.Constants.SMALLINT) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.SMALLINT);
    } else if (nameHash == FnvHash.Constants.INTEGER
            || nameHash == FnvHash.Constants.INT) {
        // NOTE(review): Oracle INTEGER is NUMBER(38), hence decimal(38) here
        // rather than a native int type - confirm this is intentional.
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.DECIMAL, 38);
    } else if (nameHash == FnvHash.Constants.BINARY_FLOAT) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.REAL);
    } else if (nameHash == FnvHash.Constants.BINARY_DOUBLE
            || nameHash == FnvHash.Constants.FLOAT
            || nameHash == FnvHash.Constants.DOUBLE
            || nameHash == FnvHash.Constants.REAL
            || nameHash == FnvHash.Constants.DOUBLE_PRECISION) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.DOUBLE_PRECISION);
    } else if (nameHash == FnvHash.Constants.NUMBER) {
        // PPAS understands NUMBER; only NUMBER(*) needs its wildcard precision
        // replaced with the explicit maximum of 38.
        dataType = x.clone();
        if (argumentns.size() > 0) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLAllColumnExpr) {
                SQLIntegerExpr precisionExpr = new SQLIntegerExpr(38);
                dataType.getArguments().set(0, precisionExpr);
            }
        }
    } else if (nameHash == FnvHash.Constants.DEC
            || nameHash == FnvHash.Constants.DECIMAL) {
        dataType = x.clone();
        dataType.setName(SQLDataType.Constants.DECIMAL);
        int precision = 0;
        if (argumentns.size() > 0) {
            precision = ((SQLIntegerExpr) argumentns.get(0)).getNumber().intValue();
        }
        int scale = 0;
        if (argumentns.size() > 1) {
            scale = ((SQLIntegerExpr) argumentns.get(1)).getNumber().intValue();
            // Clamp scale down to precision when the declaration is invalid.
            if (precision < scale) {
                ((SQLIntegerExpr) dataType.getArguments().get(1)).setNumber(precision);
            }
        }
    } else if (nameHash == FnvHash.Constants.CHARACTER) {
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR, len);
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR);
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.CHAR) {
        // CHAR(n): keep as char up to Oracle's 2000-byte limit, else text.
        if (argumentns.size() == 1) {
            SQLExpr arg0 = argumentns.get(0);
            int len;
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            if (len <= 2000) {
                dataType = x;
                dataType.setName(SQLDataType.Constants.CHAR);
            } else {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
            }
        } else if (argumentns.isEmpty()) {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.CHAR);
        } else {
            throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
        }
    } else if (nameHash == FnvHash.Constants.NCHAR) {
        // no changed
        dataType = x;
        dataType.setName(SQLDataType.Constants.NCHAR);
    } else if (nameHash == FnvHash.Constants.VARCHAR
            || nameHash == FnvHash.Constants.VARCHAR2) {
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else if (arg0 instanceof SQLVariantRefExpr) {
                // Bind-variable length (e.g. in dynamic DDL): assume 2000.
                len = 2000;
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            // varchar up to Oracle's 4000-byte limit, text beyond.
            if (len <= 4000) {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, len);
            } else {
                dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
            }
        } else {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR);
        }
    } else if (nameHash == FnvHash.Constants.NVARCHAR
            || nameHash == FnvHash.Constants.NVARCHAR2
            || nameHash == FnvHash.Constants.NCHAR_VARYING) {
        // National varchar types map to plain varchar.
        if (argumentns.size() > 0) {
            int len;
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, len);
        } else {
            dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR);
        }
    } else if (nameHash == FnvHash.Constants.BFILE) {
        // No external-file type; stored as a 255-char locator string.
        dataType = new SQLCharacterDataType(SQLDataType.Constants.VARCHAR, 255);
    } else if (nameHash == FnvHash.Constants.DATE) {
        // Oracle DATE has second resolution -> timestamp(0).
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP, 0);
    } else if (nameHash == FnvHash.Constants.TIMESTAMP) {
        // Keep TIMESTAMP but strip Oracle's WITH LOCAL TIME ZONE qualifier.
        x.setName(SQLDataType.Constants.TIMESTAMP);
        if (x.isWithLocalTimeZone()) {
            x.setWithLocalTimeZone(false);
            x.setWithTimeZone(null);
        }
        dataType = x;
    } else if (nameHash == FnvHash.Constants.DATETIME) {
        int len = -1;
        if (argumentns.size() > 0) {
            SQLExpr arg0 = argumentns.get(0);
            if (arg0 instanceof SQLNumericLiteralExpr) {
                len = ((SQLNumericLiteralExpr) arg0).getNumber().intValue();
            } else {
                throw new UnsupportedOperationException(SQLUtils.toOracleString(x));
            }
        }
        if (len > 0) {
            dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP, len);
        } else {
            dataType = new SQLDataTypeImpl(SQLDataType.Constants.TIMESTAMP);
        }
    } else if (nameHash == FnvHash.Constants.BLOB
            || nameHash == FnvHash.Constants.LONG_RAW
            || nameHash == FnvHash.Constants.RAW) {
        // Binary types all map to bytea; length arguments are dropped.
        argumentns.clear();
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.BYTEA);
    } else if (nameHash == FnvHash.Constants.CLOB
            || nameHash == FnvHash.Constants.NCLOB
            || nameHash == FnvHash.Constants.LONG) {
        argumentns.clear();
        dataType = new SQLCharacterDataType(SQLDataType.Constants.TEXT);
    } else if (nameHash == FnvHash.Constants.XMLTYPE) {
        dataType = new SQLDataTypeImpl(SQLDataType.Constants.XML);
    } else {
        // Unknown type: pass through unchanged.
        dataType = x;
    }
    if (dataType != x) {
        dataType.setParent(x.getParent());
    }
    return dataType;
}
}

View File

@ -19,7 +19,6 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.ads.visitor.AdsOutputVisitor;
import com.alibaba.druid.sql.dialect.bigquery.visitor.BigQueryOutputVisitor;
import com.alibaba.druid.sql.dialect.blink.vsitor.BlinkOutputVisitor;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKOutputVisitor;
@ -30,11 +29,11 @@ import com.alibaba.druid.sql.dialect.h2.visitor.H2OutputVisitor;
import com.alibaba.druid.sql.dialect.h2.visitor.H2SchemaStatVisitor;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsert;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsertStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveASTVisitorAdapter;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveSchemaStatVisitor;
import com.alibaba.druid.sql.dialect.holo.visitor.HoloOutputVisitor;
import com.alibaba.druid.sql.dialect.hologres.visitor.HologresOutputVisitor;
import com.alibaba.druid.sql.dialect.impala.visitor.ImpalaOutputVisitor;
import com.alibaba.druid.sql.dialect.infomix.visitor.InformixOutputVisitor;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlObject;
import com.alibaba.druid.sql.dialect.mysql.ast.clause.MySqlSelectIntoStatement;
@ -72,8 +71,6 @@ import com.alibaba.druid.support.logging.Log;
import com.alibaba.druid.support.logging.LogFactory;
import com.alibaba.druid.util.*;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
@ -83,8 +80,6 @@ import java.util.function.Consumer;
import java.util.function.Predicate;
public class SQLUtils {
public static final Charset UTF8 = StandardCharsets.UTF_8;
private static final SQLParserFeature[] FORMAT_DEFAULT_FEATURES = {
SQLParserFeature.KeepComments,
SQLParserFeature.EnableSQLBinaryOpExprGroup
@ -531,7 +526,7 @@ public class SQLUtils {
case edb:
return new PGOutputVisitor(out);
case hologres:
return new HoloOutputVisitor(out);
return new HologresOutputVisitor(out);
case sqlserver:
case jtds:
return new SQLServerOutputVisitor(out);
@ -545,8 +540,6 @@ public class SQLUtils {
return new InformixOutputVisitor(out);
case hive:
return new HiveOutputVisitor(out);
case ads:
return new AdsOutputVisitor(out);
case blink:
return new BlinkOutputVisitor(out);
case spark:
@ -562,6 +555,8 @@ public class SQLUtils {
return new StarRocksOutputVisitor(out);
case bigquery:
return new BigQueryOutputVisitor(out);
case impala:
return new ImpalaOutputVisitor(out);
default:
return new SQLASTOutputVisitor(out, dbType);
}
@ -1576,11 +1571,6 @@ public class SQLUtils {
return false;
}
@Override
public boolean visit(HiveCreateTableStatement x) {
return false;
}
@Override
public boolean visit(OdpsCreateTableStatement x) {
return false;
@ -1633,11 +1623,6 @@ public class SQLUtils {
return false;
}
@Override
public boolean visit(HiveCreateTableStatement x) {
return false;
}
@Override
public boolean visit(SQLInsertStatement x) {
if (filter == null || filter.test(x)) {
@ -2042,6 +2027,12 @@ public class SQLUtils {
if (parent instanceof SQLSelectStatement) {
((SQLSelectStatement) parent).setSelect(dest);
return true;
} else if (parent instanceof SQLSubqueryTableSource) {
((SQLSubqueryTableSource) parent).setSelect(dest);
return true;
} else if (parent instanceof SQLInsertStatement) {
((SQLInsertStatement) parent).setQuery(dest);
return true;
}
return false;
}

View File

@ -25,6 +25,9 @@ public class SQLPartitionValue extends OracleSegmentAttributesImpl {
protected Operator operator;
protected final List<SQLExpr> items = new ArrayList<SQLExpr>();
public SQLPartitionValue() {
}
public SQLPartitionValue(Operator operator) {
super();
this.operator = operator;
@ -45,8 +48,10 @@ public class SQLPartitionValue extends OracleSegmentAttributesImpl {
return operator;
}
public static enum Operator {
public enum Operator {
LessThan,
LessThanEqual,
Equal,
In,
List
}

View File

@ -16,8 +16,8 @@
package com.alibaba.druid.sql.ast.expr;
public enum SQLAggregateOption {
DISTINCT, ALL, UNIQUE,
DISTINCT,
ALL,
UNIQUE,
DEDUPLICATION // just for nut
}

View File

@ -15,7 +15,6 @@
*/
package com.alibaba.druid.sql.ast.expr;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import com.alibaba.druid.util.MySqlUtils;
@ -201,25 +200,6 @@ public class SQLDateExpr extends SQLExprImpl implements SQLLiteralExpr, SQLValua
return Collections.emptyList();
}
public static long supportDbTypes = DbType.of(
DbType.mysql,
DbType.oracle,
DbType.presto,
DbType.trino,
DbType.postgresql,
DbType.ads,
DbType.hive,
DbType.odps,
DbType.mariadb,
DbType.bigquery,
DbType.spark,
DbType.tidb
);
public static boolean isSupport(DbType dbType) {
return (dbType.mask & supportDbTypes) != 0;
}
public static boolean check(String str) {
final int len;
if (str == null || (len = str.length()) < 8) {

View File

@ -34,8 +34,7 @@ import com.alibaba.druid.util.lang.Consumer;
import java.util.*;
public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLStatement, SQLCreateStatement {
protected boolean ifNotExists;
protected Type type;
protected int features;
protected SQLExprTableSource tableSource;
protected List<SQLTableElement> tableElementList = new ArrayList<SQLTableElement>();
@ -49,7 +48,7 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
protected Boolean logging;
protected SQLName tablespace;
protected SQLPartitionBy partitioning;
protected SQLPartitionBy partitionBy;
protected SQLPartitionOf partitionOf;
protected SQLPartitionBy localPartitioning;
protected SQLExpr storedAs;
@ -58,7 +57,6 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
protected boolean onCommitPreserveRows;
protected boolean onCommitDeleteRows;
protected boolean external;
// for odps & hive
protected SQLExternalRecordFormat rowFormat;
@ -74,8 +72,6 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
protected boolean replace;
protected boolean ignore;
protected boolean single; // polardbx
protected boolean dimension;
protected SQLExpr engine;
protected SQLExpr lifeCycle;
@ -103,7 +99,7 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
this.acceptChild(v, like);
this.acceptChild(v, tablespace);
this.acceptChild(v, partitioning);
this.acceptChild(v, partitionBy);
this.acceptChild(v, localPartitioning);
this.acceptChild(v, storedAs);
this.acceptChild(v, storedBy);
@ -188,20 +184,24 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
setTableSource(new SQLExprTableSource(name));
}
public Type getType() {
return type;
public void config(Feature feature) {
config(feature, true);
}
public void setType(Type type) {
this.type = type;
public boolean isEnabled(Feature feature) {
return feature.isEnabled(this.features);
}
public enum Type {
GLOBAL_TEMPORARY,
LOCAL_TEMPORARY,
TEMPORARY,
SHADOW,
TRANSACTIONAL
public void config(Feature feature, boolean state) {
this.features = feature.config(this.features, state);
}
public boolean isTemporary() {
return Feature.Temporary.isEnabled(features);
}
public void setTemporary(boolean value) {
this.features = Feature.Temporary.config(features, value);
}
public List<SQLTableElement> getTableElementList() {
@ -285,11 +285,11 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
}
public boolean isIfNotExists() {
return ifNotExists;
return Feature.IfNotExists.isEnabled(features);
}
public void setIfNotExiists(boolean ifNotExists) {
this.ifNotExists = ifNotExists;
public void setIfNotExists(boolean value) {
this.features = Feature.IfNotExists.config(this.features, value);
}
public SQLExprTableSource getInherits() {
@ -357,19 +357,19 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
}
public SQLPartitionBy getPartitioning() {
return partitioning;
return partitionBy;
}
public SQLPartitionBy getLocalPartitioning() {
return this.localPartitioning;
}
public void setPartitioning(SQLPartitionBy partitioning) {
if (partitioning != null) {
partitioning.setParent(this);
public void setPartitionBy(SQLPartitionBy partitionBy) {
if (partitionBy != null) {
partitionBy.setParent(this);
}
this.partitioning = partitioning;
this.partitionBy = partitionBy;
}
public SQLPartitionOf getPartitionOf() {
@ -1222,9 +1222,7 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
}
public void cloneTo(SQLCreateTableStatement x) {
x.setExternal(external);
x.ifNotExists = ifNotExists;
x.type = type;
x.features = features;
if (tableSource != null) {
x.setTableSource(tableSource.clone());
@ -1254,8 +1252,8 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
x.setComment(comment.clone());
}
if (partitioning != null) {
x.setPartitioning(partitioning.clone());
if (partitionBy != null) {
x.setPartitionBy(partitionBy.clone());
}
if (like != null) {
@ -1269,8 +1267,8 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
x.setTablespace(tablespace.clone());
}
if (partitioning != null) {
x.setPartitioning(partitioning.clone());
if (partitionBy != null) {
x.setPartitionBy(partitionBy.clone());
}
if (localPartitioning != null) {
@ -1329,7 +1327,6 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
x.buckets = buckets;
x.shards = shards;
x.dimension = dimension;
}
@ -1401,11 +1398,11 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
// for odps & hive
public boolean isExternal() {
return external;
return Feature.External.isEnabled(features);
}
public void setExternal(boolean external) {
this.external = external;
this.features = Feature.External.config(this.features, external);
}
public ClusteringType getClusteringType() {
@ -1487,11 +1484,11 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
}
public boolean isDimension() {
return dimension;
return Feature.Dimension.isEnabled(features);
}
public void setDimension(boolean dimension) {
this.dimension = dimension;
this.features = Feature.Dimension.config(features, dimension);
}
public SQLExpr getLocation() {
@ -1630,17 +1627,6 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
return false;
}
public SQLExpr getEngine() {
return engine;
}
public void setEngine(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.engine = x;
}
public DDLObjectType getDDLObjectType() {
return DDLObjectType.TABLE;
}
@ -1655,4 +1641,36 @@ public class SQLCreateTableStatement extends SQLStatementImpl implements SQLDDLS
}
this.lifeCycle = x;
}
public enum Feature {
Temporary(1),
Global(1 << 1),
Local(1 << 2),
OrReplace(1 << 3),
IfNotExists(1 << 4),
External(1 << 5),
Transactional(1 << 6),
Shadow(1 << 7),
Dimension(1 << 8);
public final int mask;
Feature(int mask) {
this.mask = mask;
}
public boolean isEnabled(long features) {
return (features & mask) != 0;
}
public int config(int features, boolean state) {
if (state) {
features |= this.mask;
} else {
features &= ~this.mask;
}
return features;
}
}
}

View File

@ -34,7 +34,7 @@ public class SQLSelectQueryBlock extends SQLSelectQueryBase implements SQLReplac
protected final List<SQLSelectItem> selectList = new ArrayList<SQLSelectItem>();
protected SQLTableSource from;
protected List<String> commentsAfaterFrom;
protected List<String> commentsAfterFrom;
protected SQLExprTableSource into;
protected SQLExpr where;
@ -497,12 +497,12 @@ public class SQLSelectQueryBlock extends SQLSelectQueryBase implements SQLReplac
this.setFrom(new SQLSelect(queryBlock), alias);
}
public List<String> getCommentsAfaterFrom() {
return commentsAfaterFrom;
public List<String> getCommentsAfterFrom() {
return commentsAfterFrom;
}
public void setCommentsAfaterFrom(List<String> commentsAfaterFrom) {
this.commentsAfaterFrom = commentsAfaterFrom;
public void setCommentsAfterFrom(List<String> commentsAfterFrom) {
this.commentsAfterFrom = commentsAfterFrom;
}
public void setFrom(SQLSelect select, String alias) {

View File

@ -1,180 +0,0 @@
package com.alibaba.druid.sql.dialect.ads.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
public class AdsCreateTableParser extends SQLCreateTableParser {
public AdsCreateTableParser(String sql) {
super(sql);
}
public AdsCreateTableParser(SQLExprParser exprParser) {
super(exprParser);
}
public SQLCreateTableStatement parseCreateTable() {
SQLCreateTableStatement stmt = newCreateStatement();
if (lexer.hasComment() && lexer.isKeepComments()) {
stmt.addBeforeComment(lexer.readAndResetComments());
}
accept(Token.CREATE);
if (lexer.identifierEquals(FnvHash.Constants.DIMENSION)) {
lexer.nextToken();
stmt.setDimension(true);
}
accept(Token.TABLE);
if (lexer.token() == Token.IF) {
lexer.nextToken();
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExiists(true);
}
stmt.setName(this.exprParser.name());
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
for (; ; ) {
Token token = lexer.token();
if (token == Token.IDENTIFIER //
|| token == Token.LITERAL_ALIAS) {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
} else if (token == Token.PRIMARY //
|| token == Token.UNIQUE //
|| token == Token.CHECK //
|| token == Token.CONSTRAINT
|| token == Token.FOREIGN) {
SQLConstraint constraint = this.exprParser.parseConstaint();
constraint.setParent(stmt);
stmt.getTableElementList().add((SQLTableElement) constraint);
} else if (token == Token.TABLESPACE) {
throw new ParserException("TODO " + lexer.info());
} else if (lexer.token() == Token.INDEX) { //skip index
lexer.nextToken();
accept(Token.IDENTIFIER);
accept(Token.IDENTIFIER);
accept(Token.LPAREN);
accept(Token.IDENTIFIER);
for (; ; ) {
if (lexer.token() == Token.COMMA) {
accept(Token.IDENTIFIER);
continue;
}
break;
}
accept(Token.RPAREN);
} else {
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.getTableElementList().add(column);
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
if (lexer.token() == Token.RPAREN) { // compatible for sql server
break;
}
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.token() == Token.AS) {
lexer.nextToken();
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.identifierEquals("PARTITION")) {
lexer.nextToken();
accept(Token.BY);
acceptIdentifier("HASH");
accept(Token.KEY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
acceptIdentifier("PARTITION");
acceptIdentifier("NUM");
accept(Token.LITERAL_INT);
}
if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.TABLEGROUP)) {
lexer.nextToken();
accept(Token.IDENTIFIER);
}
if (lexer.identifierEquals(FnvHash.Constants.OPTIONS)) {
parseOptions(stmt);
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
accept(Token.LITERAL_CHARS);
}
return stmt;
}
}

View File

@ -1,78 +0,0 @@
/*
* Copyright 1999-2017 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.sql.dialect.ads.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import java.util.Arrays;
public class AdsExprParser extends SQLExprParser {
private static final String[] AGGREGATE_FUNCTIONS;
private static final long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER"};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
long hash = FnvHash.fnv1a_64_lower(str);
int index = Arrays.binarySearch(AGGREGATE_FUNCTIONS_CODES, hash);
AGGREGATE_FUNCTIONS[index] = str;
}
}
public AdsExprParser(String sql) {
this(new AdsLexer(sql));
this.lexer.nextToken();
}
public AdsExprParser(String sql, SQLParserFeature... features) {
this(new AdsLexer(sql, features));
this.lexer.nextToken();
}
public AdsExprParser(Lexer lexer) {
super(lexer);
this.aggregateFunctions = AGGREGATE_FUNCTIONS;
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
}
protected SQLExpr parseAliasExpr(String alias) {
String chars = alias.substring(1, alias.length() - 1);
return new SQLCharExpr(chars);
}
public SQLExpr primaryRest(SQLExpr expr) {
if (lexer.token() == Token.LBRACKET) {
SQLArrayExpr array = new SQLArrayExpr();
array.setExpr(expr);
lexer.nextToken();
this.exprList(array.getValues(), array);
accept(Token.RBRACKET);
return primaryRest(array);
}
return super.primaryRest(expr);
}
}

View File

@ -1,47 +0,0 @@
package com.alibaba.druid.sql.dialect.ads.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class AdsLexer extends Lexer {
public static final Keywords DEFAULT_ADS_KEYWORDS;
static {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("OF", Token.OF);
map.put("CONCAT", Token.CONCAT);
map.put("CONTINUE", Token.CONTINUE);
map.put("MERGE", Token.MERGE);
map.put("USING", Token.USING);
map.put("ROW", Token.ROW);
map.put("LIMIT", Token.LIMIT);
map.put("SHOW", Token.SHOW);
map.put("ALL", Token.ALL);
DEFAULT_ADS_KEYWORDS = new Keywords(map);
}
public AdsLexer(String input) {
super(input);
dbType = DbType.ads;
super.keywords = DEFAULT_ADS_KEYWORDS;
}
public AdsLexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_ADS_KEYWORDS;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
}

View File

@ -1,38 +0,0 @@
/*
* Copyright 1999-2017 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.druid.sql.dialect.ads.parser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLSelectListCache;
import com.alibaba.druid.sql.parser.SQLSelectParser;
public class AdsSelectParser extends SQLSelectParser {
public AdsSelectParser(SQLExprParser exprParser) {
super(exprParser);
}
public AdsSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
super(exprParser, selectListCache);
}
public AdsSelectParser(String sql) {
this(new AdsExprParser(sql));
}
protected SQLExprParser createExprParser() {
return new AdsExprParser(lexer);
}
}

View File

@ -1,108 +0,0 @@
package com.alibaba.druid.sql.dialect.ads.parser;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
public class AdsStatementParser extends SQLStatementParser {
public AdsStatementParser(String sql) {
super(new AdsExprParser(sql));
}
public AdsStatementParser(String sql, SQLParserFeature... features) {
super(new AdsExprParser(sql, features));
}
public AdsStatementParser(Lexer lexer) {
super(new AdsExprParser(lexer));
}
public AdsSelectParser createSQLSelectParser() {
return new AdsSelectParser(this.exprParser, selectListCache);
}
public SQLCreateTableParser getSQLCreateTableParser() {
return new AdsCreateTableParser(this.exprParser);
}
public SQLCreateTableStatement parseCreateTable() {
AdsCreateTableParser parser = new AdsCreateTableParser(this.exprParser);
return parser.parseCreateTable();
}
public SQLStatement parseShow() {
accept(Token.SHOW);
if (lexer.identifierEquals(FnvHash.Constants.DATABASES)) {
lexer.nextToken();
SQLShowDatabasesStatement stmt = parseShowDatabases(false);
return stmt;
}
if (lexer.identifierEquals(FnvHash.Constants.TABLES)) {
lexer.nextToken();
SQLShowTablesStatement stmt = parseShowTables();
return stmt;
}
if (lexer.identifierEquals(FnvHash.Constants.COLUMNS)) {
lexer.nextToken();
SQLShowColumnsStatement stmt = parseShowColumns();
return stmt;
}
if (lexer.identifierEquals(FnvHash.Constants.TABLEGROUPS)) {
lexer.nextToken();
SQLShowTableGroupsStatement stmt = parseShowTableGroups();
return stmt;
}
if (lexer.identifierEquals(FnvHash.Constants.PROCESSLIST)) {
lexer.nextToken();
SQLShowProcessListStatement stmt = new SQLShowProcessListStatement();
if (lexer.identifierEquals(FnvHash.Constants.MPP)) {
lexer.nextToken();
stmt.setMpp(true);
}
return stmt;
}
if (lexer.token() == Token.CREATE) {
lexer.nextToken();
accept(Token.TABLE);
SQLShowCreateTableStatement stmt = new SQLShowCreateTableStatement();
stmt.setName(this.exprParser.name());
return stmt;
}
if (lexer.token() == Token.ALL) {
lexer.nextToken();
if (lexer.token() == Token.CREATE) {
lexer.nextToken();
accept(Token.TABLE);
SQLShowCreateTableStatement stmt = new SQLShowCreateTableStatement();
stmt.setAll(true);
stmt.setName(this.exprParser.name());
return stmt;
}
}
throw new ParserException("TODO " + lexer.info());
}
}

View File

@ -1,67 +0,0 @@
package com.alibaba.druid.sql.dialect.ads.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLShowColumnsStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlPrimaryKey;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import java.util.List;
public class AdsOutputVisitor extends SQLASTOutputVisitor implements AdsVisitor {
public AdsOutputVisitor(StringBuilder appender) {
super(appender);
}
public AdsOutputVisitor(StringBuilder appender, DbType dbType) {
super(appender, dbType);
}
public AdsOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
}
public boolean visit(SQLCreateTableStatement x) {
printCreateTable(x, true);
List<SQLAssignItem> options = x.getTableOptions();
if (options.size() > 0) {
println();
print0(ucase ? "OPTIONS (" : "options (");
printAndAccept(options, ", ");
print(')');
}
return false;
}
@Override
public boolean visit(SQLAlterTableAddColumn x) {
print0(ucase ? "ADD COLUMN " : "add column ");
printAndAccept(x.getColumns(), ", ");
return false;
}
@Override
public boolean visit(SQLShowColumnsStatement x) {
print0(ucase ? "SHOW COLUMNS" : "show columns");
if (x.getTable() != null) {
print0(ucase ? " IN " : " in ");
x.getTable().accept(this);
}
return false;
}
@Override
public void endVisit(MySqlPrimaryKey x) {
}
@Override
public void endVisit(MySqlCreateTableStatement x) {
}
}

View File

@ -1,15 +0,0 @@
package com.alibaba.druid.sql.dialect.ads.visitor;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlPrimaryKey;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public interface AdsVisitor extends SQLASTVisitor {
boolean visit(MySqlPrimaryKey x);
void endVisit(MySqlPrimaryKey x);
boolean visit(MySqlCreateTableStatement x);
void endVisit(MySqlCreateTableStatement x);
}

View File

@ -0,0 +1,75 @@
package com.alibaba.druid.sql.dialect.bigquery.ast;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import java.util.Collections;
import java.util.List;
public class BigQueryCharExpr extends SQLCharExpr implements SQLExpr {
private String prefix;
private boolean space;
public BigQueryCharExpr() {
}
public boolean hasPrefix() {
return prefix != null;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
public boolean isSpace() {
return space;
}
public void setSpace(boolean space) {
this.space = space;
}
public BigQueryCharExpr(String text, String prefix) {
this(text, prefix, false);
}
public BigQueryCharExpr(String text, String prefix, boolean space) {
this.prefix = prefix;
this.text = text;
this.space = space;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof SQLASTOutputVisitor) {
SQLASTOutputVisitor visitor = (SQLASTOutputVisitor) v;
if (hasPrefix()) {
visitor.print(prefix);
}
if (isSpace()) {
visitor.print(" ");
}
visitor.print("'");
visitor.print(text);
visitor.print("'");
}
}
@Override
public List<SQLObject> getChildren() {
return Collections.emptyList();
}
@Override
public BigQueryCharExpr clone() {
BigQueryCharExpr clone = new BigQueryCharExpr();
clone.setPrefix(this.prefix);
clone.setText(this.text);
clone.setSpace(this.space);
return clone;
}
}

View File

@ -0,0 +1,31 @@
package com.alibaba.druid.sql.dialect.bigquery.ast;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import java.util.ArrayList;
import java.util.List;
public class BigQueryCreateTableStatement
extends SQLCreateTableStatement {
protected final List<SQLExpr> partitionBy = new ArrayList<>();
public List<SQLExpr> getPartitionBy() {
return partitionBy;
}
public BigQueryCreateTableStatement clone() {
BigQueryCreateTableStatement x = new BigQueryCreateTableStatement();
cloneTo(x);
return x;
}
protected void cloneTo(BigQueryCreateTableStatement x) {
super.cloneTo(x);
for (SQLExpr item : partitionBy) {
SQLExpr cloned = item.clone();
cloned.setParent(x);
x.partitionBy.add(cloned);
}
}
}

View File

@ -1,10 +1,8 @@
package com.alibaba.druid.sql.dialect.bigquery.parser;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryCreateTableStatement;
import com.alibaba.druid.sql.dialect.db2.parser.DB2ExprParser;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
@ -20,33 +18,17 @@ public class BigQueryCreateTableParser extends SQLCreateTableParser {
super(exprParser);
}
protected void parseCreateTableRest(SQLCreateTableStatement stmt) {
protected SQLCreateTableStatement newCreateStatement() {
return new BigQueryCreateTableStatement();
}
protected void parseCreateTableRest(SQLCreateTableStatement x) {
BigQueryCreateTableStatement stmt = (BigQueryCreateTableStatement) x;
for (;;) {
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
if (lexer.nextIf(Token.PARTITION)) {
accept(Token.BY);
boolean brace = lexer.nextIf(Token.LPAREN);
for (; ; ) {
SQLName name;
name = exprParser.name();
if (name instanceof SQLIdentifierExpr
&& ((SQLIdentifierExpr) name).getName().equalsIgnoreCase("DATE")
&& lexer.nextIf(Token.LPAREN)
) {
name = exprParser.name();
accept(Token.RPAREN);
name.putAttribute("function", "DATE");
}
stmt.addPartitionColumn(new SQLColumnDefinition(name));
if (lexer.nextIf(Token.COMMA)) {
continue;
}
break;
}
if (brace) {
accept(Token.RPAREN);
}
this.exprParser.exprList(stmt.getPartitionBy(), stmt);
continue;
}
@ -94,12 +76,12 @@ public class BigQueryCreateTableParser extends SQLCreateTableParser {
protected void createTableBefore(SQLCreateTableStatement createTable) {
if (lexer.nextIfIdentifier("TEMPORARY") || lexer.nextIfIdentifier("TEMP")) {
createTable.setType(SQLCreateTableStatement.Type.TEMPORARY);
createTable.setTemporary(true);
}
if (lexer.nextIf(Token.OR)) {
accept(Token.REPLACE);
createTable.setReplace(true);
createTable.config(SQLCreateTableStatement.Feature.OrReplace);
}
}
}

View File

@ -5,6 +5,7 @@ import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryCharExpr;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQuerySelectAsStruct;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLExprParser;
@ -22,8 +23,26 @@ public class BigQueryExprParser extends SQLExprParser {
private static final long[] AGGREGATE_FUNCTIONS_CODES;
static {
String[] strings = {"ARRAY_AGG", "AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
"ROWNUMBER"};
String[] strings = {
"ANY_VALUE",
"ARRAY_AGG",
"ARRAY_CONCAT_AGG",
"AVG",
"BIT_AND",
"BIT_OR",
"BIT_XOR",
"COUNT",
"COUNTIF",
"GROUPING",
"LOGICAL_AND",
"LOGICAL_OR",
"MAX",
"MAX_BY",
"MIN",
"MIN_BY",
"STRING_AGG",
"SUM"
};
AGGREGATE_FUNCTIONS_CODES = fnv1a_64_lower(strings, true);
AGGREGATE_FUNCTIONS = new String[AGGREGATE_FUNCTIONS_CODES.length];
for (String str : strings) {
@ -217,6 +236,23 @@ public class BigQueryExprParser extends SQLExprParser {
}
}
}
if (expr instanceof SQLIdentifierExpr) {
SQLIdentifierExpr identifierExpr = (SQLIdentifierExpr) expr;
String ident = identifierExpr.getName();
if (ident.equalsIgnoreCase("b") && lexer.token() == Token.LITERAL_CHARS) {
String charValue = lexer.stringVal();
lexer.nextToken();
expr = new SQLBinaryExpr(charValue);
} else if (ident.equalsIgnoreCase("r") && lexer.token() == Token.LITERAL_CHARS) {
String charValue = lexer.stringVal();
lexer.nextToken();
expr = new BigQueryCharExpr(charValue, "r");
} else if (ident.equalsIgnoreCase("json") && lexer.token() == Token.LITERAL_CHARS) {
String charValue = lexer.stringVal();
lexer.nextToken();
expr = new BigQueryCharExpr(charValue, "JSON", true);
}
}
return super.primaryRest(expr);
}

View File

@ -9,19 +9,20 @@ import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class BigQueryLexer extends Lexer {
public static final Keywords DEFAULT_BIG_QUERY_KEYWORDS;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.SQLDateExpr;
static {
public class BigQueryLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
// map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
// map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("ALL", Token.ALL);
map.put("AND", Token.AND);
map.put("ANY", Token.ANY);
map.put("ALTER", Token.ALTER);
// map.put("ARRAY", Token.ARRAY);
// map.put("ARRAY", Token.ARRAY);
map.put("AS", Token.AS);
map.put("ASC", Token.ASC);
map.put("BETWEEN", Token.BETWEEN);
@ -98,19 +99,14 @@ public class BigQueryLexer extends Lexer {
map.put("WINDOW", Token.WINDOW);
map.put("WITH", Token.WITH);
DEFAULT_BIG_QUERY_KEYWORDS = new Keywords(map);
}
{
dbType = DbType.bigquery;
return new Keywords(map);
}
public BigQueryLexer(String input, SQLParserFeature... features) {
super(input);
dbType = DbType.hive;
dbType = DbType.bigquery;
this.skipComment = true;
this.keepComments = true;
super.keywords = DEFAULT_BIG_QUERY_KEYWORDS;
this.features |= SQLParserFeature.SupportUnicodeCodePoint.mask;
for (SQLParserFeature feature : features) {
config(feature, true);
@ -139,4 +135,10 @@ public class BigQueryLexer extends Lexer {
}
super.scanAlias();
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(SQLDateExpr);
}
}

View File

@ -6,6 +6,7 @@ import com.alibaba.druid.sql.ast.expr.SQLCastExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryAssertStatement;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryCreateTableStatement;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQuerySelectAsStruct;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQuerySelectQueryBlock;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
@ -24,35 +25,18 @@ public class BigQueryOutputVisitor extends SQLASTOutputVisitor
}
protected void printPartitionedBy(SQLCreateTableStatement x) {
List<SQLColumnDefinition> partitionColumns = x.getPartitionColumns();
int partitionSize = partitionColumns.size();
if (partitionSize == 0) {
List<SQLExpr> partitionBy;
if (!(x instanceof BigQueryCreateTableStatement)) {
return;
} else {
partitionBy = ((BigQueryCreateTableStatement) x).getPartitionBy();
}
if (partitionBy.isEmpty()) {
return;
}
println();
print0(ucase ? "PARTITION BY (" : "partition by (");
this.indentCount++;
println();
for (int i = 0; i < partitionSize; ++i) {
SQLColumnDefinition column = partitionColumns.get(i);
printPartitoinedByColumn(column);
if (i != partitionSize - 1) {
print(',');
}
if (this.isPrettyFormat() && column.hasAfterComment()) {
print(' ');
printlnComment(column.getAfterCommentsDirect());
}
if (i != partitionSize - 1) {
println();
}
}
this.indentCount--;
println();
print(')');
print0(ucase ? "PARTITION BY " : "partition by ");
printAndAccept(((BigQueryCreateTableStatement) x).getPartitionBy(), ",");
}
protected void printPartitoinedByColumn(SQLColumnDefinition column) {

View File

@ -1,6 +1,7 @@
package com.alibaba.druid.sql.dialect.bigquery.visitor;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryAssertStatement;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQueryCreateTableStatement;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQuerySelectAsStruct;
import com.alibaba.druid.sql.dialect.bigquery.ast.BigQuerySelectQueryBlock;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
@ -33,4 +34,11 @@ public interface BigQueryVisitor extends SQLASTVisitor {
default void endVisit(BigQueryAssertStatement x) {
}
default boolean visit(BigQueryCreateTableStatement x) {
return true;
}
default void endVisit(BigQueryCreateTableStatement x) {
}
}

View File

@ -40,8 +40,7 @@ public class BlinkCreateTableParser extends SQLCreateTableParser {
accept(Token.CREATE);
if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
lexer.nextToken();
if (lexer.nextIfIdentifier(FnvHash.Constants.EXTERNAL)) {
stmt.setExternal(true);
}
@ -52,7 +51,7 @@ public class BlinkCreateTableParser extends SQLCreateTableParser {
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExiists(true);
stmt.setIfNotExists(true);
}
stmt.setName(this.exprParser.name());

View File

@ -10,9 +10,8 @@ import java.util.HashMap;
import java.util.Map;
public class BlinkLexer extends Lexer {
public static final Keywords DEFAULT_BLINK_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -28,18 +27,16 @@ public class BlinkLexer extends Lexer {
map.put("IF", Token.IF);
map.put("PERIOD", Token.PERIOD);
DEFAULT_BLINK_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public BlinkLexer(String input) {
super(input);
super.keywords = DEFAULT_BLINK_KEYWORDS;
dbType = DbType.blink;
}
public BlinkLexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_BLINK_KEYWORDS;
dbType = DbType.blink;
for (SQLParserFeature feature : features) {
config(feature, true);

View File

@ -4,7 +4,7 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.statement.SQLUpdateSetItem;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKVisitor;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import java.util.ArrayList;
@ -41,8 +41,8 @@ public class CKAlterTableUpdateStatement extends CKAlterTableStatement {
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof CKVisitor) {
CKVisitor vv = (CKVisitor) v;
if (v instanceof CKASTVisitor) {
CKASTVisitor vv = (CKASTVisitor) v;
if (vv.visit(this)) {
acceptChild(vv, this.getTableName());
acceptChild(vv, this.getClusterName());

View File

@ -5,7 +5,8 @@ import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKVisitor;
import com.alibaba.druid.sql.ast.statement.SQLPrimaryKey;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
import java.util.ArrayList;
@ -14,9 +15,13 @@ import java.util.List;
public class CKCreateTableStatement extends SQLCreateTableStatement {
protected final List<SQLAssignItem> settings = new ArrayList<SQLAssignItem>();
private SQLOrderBy orderBy;
private SQLExpr partitionBy;
private SQLPrimaryKey primaryKey;
private SQLExpr sampleBy;
private SQLExpr ttl;
private String onClusterName;
private SQLExpr engine;
public CKCreateTableStatement() {
super(DbType.clickhouse);
}
@ -33,18 +38,6 @@ public class CKCreateTableStatement extends SQLCreateTableStatement {
this.orderBy = x;
}
public SQLExpr getPartitionBy() {
return partitionBy;
}
public void setPartitionBy(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.partitionBy = x;
}
public SQLExpr getSampleBy() {
return sampleBy;
}
@ -61,10 +54,51 @@ public class CKCreateTableStatement extends SQLCreateTableStatement {
return settings;
}
public SQLPrimaryKey getPrimaryKey() {
return primaryKey;
}
public void setPrimaryKey(SQLPrimaryKey primaryKey) {
if (primaryKey != null) {
primaryKey.setParent(this);
}
this.primaryKey = primaryKey;
}
public SQLExpr getTtl() {
return ttl;
}
public void setTtl(SQLExpr ttl) {
if (ttl != null) {
ttl.setParent(this);
}
this.ttl = ttl;
}
public String getOnClusterName() {
return onClusterName;
}
public void setOnClusterName(String onClusterName) {
this.onClusterName = onClusterName;
}
public SQLExpr getEngine() {
return engine;
}
public void setEngine(SQLExpr x) {
if (x != null) {
x.setParent(this);
}
this.engine = x;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof CKVisitor) {
CKVisitor vv = (CKVisitor) v;
if (v instanceof CKASTVisitor) {
CKASTVisitor vv = (CKASTVisitor) v;
if (vv.visit(this)) {
acceptChild(vv);
}

View File

@ -0,0 +1,37 @@
package com.alibaba.druid.sql.dialect.clickhouse.ast;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public class ClickhouseColumnCodec extends ClickhouseColumnConstraint {
private SQLExpr expr;
public ClickhouseColumnCodec() {
super();
}
public SQLExpr getExpr() {
return expr;
}
public void setExpr(SQLExpr expr) {
this.expr = expr;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof CKASTVisitor) {
CKASTVisitor vv = (CKASTVisitor) v;
if (vv.visit(this)) {
acceptChild(vv, expr);
}
vv.endVisit(this);
}
}
@Override
public ClickhouseColumnCodec clone() {
ClickhouseColumnCodec clickhouseColumnCodec = new ClickhouseColumnCodec();
super.cloneTo(clickhouseColumnCodec);
clickhouseColumnCodec.setExpr(expr.clone());
return clickhouseColumnCodec;
}
}

View File

@ -0,0 +1,26 @@
package com.alibaba.druid.sql.dialect.clickhouse.ast;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLColumnConstraint;
import com.alibaba.druid.sql.ast.statement.SQLConstraintImpl;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public class ClickhouseColumnConstraint extends SQLConstraintImpl implements SQLColumnConstraint {
public ClickhouseColumnConstraint() {
dbType = DbType.clickhouse;
}
@Override
protected void accept0(SQLASTVisitor v) {}
@Override
public ClickhouseColumnConstraint clone() {
ClickhouseColumnConstraint clickhouseColumnConstraint = new ClickhouseColumnConstraint();
cloneTo(clickhouseColumnConstraint);
return clickhouseColumnConstraint;
}
public void cloneTo(ClickhouseColumnConstraint x) {
super.cloneTo(x);
x.dbType = dbType;
}
}

View File

@ -0,0 +1,38 @@
package com.alibaba.druid.sql.dialect.clickhouse.ast;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.dialect.clickhouse.visitor.CKASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public class ClickhouseColumnTTL extends ClickhouseColumnConstraint{
private SQLExpr expr;
public ClickhouseColumnTTL() {
super();
}
public SQLExpr getExpr() {
return expr;
}
public void setExpr(SQLExpr expr) {
this.expr = expr;
}
@Override
protected void accept0(SQLASTVisitor v) {
if (v instanceof CKASTVisitor) {
CKASTVisitor vv = (CKASTVisitor) v;
if (vv.visit(this)) {
acceptChild(vv, expr);
}
vv.endVisit(this);
}
}
public ClickhouseColumnTTL clone() {
ClickhouseColumnTTL clickhouseColumnTTL = new ClickhouseColumnTTL();
super.cloneTo(clickhouseColumnTTL);
clickhouseColumnTTL.setExpr(expr.clone());
return clickhouseColumnTTL;
}
}

View File

@ -2,9 +2,13 @@ package com.alibaba.druid.sql.dialect.clickhouse.parser;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLOrderBy;
import com.alibaba.druid.sql.ast.SQLPartitionBy;
import com.alibaba.druid.sql.ast.SQLPartitionByList;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLPrimaryKey;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKCreateTableStatement;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.Token;
@ -19,6 +23,30 @@ public class CKCreateTableParser extends SQLCreateTableParser {
return new CKCreateTableStatement();
}
@Override
public SQLPartitionBy parsePartitionBy() {
lexer.nextToken();
accept(Token.BY);
SQLPartitionBy sqlPartitionBy = new SQLPartitionByList();
boolean hasParen = false;
if (lexer.nextIf(Token.LPAREN)) {
hasParen = true;
}
for (; ; ) {
sqlPartitionBy.addColumn(this.exprParser.expr());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
if (hasParen) {
accept(Token.RPAREN);
}
return sqlPartitionBy;
}
protected void parseCreateTableRest(SQLCreateTableStatement stmt) {
CKCreateTableStatement ckStmt = (CKCreateTableStatement) stmt;
if (lexer.identifierEquals(FnvHash.Constants.ENGINE)) {
@ -26,16 +54,13 @@ public class CKCreateTableParser extends SQLCreateTableParser {
if (lexer.token() == Token.EQ) {
lexer.nextToken();
}
stmt.setEngine(
ckStmt.setEngine(
this.exprParser.expr()
);
}
if (lexer.token() == Token.PARTITION) {
lexer.nextToken();
accept(Token.BY);
SQLExpr expr = this.exprParser.expr();
ckStmt.setPartitionBy(expr);
ckStmt.setPartitionBy(parsePartitionBy());
}
if (lexer.token() == Token.ORDER) {
@ -43,6 +68,11 @@ public class CKCreateTableParser extends SQLCreateTableParser {
ckStmt.setOrderBy(orderBy);
}
if (lexer.token() == Token.PRIMARY) {
SQLPrimaryKey sqlPrimaryKey = this.exprParser.parsePrimaryKey();
ckStmt.setPrimaryKey(sqlPrimaryKey);
}
if (lexer.identifierEquals("SAMPLE")) {
lexer.nextToken();
accept(Token.BY);
@ -50,6 +80,12 @@ public class CKCreateTableParser extends SQLCreateTableParser {
ckStmt.setSampleBy(expr);
}
if (lexer.token() == Token.TTL) {
lexer.nextToken();
SQLExpr expr = this.exprParser.expr();
ckStmt.setTtl(expr);
}
if (lexer.token() == Token.SETTINGS) {
lexer.nextToken();
for (; ; ) {
@ -66,4 +102,21 @@ public class CKCreateTableParser extends SQLCreateTableParser {
}
}
}
@Override
protected void createTableAfterName(SQLCreateTableStatement createTable) {
if (lexer.token() == Token.ON) {
lexer.nextToken();
acceptIdentifier("CLUSTER");
if (lexer.token() == Token.IDENTIFIER) {
String clusterName = lexer.stringVal();
CKCreateTableStatement ckStmt = (CKCreateTableStatement) createTable;
ckStmt.setOnClusterName(clusterName);
lexer.nextToken();
} else {
setErrorEndPos(lexer.pos());
throw new ParserException("syntax error, expect IDENTIFIER, actual " + lexer.token() + ", " + lexer.info());
}
}
}
}

View File

@ -15,20 +15,33 @@
*/
package com.alibaba.druid.sql.dialect.clickhouse.parser;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLStructDataType;
import com.alibaba.druid.sql.ast.expr.SQLArrayExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnCodec;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnTTL;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
import com.google.common.collect.Lists;
import java.util.Arrays;
import java.util.List;
import static com.alibaba.druid.sql.parser.Token.LPAREN;
import static com.alibaba.druid.sql.parser.Token.RPAREN;
public class CKExprParser extends SQLExprParser {
private static final String[] AGGREGATE_FUNCTIONS;
private static final long[] AGGREGATE_FUNCTIONS_CODES;
private static final List<String> NESTED_DATA_TYPE;
static {
String[] strings = {"AVG", "COUNT", "MAX", "MIN", "STDDEV", "SUM", "ROW_NUMBER",
@ -40,6 +53,7 @@ public class CKExprParser extends SQLExprParser {
int index = Arrays.binarySearch(AGGREGATE_FUNCTIONS_CODES, hash);
AGGREGATE_FUNCTIONS[index] = str;
}
NESTED_DATA_TYPE = Lists.newArrayList("array", "tuple", "nullable", "lowcardinality", "variant");
}
public CKExprParser(String sql) {
@ -56,6 +70,7 @@ public class CKExprParser extends SQLExprParser {
super(lexer);
this.aggregateFunctions = AGGREGATE_FUNCTIONS;
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
this.nestedDataType = NESTED_DATA_TYPE;
}
protected SQLExpr parseAliasExpr(String alias) {
@ -75,4 +90,77 @@ public class CKExprParser extends SQLExprParser {
return super.primaryRest(expr);
}
@Override
protected SQLColumnDefinition parseColumnSpecific(SQLColumnDefinition column) {
switch (lexer.token()) {
case CODEC: {
lexer.nextToken();
accept(LPAREN);
SQLExpr codecExpr = expr();
accept(RPAREN);
ClickhouseColumnCodec sqlColumnCodec = new ClickhouseColumnCodec();
sqlColumnCodec.setExpr(codecExpr);
column.addConstraint(sqlColumnCodec);
return parseColumnRest(column);
}
case TTL: {
lexer.nextToken();
ClickhouseColumnTTL clickhouseColumnTTL = new ClickhouseColumnTTL();
clickhouseColumnTTL.setExpr(expr());
column.addConstraint(clickhouseColumnTTL);
return parseColumnRest(column);
}
default:
return column;
}
}
@Override
protected SQLExpr primaryDefaultRest() {
return new SQLIdentifierExpr(lexer.stringVal());
}
@Override
protected SQLDataType parseDataTypeNested() {
lexer.nextToken();
accept(Token.LPAREN);
SQLStructDataType struct = new SQLStructDataType(dbType);
for (; ; ) {
SQLName name;
switch (lexer.token()) {
case GROUP:
case ORDER:
case FROM:
case TO:
name = new SQLIdentifierExpr(lexer.stringVal());
lexer.nextToken();
break;
default:
name = this.name();
break;
}
SQLDataType dataType = this.parseDataType();
SQLStructDataType.Field field = struct.addField(name, dataType);
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLCharExpr chars = (SQLCharExpr) this.primary();
field.setComment(chars.getText());
}
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
return struct;
}
}

View File

@ -9,10 +9,11 @@ import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class CKLexer extends Lexer {
public static final Keywords DEFAULT_KEYWORDS;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
static {
public class CKLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -36,21 +37,32 @@ public class CKLexer extends Lexer {
map.put("FORMAT", Token.FORMAT);
map.put("SETTINGS", Token.SETTINGS);
map.put("FINAL", Token.FINAL);
map.put("TTL", Token.TTL);
map.put("CODEC", Token.CODEC);
DEFAULT_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public CKLexer(String input) {
super(input);
dbType = DbType.clickhouse;
super.keywords = DEFAULT_KEYWORDS;
}
public CKLexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_KEYWORDS;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
AsofJoin,
GlobalJoin,
JoinRightTableAlias,
ParseLimitBy,
TableAliasAsof
);
}
}

View File

@ -1,6 +1,5 @@
package com.alibaba.druid.sql.dialect.clickhouse.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
@ -151,7 +150,7 @@ public class CKSelectParser
@Override
protected void parseAfterOrderBy(SQLSelectQueryBlock queryBlock) {
if (lexer.token() == Token.WITH && DbType.clickhouse == dbType) {
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("FILL");
((CKSelectQueryBlock) queryBlock).setWithFill(true);

View File

@ -1,14 +1,20 @@
package com.alibaba.druid.sql.dialect.clickhouse.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.statement.SQLAlterStatement;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLCreateViewStatement;
import com.alibaba.druid.sql.ast.statement.SQLUpdateSetItem;
import com.alibaba.druid.sql.ast.statement.SQLWithSubqueryClause;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKAlterTableUpdateStatement;
import com.alibaba.druid.sql.parser.*;
import static com.alibaba.druid.sql.parser.Token.ALTER;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
import static com.alibaba.druid.sql.parser.Token.ON;
import static com.alibaba.druid.sql.parser.Token.TABLE;
import static com.alibaba.druid.sql.parser.Token.TO;
public class CKStatementParser extends SQLStatementParser {
public CKStatementParser(String sql) {
@ -79,4 +85,34 @@ public class CKStatementParser extends SQLStatementParser {
lexer.reset(mark);
return super.alterTable();
}
@Override
protected SQLAlterStatement alterTableAfterName(SQLAlterTableStatement stmt) {
if (lexer.token() == ON) {
lexer.nextToken();
acceptIdentifier("CLUSTER");
stmt.setOn(this.exprParser.name());
}
return super.alterTableAfterName(stmt);
}
@Override
public void parseCreateViewAfterName(SQLCreateViewStatement createView) {
if (dbType == DbType.clickhouse) {
if (lexer.token() == Token.ON) {
lexer.nextToken();
acceptIdentifier("CLUSTER");
createView.setOnCluster(true);
}
if (lexer.token() == LITERAL_CHARS) {
SQLName to = this.exprParser.name();
createView.setTo(to);
} else if (lexer.token() == TO) {
lexer.nextToken();
SQLName to = this.exprParser.name();
createView.setTo(to);
}
}
}
}

View File

@ -0,0 +1,45 @@
package com.alibaba.druid.sql.dialect.clickhouse.visitor;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKAlterTableUpdateStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKSelectQueryBlock;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnCodec;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnTTL;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public interface CKASTVisitor extends SQLASTVisitor {
default boolean visit(CKCreateTableStatement x) {
return true;
}
default void endVisit(CKCreateTableStatement x) {
}
default boolean visit(CKSelectQueryBlock x) {
return true;
}
default void endVisit(CKSelectQueryBlock x) {
}
default boolean visit(CKAlterTableUpdateStatement x) {
return true;
}
default void endVisit(CKAlterTableUpdateStatement x) {
}
default boolean visit(ClickhouseColumnCodec x) {
return true;
}
default void endVisit(ClickhouseColumnCodec x) {
}
default boolean visit(ClickhouseColumnTTL x) {
return true;
}
default void endVisit(ClickhouseColumnTTL x) {
}
}

View File

@ -6,11 +6,14 @@ import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKAlterTableUpdateStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKCreateTableStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKSelectQueryBlock;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnCodec;
import com.alibaba.druid.sql.dialect.clickhouse.ast.ClickhouseColumnTTL;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.alibaba.druid.util.StringUtils;
import java.util.List;
public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
public class CKOutputVisitor extends SQLASTOutputVisitor implements CKASTVisitor {
public CKOutputVisitor(StringBuilder appender) {
super(appender, DbType.clickhouse);
}
@ -84,16 +87,31 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
return false;
}
@Override
public boolean visit(SQLPartitionByList x) {
if (x.getColumns().size() == 1) {
x.getColumns().get(0).accept(this);
} else {
print('(');
printAndAccept(x.getColumns(), ", ");
print0(")");
}
printPartitionsCountAndSubPartitions(x);
printSQLPartitions(x.getPartitions());
return false;
}
@Override
public boolean visit(CKCreateTableStatement x) {
super.visit((SQLCreateTableStatement) x);
SQLExpr partitionBy = x.getPartitionBy();
if (partitionBy != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
partitionBy.accept(this);
}
// SQLPartitionBy partitionBy = x.getPartitioning();
// if (partitionBy != null) {
// println();
// print0(ucase ? "PARTITION BY " : "partition by ");
// partitionBy.accept(this);
// }
SQLOrderBy orderBy = x.getOrderBy();
if (orderBy != null) {
@ -101,6 +119,12 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
orderBy.accept(this);
}
SQLPrimaryKey primaryKey = x.getPrimaryKey();
if (primaryKey != null) {
println();
primaryKey.accept(this);
}
SQLExpr sampleBy = x.getSampleBy();
if (sampleBy != null) {
println();
@ -108,6 +132,13 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
sampleBy.accept(this);
}
SQLExpr ttl = x.getTtl();
if (ttl != null) {
println();
print0(ucase ? "TTL " : "ttl ");
ttl.accept(this);
}
List<SQLAssignItem> settings = x.getSettings();
if (!settings.isEmpty()) {
println();
@ -186,6 +217,20 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
return false;
}
@Override
public boolean visit(ClickhouseColumnCodec x) {
print0(ucase ? "CODEC(" : "codec(");
printExpr(x.getExpr());
print(")");
return false;
}
public boolean visit(ClickhouseColumnTTL x) {
print0(ucase ? " TTL " : " ttl ");
printExpr(x.getExpr());
return false;
}
@Override
protected void printAfterFetch(SQLSelectQueryBlock queryBlock) {
if (queryBlock instanceof CKSelectQueryBlock) {
@ -259,4 +304,41 @@ public class CKOutputVisitor extends SQLASTOutputVisitor implements CKVisitor {
print0(ucase ? " WITH TIES" : " with ties");
}
}
@Override
protected void printCreateTableAfterName(SQLCreateTableStatement x) {
if (x instanceof CKCreateTableStatement) {
CKCreateTableStatement ckStmt = (CKCreateTableStatement) x;
if (!StringUtils.isEmpty(ckStmt.getOnClusterName())) {
print0(ucase ? " ON CLUSTER " : " on cluster ");
print(ckStmt.getOnClusterName());
}
}
}
@Override
protected void printEngine(SQLCreateTableStatement x) {
if (x instanceof CKCreateTableStatement) {
SQLExpr engine = ((CKCreateTableStatement) x).getEngine();
if (engine != null) {
print0(ucase ? " ENGINE = " : " engine = ");
engine.accept(this);
}
}
}
@Override
public boolean visit(SQLMapDataType x) {
print0(ucase ? "MAP(" : "map(");
SQLDataType keyType = x.getKeyType();
SQLDataType valueType = x.getValueType();
keyType.accept(this);
print0(", ");
valueType.accept(this);
print(')');
return false;
}
}

View File

@ -7,7 +7,7 @@ import com.alibaba.druid.sql.repository.SchemaRepository;
import com.alibaba.druid.sql.visitor.SchemaStatVisitor;
import com.alibaba.druid.stat.TableStat;
public class CKStatVisitor extends SchemaStatVisitor implements CKVisitor {
public class CKStatVisitor extends SchemaStatVisitor implements CKASTVisitor {
{
dbType = DbType.spark;
}

View File

@ -1,21 +0,0 @@
package com.alibaba.druid.sql.dialect.clickhouse.visitor;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKAlterTableUpdateStatement;
import com.alibaba.druid.sql.dialect.clickhouse.ast.CKCreateTableStatement;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
public interface CKVisitor extends SQLASTVisitor {
default boolean visit(CKCreateTableStatement x) {
return true;
}
default void endVisit(CKCreateTableStatement x) {
}
default boolean visit(CKAlterTableUpdateStatement x) {
return true;
}
default void endVisit(CKAlterTableUpdateStatement x) {
}
}

View File

@ -69,7 +69,7 @@ public class DB2CreateTableParser extends SQLCreateTableParser {
accept(Token.RPAREN);
accept(Token.USING);
acceptIdentifier("HASHING");
createTable.setPartitioning(partitionBy);
createTable.setPartitionBy(partitionBy);
continue;
} else if (lexer.nextIfIdentifier(FnvHash.Constants.VALIDPROC)) {
SQLName validproc = this.exprParser.name();

View File

@ -23,10 +23,11 @@ import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class DB2Lexer extends Lexer {
public static final Keywords DEFAULT_DB2_KEYWORDS;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.ParseAssignItemSkip;
static {
public class DB2Lexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -43,19 +44,23 @@ public class DB2Lexer extends Lexer {
map.put("USING", Token.USING);
map.put("MATCHED", Token.MATCHED);
DEFAULT_DB2_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public DB2Lexer(String input) {
super(input);
super.keywords = DEFAULT_DB2_KEYWORDS;
}
public DB2Lexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_DB2_KEYWORDS;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(ParseAssignItemSkip);
}
}

View File

@ -18,7 +18,6 @@ package com.alibaba.druid.sql.dialect.db2.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLPartitionBy;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
@ -121,12 +120,7 @@ public class DB2OutputVisitor extends SQLASTOutputVisitor implements DB2ASTVisit
validproc.accept(this);
}
SQLPartitionBy partitionBy = x.getPartitioning();
if (partitionBy != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
partitionBy.accept(this);
}
printPartitionBy(x);
Boolean compress = x.getCompress();
if (compress != null) {

View File

@ -25,9 +25,8 @@ import java.util.HashMap;
import java.util.Map;
public class H2Lexer extends Lexer {
public static final Keywords DEFAULT_H2_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -42,17 +41,15 @@ public class H2Lexer extends Lexer {
map.put("LIMIT", Token.LIMIT);
map.put("IF", Token.IF);
DEFAULT_H2_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public H2Lexer(String input) {
super(input, null, DbType.h2);
super.keywords = DEFAULT_H2_KEYWORDS;
}
public H2Lexer(String input, SQLParserFeature... features) {
super(input, null, DbType.h2);
super.keywords = DEFAULT_H2_KEYWORDS;
for (SQLParserFeature feature : features) {
config(feature, true);
}

View File

@ -19,15 +19,21 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLQueryExpr;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableAddColumn;
import com.alibaba.druid.sql.ast.statement.SQLAlterTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLInsertInto;
import com.alibaba.druid.sql.ast.statement.SQLInsertStatement;
import com.alibaba.druid.sql.ast.statement.SQLReplaceStatement;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
import static com.alibaba.druid.sql.parser.Token.LPAREN;
import static com.alibaba.druid.sql.parser.Token.RPAREN;
public class H2StatementParser extends SQLStatementParser {
public H2StatementParser(String sql) {
super(new H2ExprParser(sql));
@ -108,4 +114,16 @@ public class H2StatementParser extends SQLStatementParser {
}
}
}
@Override
protected void alterTableAddRestSpecific(SQLAlterTableStatement stmt) {
if (lexer.token() == LPAREN) {
lexer.nextToken();
SQLAlterTableAddColumn item = parseAlterTableAddColumn();
stmt.addItem(item);
accept(RPAREN);
return;
}
throw new ParserException("TODO " + lexer.info());
}
}

View File

@ -19,7 +19,6 @@ import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.expr.SQLListExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement.Type;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInputOutputFormat;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.parser.*;
@ -35,19 +34,16 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
}
protected void createTableBefore(SQLCreateTableStatement stmt) {
if (lexer.identifierEquals(FnvHash.Constants.EXTERNAL)) {
lexer.nextToken();
if (lexer.nextIfIdentifier(FnvHash.Constants.EXTERNAL)) {
stmt.setExternal(true);
}
if (lexer.identifierEquals(FnvHash.Constants.TEMPORARY)) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.TEMPORARY);
if (lexer.nextIfIdentifier(FnvHash.Constants.TEMPORARY)) {
stmt.setTemporary(true);
}
if (lexer.stringVal().equalsIgnoreCase("TRANSACTIONAL")) {
lexer.nextToken();
stmt.setType(Type.TRANSACTIONAL);
if (lexer.nextIfIdentifier("TRANSACTIONAL")) {
stmt.config(SQLCreateTableStatement.Feature.Transactional);
}
}
@ -104,6 +100,30 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
}
}
protected void parseCreateTableWithSerderPropertie(HiveCreateTableStatement stmt) {
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("SERDEPROPERTIES");
accept(Token.LPAREN);
for (; ; ) {
String key = lexer.stringVal();
lexer.nextToken();
accept(Token.EQ);
SQLExpr value = this.exprParser.primary();
stmt.getSerdeProperties().put(key, value);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
}
protected void parseCreateTableRest(SQLCreateTableStatement createTable) {
HiveCreateTableStatement stmt = (HiveCreateTableStatement) createTable;
if (lexer.nextIfIdentifier(FnvHash.Constants.ENGINE)) {
@ -184,6 +204,21 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
if (lexer.nextIfIdentifier(FnvHash.Constants.SKEWED)) {
accept(Token.BY);
accept(Token.LPAREN);
@ -209,30 +244,11 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
break;
}
accept(Token.RPAREN);
if (lexer.nextIfIdentifier(FnvHash.Constants.STORED)) {
accept(Token.AS);
acceptIdentifier("DIRECTORIES");
stmt.setSkewedByStoreAsDirectories(true);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (lexer.token() == Token.ROW
|| lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
parseSortedBy(stmt);
}
if (stmt.getClusteredBy().size() > 0 || stmt.getSortedBy().size() > 0) {
accept(Token.INTO);
if (lexer.token() == Token.LITERAL_INT) {
stmt.setBuckets(lexer.integerValue().intValue());
lexer.nextToken();
} else {
throw new ParserException("into buckets must be integer. " + lexer.info());
}
acceptIdentifier("BUCKETS");
}
if (lexer.token() == Token.ROW
@ -250,6 +266,8 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
accept(Token.BY);
SQLName name = this.exprParser.name();
stmt.setStoredBy(name);
parseCreateTableWithSerderPropertie(stmt);
} else {
accept(Token.AS);
@ -381,7 +399,7 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
}
}
private void parseSortedBy(HiveCreateTableStatement stmt) {
protected void parseSortedBy(HiveCreateTableStatement stmt) {
lexer.nextToken();
accept(Token.BY);
accept(Token.LPAREN);
@ -397,31 +415,10 @@ public class HiveCreateTableParser extends SQLCreateTableParser {
accept(Token.RPAREN);
}
private void parseRowFormat(HiveCreateTableStatement stmt) {
protected void parseRowFormat(HiveCreateTableStatement stmt) {
SQLExternalRecordFormat format = this.getExprParser().parseRowFormat();
stmt.setRowFormat(format);
if (lexer.token() == Token.WITH) {
lexer.nextToken();
acceptIdentifier("SERDEPROPERTIES");
accept(Token.LPAREN);
for (; ; ) {
String name = lexer.stringVal();
lexer.nextToken();
accept(Token.EQ);
SQLExpr value = this.exprParser.primary();
stmt.getSerdeProperties().put(name, value);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
}
parseCreateTableWithSerderPropertie(stmt);
}
@Override

View File

@ -59,6 +59,143 @@ public class HiveExprParser extends SQLExprParser {
this.aggregateFunctionHashCodes = AGGREGATE_FUNCTIONS_CODES;
}
@Override
protected SQLExpr primaryCommon(SQLExpr sqlExpr) {
    // Treat whatever the lexer currently holds as a bare identifier
    // expression, then advance. NOTE(review): the incoming sqlExpr is
    // discarded and replaced — confirm callers never pass a value they
    // expect to be preserved.
    sqlExpr = new SQLIdentifierExpr(lexer.stringVal());
    lexer.nextToken();
    return sqlExpr;
}
@Override
protected String doRestSpecific(SQLExpr expr) {
    // Hive allows a trailing numeric literal here; consume it and return its
    // textual form, otherwise report nothing found (null).
    Token current = lexer.token();
    if (current != Token.LITERAL_INT && current != Token.LITERAL_FLOAT) {
        return null;
    }
    String numberText = lexer.numberString();
    lexer.nextToken();
    return numberText;
}
@Override
protected SQLExpr relationalRestEqeq(SQLExpr expr) {
    // Parses the right-hand side of an '=' comparison. A save point lets us
    // back out cleanly when '=' is immediately followed by ';'.
    Lexer.SavePoint mark = lexer.mark();
    lexer.nextToken();
    SQLExpr rightExp;
    try {
        if (lexer.token() == Token.SEMI) {
            // "expr = ;" — treat the '=' as not part of this expression.
            lexer.reset(mark);
            return expr;
        }
        rightExp = bitOr();
    } catch (EOFParserException e) {
        // Preserve the cause so the truncated-input context isn't lost.
        throw new ParserException("EOF, " + expr + "=", e);
    }
    if (lexer.token() == Token.COLONEQ) {
        // "a = b := c" — fold the ':=' assignment into the right-hand side.
        lexer.nextToken();
        SQLExpr colonExpr = expr();
        rightExp = new SQLBinaryOpExpr(rightExp, SQLBinaryOperator.Assignment, colonExpr, dbType);
    }
    return new SQLBinaryOpExpr(expr, SQLBinaryOperator.Equality, rightExp, dbType);
}
@Override
protected SQLExpr parseAssignItemOnColon(SQLExpr sqlExpr) {
    // Folds a trailing ":<number>" into the expression's text, producing a
    // single identifier such as "a:1". Leaves the input untouched otherwise.
    if (lexer.token() != Token.COLON) {
        return sqlExpr;
    }
    lexer.nextToken();
    String merged = sqlExpr.toString() + ':' + lexer.numberString();
    lexer.nextToken();
    return new SQLIdentifierExpr(merged);
}
/**
 * Hive-specific handling for a select-list item that begins with the plain
 * identifier {@code ident} (pre-hashed as {@code hash_lower}): COLLATE
 * expressions, TIMESTAMP/DATETIME literals, and the parenthesis-free
 * current-time pseudo functions. Returns null when no special form applies,
 * letting the generic path take over. Backquoted identifiers are never
 * treated specially (the charAt(0) != '`' guards).
 */
@Override
protected SQLExpr parseSelectItemRest(String ident, long hash_lower) {
    SQLExpr expr = null;
    if (lexer.identifierEquals(FnvHash.Constants.COLLATE)
            && lexer.stringVal().charAt(0) != '`'
    ) {
        // ident COLLATE <collation>  →  binary COLLATE expression
        lexer.nextToken();
        String collate = lexer.stringVal();
        lexer.nextToken();
        SQLBinaryOpExpr binaryExpr = new SQLBinaryOpExpr(
                new SQLIdentifierExpr(ident),
                SQLBinaryOperator.COLLATE,
                new SQLIdentifierExpr(collate), dbType
        );
        expr = binaryExpr;
    } else if (FnvHash.Constants.TIMESTAMP == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token() == Token.LITERAL_CHARS) {
        // TIMESTAMP '...' literal, optionally followed by AT TIME ZONE '<tz>'.
        String literal = lexer.stringVal();
        lexer.nextToken();
        SQLTimestampExpr ts = new SQLTimestampExpr(literal);
        expr = ts;
        if (lexer.identifierEquals(FnvHash.Constants.AT)) {
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            String timeZone = null;
            if (lexer.identifierEquals(FnvHash.Constants.TIME)) {
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.ZONE)) {
                    lexer.nextToken();
                    timeZone = lexer.stringVal();
                    lexer.nextToken();
                }
            }
            if (timeZone == null) {
                // Not a complete AT TIME ZONE clause — rewind so 'AT' can be
                // re-read by whatever follows.
                lexer.reset(mark);
            } else {
                ts.setTimeZone(timeZone);
            }
        }
    } else if (FnvHash.Constants.DATETIME == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token() == Token.LITERAL_CHARS) {
        // DATETIME '...' literal.
        String literal = lexer.stringVal();
        lexer.nextToken();
        SQLDateTimeExpr ts = new SQLDateTimeExpr(literal);
        expr = ts;
    } else if (FnvHash.Constants.CURRENT_DATE == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        // CURRENT_DATE without parentheses (otherwise it's a function call).
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_DATE);
    } else if (FnvHash.Constants.CURRENT_TIMESTAMP == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_TIMESTAMP);
    } else if (FnvHash.Constants.CURRENT_TIME == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_TIME);
    } else if (FnvHash.Constants.CURDATE == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURDATE);
    } else if (FnvHash.Constants.LOCALTIME == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.LOCALTIME);
    } else if (FnvHash.Constants.LOCALTIMESTAMP == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.LOCALTIMESTAMP);
    }
    return expr;
}
public SQLExpr primaryRest(SQLExpr expr) {
if (lexer.token() == Token.COLON) {
lexer.nextToken();

View File

@ -18,16 +18,23 @@ package com.alibaba.druid.sql.dialect.hive.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.NotAllowCommentException;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class HiveLexer extends Lexer {
public static final Keywords DEFAULT_HIVE_KEYWORDS;
import static com.alibaba.druid.sql.parser.CharTypes.isWhitespace;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.*;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
static {
public class HiveLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -44,7 +51,7 @@ public class HiveLexer extends Lexer {
map.put("PARTITIONED", Token.PARTITIONED);
map.put("PARTITION", Token.PARTITION);
map.put("OVERWRITE", Token.OVERWRITE);
// map.put("SORT", Token.SORT);
// map.put("SORT", Token.SORT);
map.put("IF", Token.IF);
map.put("TRUE", Token.TRUE);
map.put("FALSE", Token.FALSE);
@ -52,7 +59,7 @@ public class HiveLexer extends Lexer {
map.put("CONSTRAINT", Token.CONSTRAINT);
map.put("DIV", Token.DIV);
DEFAULT_HIVE_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public HiveLexer(String input) {
@ -61,7 +68,6 @@ public class HiveLexer extends Lexer {
this.keepComments = true;
dbType = DbType.hive;
this.features |= SQLParserFeature.SupportUnicodeCodePoint.mask;
super.keywords = DEFAULT_HIVE_KEYWORDS;
}
public HiveLexer(String input, SQLParserFeature... features) {
@ -69,18 +75,332 @@ public class HiveLexer extends Lexer {
dbType = DbType.hive;
this.skipComment = true;
this.keepComments = true;
super.keywords = DEFAULT_HIVE_KEYWORDS;
this.features |= SQLParserFeature.SupportUnicodeCodePoint.mask;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
protected final void scanString() {
scanString2();
/**
 * Scans a single-quoted Hive string literal starting at the current
 * position. A first pass tries a fast path: if the literal contains no
 * backslash escapes and no doubled quote, the raw substring is taken
 * directly. Otherwise it falls back to a character-by-character scan that
 * decodes backslash escapes (\0 \' \" \b \n \r \t \\ \Z \% \_ and, when
 * SupportUnicodeCodePoint is enabled, \uXXXX) and doubled '' quotes into a
 * buffer. On success sets {@code stringVal} and {@code token} to
 * LITERAL_CHARS.
 */
@Override
protected void scanString() {
    {
        // ---- fast path: locate the closing quote without decoding ----
        boolean hasSpecial = false;
        int startIndex = pos + 1;
        int endIndex = -1; // text.indexOf('\'', startIndex);
        for (int i = startIndex; i < text.length(); ++i) {
            final char ch = text.charAt(i);
            if (ch == '\\') {
                // Escape present — the fast path can't be used verbatim.
                hasSpecial = true;
                continue;
            }
            if (ch == '\'') {
                endIndex = i;
                break;
            }
        }
        if (endIndex == -1) {
            throw new ParserException("unclosed str. " + info());
        }
        String stringVal;
        if (token == Token.AS) {
            // After AS the quotes themselves are kept (quoted alias form).
            stringVal = text.substring(pos, endIndex + 1);
        } else {
            if (startIndex == endIndex) {
                stringVal = "";
            } else {
                stringVal = text.substring(startIndex, endIndex);
            }
        }
        // hasSpecial = stringVal.indexOf('\\') != -1;
        if (!hasSpecial) {
            this.stringVal = stringVal;
            int pos = endIndex + 1;
            char ch = charAt(pos);
            if (ch != '\'') {
                // Not a doubled quote ('' escape) — fast path succeeds.
                this.pos = pos;
                this.ch = ch;
                token = LITERAL_CHARS;
                return;
            }
        }
    }
    // ---- slow path: decode escapes into buf ----
    mark = pos;
    boolean hasSpecial = false;
    for (; ; ) {
        if (isEOF()) {
            lexError("unclosed.str.lit");
            return;
        }
        ch = charAt(++pos);
        if (ch == '\\') {
            scanChar();
            if (!hasSpecial) {
                // First escape seen: copy the literal prefix into buf, then
                // switch to buffered mode for the rest of the literal.
                initBuff(bufPos);
                arraycopy(mark + 1, buf, 0, bufPos);
                hasSpecial = true;
            }
            switch (ch) {
                case '0':
                    putChar('\0');
                    break;
                case '\'':
                    putChar('\'');
                    break;
                case '"':
                    putChar('"');
                    break;
                case 'b':
                    putChar('\b');
                    break;
                case 'n':
                    putChar('\n');
                    break;
                case 'r':
                    putChar('\r');
                    break;
                case 't':
                    putChar('\t');
                    break;
                case '\\':
                    putChar('\\');
                    break;
                case 'Z':
                    putChar((char) 0x1A); // ctrl + Z
                    break;
                case '%':
                    putChar('%');
                    break;
                case '_':
                    putChar('_');
                    break;
                case 'u':
                    if ((features & SQLParserFeature.SupportUnicodeCodePoint.mask) != 0) {
                        // \uXXXX — decode four hex digits into one char.
                        char c1 = charAt(++pos);
                        char c2 = charAt(++pos);
                        char c3 = charAt(++pos);
                        char c4 = charAt(++pos);
                        int intVal = Integer.parseInt(new String(new char[]{c1, c2, c3, c4}), 16);
                        putChar((char) intVal);
                    } else {
                        putChar(ch);
                    }
                    break;
                default:
                    // Unknown escape: keep the character as-is.
                    putChar(ch);
                    break;
            }
            continue;
        }
        if (ch == '\'') {
            scanChar();
            if (ch != '\'') {
                // Single closing quote — literal complete.
                token = LITERAL_CHARS;
                break;
            } else {
                // '' inside the literal is an escaped single quote.
                if (!hasSpecial) {
                    initBuff(bufPos);
                    arraycopy(mark + 1, buf, 0, bufPos);
                    hasSpecial = true;
                }
                putChar('\'');
                continue;
            }
        }
        if (!hasSpecial) {
            // Still in verbatim mode — just count the character.
            bufPos++;
            continue;
        }
        if (bufPos == buf.length) {
            putChar(ch);
        } else {
            buf[bufPos++] = ch;
        }
    }
    if (!hasSpecial) {
        stringVal = subString(mark + 1, bufPos);
    } else {
        stringVal = new String(buf, 0, bufPos);
    }
}
public void scanComment() {
scanHiveComment();
if (ch != '/' && ch != '-') {
throw new IllegalStateException();
}
Token lastToken = this.token;
mark = pos;
bufPos = 0;
scanChar();
if (ch == ' ') {
mark = pos;
bufPos = 0;
scanChar();
if (dialectFeatureEnabled(ScanHiveCommentDoubleSpace) && ch == ' ') {
mark = pos;
bufPos = 0;
scanChar();
}
}
// /*+ */
if (ch == '*') {
scanChar();
bufPos++;
while (ch == ' ') {
scanChar();
bufPos++;
}
boolean isHint = false;
int startHintSp = bufPos + 1;
if (ch == '+') {
isHint = true;
scanChar();
bufPos++;
}
for (; ; ) {
if (ch == '*') {
if (charAt(pos + 1) == '/') {
bufPos += 2;
scanChar();
scanChar();
break;
} else if (isWhitespace(charAt(pos + 1))) {
int i = 2;
for (; i < 1024 * 1024; ++i) {
if (!isWhitespace(charAt(pos + i))) {
break;
}
}
if (charAt(pos + i) == '/') {
bufPos += 2;
pos += (i + 1);
ch = charAt(pos);
break;
}
}
}
scanChar();
if (ch == EOI) {
break;
}
bufPos++;
}
if (isHint) {
stringVal = subString(mark + startHintSp, (bufPos - startHintSp) - 1);
token = Token.HINT;
} else {
stringVal = subString(mark, bufPos + 1);
token = Token.MULTI_LINE_COMMENT;
commentCount++;
if (keepComments) {
addComment(stringVal);
}
}
if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
return;
}
if (token != Token.HINT && !isAllowComment()) {
throw new NotAllowCommentException();
}
return;
}
if (!isAllowComment()) {
throw new NotAllowCommentException();
}
if (ch == '/' || ch == '-') {
scanChar();
bufPos++;
for (; ; ) {
if (ch == '\r') {
if (charAt(pos + 1) == '\n') {
line++;
bufPos += 2;
scanChar();
break;
}
bufPos++;
break;
} else if (ch == EOI) {
if (pos >= text.length()) {
break;
}
}
if (ch == '\n') {
line++;
scanChar();
bufPos++;
break;
}
scanChar();
bufPos++;
}
stringVal = subString(mark, ch != EOI ? bufPos : bufPos + 1);
token = Token.LINE_COMMENT;
commentCount++;
if (keepComments) {
addComment(stringVal);
}
endOfComment = isEOF();
if (commentHandler != null && commentHandler.handle(lastToken, stringVal)) {
return;
}
return;
}
}
@Override
protected void initDialectFeature() {
    // Start from the base lexer's defaults, then switch on the lexing /
    // parsing behaviors Hive needs and turn off the one it doesn't.
    super.initDialectFeature();
    this.dialectFeature.configFeature(
        ScanSQLTypeWithFrom,
        NextTokenColon,
        ScanAliasU,
        JoinRightTableFrom,
        GroupByAll,
        SQLDateExpr,
        ParseAssignItemRparenCommaSetReturn,
        TableAliasLock,
        TableAliasPartition,
        AsSkip,
        AsSequence,
        AsDatabase,
        AsDefault
    );
    this.dialectFeature.unconfigFeature(
        PrimaryBangBangSupport
    );
}
}

View File

@ -29,6 +29,8 @@ import com.alibaba.druid.util.FnvHash.Constants;
import java.util.List;
import static com.alibaba.druid.sql.parser.Token.LPAREN;
public class HiveStatementParser extends SQLStatementParser {
{
dbType = DbType.hive;
@ -50,7 +52,7 @@ public class HiveStatementParser extends SQLStatementParser {
super(exprParser);
}
public HiveSelectParser createSQLSelectParser() {
public SQLSelectParser createSQLSelectParser() {
return new HiveSelectParser(this.exprParser, selectListCache);
}
@ -588,4 +590,53 @@ public class HiveStatementParser extends SQLStatementParser {
public HiveExprParser getExprParser() {
return (HiveExprParser) exprParser;
}
@Override
protected boolean alterTableAfterNameRest(SQLAlterTableStatement stmt) {
    // Hive extension: ALTER TABLE <name> RECOVER PARTITIONS.
    // Returning true tells the caller the clause was not recognized here.
    if (!lexer.identifierEquals(Constants.RECOVER)) {
        return true;
    }
    lexer.nextToken();
    acceptIdentifier("PARTITIONS");
    stmt.addItem(new SQLAlterTableRecoverPartitions());
    return false;
}
@Override
protected boolean alterTableSetRest(SQLAlterTableStatement stmt) {
    // Hive extension: ALTER TABLE ... SET FILEFORMAT <format>.
    // Returning true tells the caller the clause was not recognized here.
    if (!lexer.identifierEquals("FILEFORMAT")) {
        return true;
    }
    lexer.nextToken();
    SQLAlterTableSetFileFormat fileFormat = new SQLAlterTableSetFileFormat();
    fileFormat.setValue(this.exprParser.primary());
    stmt.addItem(fileFormat);
    return false;
}
@Override
public void parseCreateTableSupportSchema() {
    // Hive accepts CREATE SCHEMA as a synonym for CREATE DATABASE; consume
    // SCHEMA when present, otherwise DATABASE is mandatory.
    if (lexer.token() == Token.SCHEMA) {
        lexer.nextToken();
        return;
    }
    accept(Token.DATABASE);
}
@Override
protected boolean parseAlterTableAddColumnBefore() {
    // Accepts "ADD COLUMNS (" or a bare "ADD (". Returns true only when an
    // opening parenthesis was consumed, i.e. a column list follows.
    if (lexer.identifierEquals("COLUMNS")) {
        lexer.nextToken();
    }
    if (lexer.token() == LPAREN) {
        lexer.nextToken();
        return true;
    }
    return false;
}
}

View File

@ -30,6 +30,7 @@ import java.util.Map;
public class HiveCreateTableStatement extends SQLCreateTableStatement {
protected List<SQLExpr> skewedBy = new ArrayList<SQLExpr>();
protected List<SQLExpr> skewedByOn = new ArrayList<SQLExpr>();
protected boolean skewedByStoreAsDirectories;
protected Map<String, SQLObject> serdeProperties = new LinkedHashMap<String, SQLObject>();
protected boolean likeQuery; // for DLA
@ -139,6 +140,14 @@ public class HiveCreateTableStatement extends SQLCreateTableStatement {
this.skewedByOn.add(item);
}
// Records whether the SKEWED BY clause ended with "STORED AS DIRECTORIES".
public void setSkewedByStoreAsDirectories(boolean skewedByStoreAsDirectories) {
    this.skewedByStoreAsDirectories = skewedByStoreAsDirectories;
}
// True when the SKEWED BY clause was parsed with "STORED AS DIRECTORIES".
public boolean isSkewedByStoreAsDirectories() {
    return skewedByStoreAsDirectories;
}
public Map<String, SQLObject> getSerdeProperties() {
return serdeProperties;
}

View File

@ -20,6 +20,7 @@ import com.alibaba.druid.sql.dialect.hive.ast.HiveInsert;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsertStatement;
import com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveMsckRepairStatement;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
@ -73,4 +74,11 @@ public interface HiveASTVisitor extends SQLASTVisitor {
default void endVisit(HiveAddJarStatement x) {
}
// Visits a Hive CREATE TABLE node; returning true descends into children.
default boolean visit(HiveCreateTableStatement x) {
    return true;
}
// Called after a Hive CREATE TABLE node has been visited; no-op by default.
default void endVisit(HiveCreateTableStatement x) {
}
}

View File

@ -16,22 +16,27 @@
package com.alibaba.druid.sql.dialect.hive.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLAdhocTableSource;
import com.alibaba.druid.sql.ast.SQLCommentHint;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.expr.SQLBinaryOpExpr;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.hive.ast.HiveAddJarStatement;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsert;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsertStatement;
import com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveMsckRepairStatement;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import java.util.List;
import java.util.Map;
public class HiveOutputVisitor extends SQLASTOutputVisitor implements HiveASTVisitor {
{
@ -511,4 +516,184 @@ public class HiveOutputVisitor extends SQLASTOutputVisitor implements HiveASTVis
incrementIndent();
println();
}
@Override
public boolean visit(HiveCreateTableStatement x) {
    // Render the whole CREATE TABLE text ourselves; returning false stops
    // the generic visitor from re-printing the children.
    printCreateTable(x, true, true);
    return false;
}
@Override
public boolean visit(SQLCreateTableStatement x) {
    // Route Hive-specific statements to the specialized overload; anything
    // else falls back to the generic output visitor.
    return x instanceof HiveCreateTableStatement
            ? visit((HiveCreateTableStatement) x)
            : super.visit(x);
}
/**
 * Emits the full Hive CREATE TABLE text for {@code x} in clause order:
 * CREATE [features] TABLE name (elements) INHERITS/USING/COMMENT/MAPPED BY/
 * PARTITIONED BY/CLUSTERED BY/SORTED BY/INTO n BUCKETS/SKEWED BY/ROW FORMAT/
 * STORED AS|BY/LOCATION/TBLPROPERTIES/AS|LIKE select.
 *
 * @param x                   statement to render
 * @param printSelect         when false, a trailing AS/LIKE select is omitted
 * @param printCommentAdvance when true the COMMENT clause is printed right
 *                            after the table elements instead of after SKEWED BY
 */
protected void printCreateTable(HiveCreateTableStatement x, boolean printSelect,
                                boolean printCommentAdvance) {
    final SQLObject parent = x.getParent();
    if (x.hasBeforeComment()) {
        printlnComments(x.getBeforeCommentsDirect());
    }
    if (parent instanceof SQLAdhocTableSource) {
        // Inline table source: the surrounding construct already printed CREATE.
        // skip
    } else {
        print0(ucase ? "CREATE " : "create ");
    }
    printCreateTableFeatures(x);
    print0(ucase ? "TABLE " : "table ");
    if (x.isIfNotExists()) {
        print0(ucase ? "IF NOT EXISTS " : "if not exists ");
    }
    printTableSourceExpr(x.getName());
    printTableElements(x.getTableElementList());
    SQLExprTableSource inherits = x.getInherits();
    if (inherits != null) {
        print0(ucase ? " INHERITS (" : " inherits (");
        inherits.accept(this);
        print(')');
    }
    SQLExpr using = x.getUsing();
    if (using != null) {
        println();
        print0(ucase ? "USING " : "using ");
        using.accept(this);
    }
    if (printCommentAdvance) {
        printComment(x.getComment());
    }
    List<SQLAssignItem> mappedBy = x.getMappedBy();
    if (mappedBy != null && mappedBy.size() > 0) {
        println();
        print0(ucase ? "MAPPED BY (" : "mapped by (");
        printAndAccept(mappedBy, ", ");
        // Closing paren is case-independent; no need for a ucase ternary.
        print(')');
    }
    printPartitionedBy(x);
    List<SQLSelectOrderByItem> clusteredBy = x.getClusteredBy();
    if (clusteredBy.size() > 0) {
        println();
        print0(ucase ? "CLUSTERED BY (" : "clustered by (");
        printAndAccept(clusteredBy, ",");
        print(')');
    }
    List<SQLSelectOrderByItem> sortedBy = x.getSortedBy();
    printSortedBy(sortedBy);
    int buckets = x.getBuckets();
    if (buckets > 0) {
        println();
        print0(ucase ? "INTO " : "into ");
        print(buckets);
        print0(ucase ? " BUCKETS" : " buckets");
    }
    List<SQLExpr> skewedBy = x.getSkewedBy();
    if (skewedBy.size() > 0) {
        println();
        print0(ucase ? "SKEWED BY (" : "skewed by (");
        printAndAccept(skewedBy, ",");
        print(')');
        List<SQLExpr> skewedByOn = x.getSkewedByOn();
        if (skewedByOn.size() > 0) {
            print0(ucase ? " ON (" : " on (");
            printAndAccept(skewedByOn, ",");
            print(')');
        }
        if (x.isSkewedByStoreAsDirectories()) {
            print(ucase ? " STORED AS DIRECTORIES" : " stored as directories");
        }
    }
    if (!printCommentAdvance) {
        printComment(x.getComment());
    }
    printPartitionBy(x);
    SQLExternalRecordFormat format = x.getRowFormat();
    SQLExpr storedBy = x.getStoredBy();
    if (format != null) {
        println();
        print0(ucase ? "ROW FORMAT" : "row format");
        if (format.getSerde() == null) {
            // FIX: lowercase branch had a stray trailing space (" delimited ").
            print0(ucase ? " DELIMITED" : " delimited");
        }
        visit(format);
        if (storedBy == null) {
            printSerdeProperties(x.getSerdeProperties());
        }
    }
    printCreateTableLike(x);
    SQLExpr storedAs = x.getStoredAs();
    if (storedAs != null) {
        println();
        if (x.isLbracketUse()) {
            print("[");
        }
        print0(ucase ? "STORED AS" : "stored as");
        if (storedAs instanceof SQLIdentifierExpr) {
            print(' ');
            printExpr(storedAs, parameterized);
        } else {
            incrementIndent();
            println();
            printExpr(storedAs, parameterized);
            decrementIndent();
        }
        if (x.isRbracketUse()) {
            print("]");
        }
    }
    if (storedBy != null) {
        println();
        // FIX: lowercase branch previously printed "STORED by " (mixed case).
        print0(ucase ? "STORED BY " : "stored by ");
        printExpr(storedBy, parameterized);
        Map<String, SQLObject> serdeProperties = x.getSerdeProperties();
        printSerdeProperties(serdeProperties);
    }
    SQLExpr location = x.getLocation();
    if (location != null) {
        println();
        print0(ucase ? "LOCATION " : "location ");
        printExpr(location, parameterized);
    }
    printCached(x);
    printTableOptions(x);
    printLifeCycle(x.getLifeCycle());
    SQLSelect select = x.getSelect();
    if (printSelect && select != null) {
        println();
        if (x.isLikeQuery()) { // for dla
            print0(ucase ? "LIKE" : "like");
        } else {
            print0(ucase ? "AS" : "as");
        }
        println();
        visit(select);
    }
}
// Hook for subclasses (e.g. Impala) to emit CACHED/UNCACHED clauses;
// Hive itself has nothing to print here.
protected void printCached(SQLCreateTableStatement x) {
    // do nothing
}
}

View File

@ -1,13 +0,0 @@
package com.alibaba.druid.sql.dialect.holo.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGLexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
public class HoloLexer
extends PGLexer {
public HoloLexer(String input, SQLParserFeature... features) {
super(input, features);
dbType = DbType.hologres;
}
}

View File

@ -1,26 +0,0 @@
package com.alibaba.druid.sql.dialect.holo.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
public class HoloStatementParser
extends PGSQLStatementParser {
public HoloStatementParser(HoloExprParser parser) {
super(parser);
dbType = DbType.hologres;
}
public HoloStatementParser(String sql, SQLParserFeature... features) {
this(new HoloExprParser(sql, features));
}
@Override
public HoloSelectParser createSQLSelectParser() {
return new HoloSelectParser(this.exprParser, selectListCache);
}
public HoloCreateTableParser getSQLCreateTableParser() {
return new HoloCreateTableParser(this.exprParser);
}
}

View File

@ -1,16 +0,0 @@
package com.alibaba.druid.sql.dialect.holo.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGOutputVisitor;
public class HoloOutputVisitor extends PGOutputVisitor {
public HoloOutputVisitor(StringBuilder appender, boolean parameterized) {
super(appender, parameterized);
dbType = DbType.hologres;
}
public HoloOutputVisitor(StringBuilder appender) {
super(appender);
dbType = DbType.hologres;
}
}

View File

@ -1,12 +1,12 @@
package com.alibaba.druid.sql.dialect.holo.parser;
package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGCreateTableParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
public class HoloCreateTableParser
public class HologresCreateTableParser
extends PGCreateTableParser {
public HoloCreateTableParser(SQLExprParser exprParser) {
public HologresCreateTableParser(SQLExprParser exprParser) {
super(exprParser);
dbType = DbType.hologres;
}

View File

@ -1,19 +1,21 @@
package com.alibaba.druid.sql.dialect.holo.parser;
package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGExprParser;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
public class HoloExprParser
public class HologresExprParser
extends PGExprParser {
public HoloExprParser(String sql, SQLParserFeature... features) {
super(sql, features);
public HologresExprParser(String sql, SQLParserFeature... features) {
super(new HologresLexer(sql, features));
lexer.nextToken();
dbType = DbType.hologres;
}
public HoloExprParser(Lexer lexer) {
public HologresExprParser(Lexer lexer) {
super(lexer);
dbType = DbType.hologres;
}
}

View File

@ -0,0 +1,19 @@
package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGLexer;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.SQLParserFeature;
/**
 * Lexer for Hologres SQL. Hologres is PostgreSQL-compatible, so the PG
 * keyword table and scanning rules are inherited unchanged; only the
 * reported DbType differs.
 */
public class HologresLexer
        extends PGLexer {
    public HologresLexer(String input, SQLParserFeature... features) {
        super(input, features);
        dbType = DbType.hologres;
    }
}

View File

@ -1,13 +1,13 @@
package com.alibaba.druid.sql.dialect.holo.parser;
package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSelectParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLSelectListCache;
public class HoloSelectParser
public class HologresSelectParser
extends PGSelectParser {
public HoloSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
public HologresSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
super(exprParser, selectListCache);
dbType = DbType.hologres;
}

View File

@ -0,0 +1,26 @@
package com.alibaba.druid.sql.dialect.hologres.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.parser.PGSQLStatementParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
/**
 * Statement parser for Hologres SQL. Delegates all grammar handling to the
 * PostgreSQL parser and only swaps in Hologres-flavored sub-parsers and the
 * Hologres DbType tag.
 */
public class HologresStatementParser
        extends PGSQLStatementParser {
    public HologresStatementParser(HologresExprParser parser) {
        super(parser);
        dbType = DbType.hologres;
    }

    // Convenience constructor: builds the expression parser from raw SQL.
    public HologresStatementParser(String sql, SQLParserFeature... features) {
        this(new HologresExprParser(sql, features));
    }

    @Override
    public HologresSelectParser createSQLSelectParser() {
        return new HologresSelectParser(this.exprParser, selectListCache);
    }

    public HologresCreateTableParser getSQLCreateTableParser() {
        return new HologresCreateTableParser(this.exprParser);
    }
}

View File

@ -0,0 +1,16 @@
package com.alibaba.druid.sql.dialect.hologres.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.postgresql.visitor.PGOutputVisitor;
/**
 * SQL output visitor for Hologres. Rendering is identical to PostgreSQL;
 * only the DbType tag differs so dialect checks resolve to hologres.
 */
public class HologresOutputVisitor extends PGOutputVisitor {
    public HologresOutputVisitor(StringBuilder appender, boolean parameterized) {
        super(appender, parameterized);
        dbType = DbType.hologres;
    }

    public HologresOutputVisitor(StringBuilder appender) {
        super(appender);
        dbType = DbType.hologres;
    }
}

View File

@ -0,0 +1,53 @@
package com.alibaba.druid.sql.dialect.impala.ast;
import com.alibaba.druid.sql.ast.SQLPartitionValue;
/**
 * Partition-value node for Impala range partitions (e.g. Kudu tables):
 * carries optional integer bounds and the comparison operator on each side,
 * as in "PARTITION 0 &lt;= VALUES &lt; 10".
 */
public class ImpalaSQLPartitionValue extends SQLPartitionValue {
    private Integer leftBound;
    private Integer rightBound;
    private Operator leftOperator;
    private Operator rightOperator;

    public void setOperator(Operator operator) {
        this.operator = operator;
    }

    public Integer getLeftBound() {
        return leftBound;
    }

    public void setLeftBound(Integer leftBound) {
        this.leftBound = leftBound;
    }

    public Integer getRightBound() {
        return rightBound;
    }

    public void setRightBound(Integer rightBound) {
        this.rightBound = rightBound;
    }

    /**
     * Derives a partition name from the bounds; a missing bound contributes
     * an empty segment, e.g. "partition_0_10" or "partition__10".
     */
    public String constructPartitionName() {
        String left = leftBound == null ? "" : leftBound.toString();
        String right = rightBound == null ? "" : rightBound.toString();
        return "partition_" + left + "_" + right;
    }

    public Operator getLeftOperator() {
        return leftOperator;
    }

    public void setLeftOperator(Operator leftOperator) {
        this.leftOperator = leftOperator;
    }

    public Operator getRightOperator() {
        return rightOperator;
    }

    public void setRightOperator(Operator rightOperator) {
        this.rightOperator = rightOperator;
    }
}

View File

@ -0,0 +1,243 @@
package com.alibaba.druid.sql.dialect.impala.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.hive.parser.HiveCreateTableParser;
import com.alibaba.druid.sql.dialect.impala.stmt.ImpalaCreateTableStatement;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
public class ImpalaCreateTableParser extends HiveCreateTableParser {
@Override
public SQLCreateTableParser getSQLCreateTableParser() {
    // Hand out an Impala-aware create-table parser sharing this expr parser.
    return new ImpalaCreateTableParser(this.exprParser);
}
// Builds an Impala create-table parser on top of the Hive grammar.
public ImpalaCreateTableParser(SQLExprParser exprParser) {
    super(exprParser);
    dbType = DbType.impala;
}
// Same as above, but reuses an already-positioned lexer.
public ImpalaCreateTableParser(Lexer lexer) {
    super(lexer);
    dbType = DbType.impala;
}
// Impala statements get their own AST node so dialect-only clauses
// (Kudu partitioning, CACHED/UNCACHED) have somewhere to live.
protected ImpalaCreateTableStatement newCreateStatement() {
    return new ImpalaCreateTableStatement();
}
@Override
protected void parseCreateTableRest(SQLCreateTableStatement createTable) {
ImpalaCreateTableStatement stmt = (ImpalaCreateTableStatement) createTable;
if (lexer.nextIf(Token.PARTITIONED)) {
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
SQLColumnDefinition column = this.exprParser.parseColumn();
stmt.addPartitionColumn(column);
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
if (lexer.token() != Token.COMMA) {
break;
} else {
lexer.nextToken();
if (lexer.isKeepComments() && lexer.hasComment()) {
column.addAfterComment(lexer.readAndResetComments());
}
}
}
accept(Token.RPAREN);
}
if (lexer.identifierEquals(FnvHash.Constants.SORT)) {
parseSortedBy(stmt);
}
// for kudu table
SQLPartitionBy partitionClause = parsePartitionBy();
stmt.setPartitionBy(partitionClause);
if (lexer.nextIf(Token.COMMENT)) {
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.token() == Token.ROW
|| lexer.identifierEquals(FnvHash.Constants.ROW)) {
parseRowFormat(stmt);
}
if (Token.LBRACKET.equals(lexer.token())) {
stmt.setLbracketUse(true);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.STORED)) {
lexer.nextToken();
accept(Token.AS);
SQLName name = this.exprParser.name();
stmt.setStoredAs(name);
}
if (Token.RBRACKET.equals(lexer.token())) {
stmt.setRbracketUse(true);
lexer.nextToken();
}
if (lexer.identifierEquals(FnvHash.Constants.LOCATION)) {
lexer.nextToken();
SQLExpr location = this.exprParser.primary();
stmt.setLocation(location);
}
if (lexer.identifierEquals(FnvHash.Constants.UNCACHED)) {
lexer.nextToken();
stmt.setUnCached(true);
}
if (lexer.identifierEquals(FnvHash.Constants.CACHED)) {
lexer.nextToken();
accept(Token.IN);
SQLExpr poolName = this.exprParser.primary();
stmt.setCachedPool(poolName);
if (lexer.hashLCase() == FnvHash.Constants.WITH) {
lexer.nextToken();
acceptIdentifier("REPLICATION");
accept(Token.EQ);
stmt.setCachedReplication(this.exprParser.parseIntValue());
}
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
parseOptions(stmt);
}
if (lexer.token() == Token.SELECT || lexer.token() == Token.AS) {
if (lexer.token() == Token.AS) {
lexer.nextToken();
}
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
if (lexer.token() == Token.SELECT || lexer.token() == Token.AS) {
if (lexer.token() == Token.AS) {
lexer.nextToken();
}
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
}
if (lexer.token() == Token.LIKE) {
lexer.nextToken();
Lexer.SavePoint mark = lexer.mark();
if (lexer.token() == Token.SELECT) {
stmt.setLikeQuery(true);
SQLSelect select = this.createSQLSelectParser().select();
stmt.setSelect(select);
} else {
lexer.reset(mark);
if (lexer.identifierEquals(FnvHash.Constants.MAPPING)) {
SQLExpr like = this.exprParser.primary();
stmt.setLike(new SQLExprTableSource(like));
} else {
SQLName name = this.exprParser.name();
stmt.setLike(name);
}
}
}
if (lexer.token() == Token.COMMENT) {
lexer.nextToken();
SQLExpr comment = this.exprParser.expr();
stmt.setComment(comment);
}
if (lexer.identifierEquals(FnvHash.Constants.USING) || lexer.token() == Token.USING) {
lexer.nextToken();
SQLExpr using = this.exprParser.expr();
stmt.setUsing(using);
}
if (lexer.identifierEquals(FnvHash.Constants.TBLPROPERTIES)) {
parseOptions(stmt);
}
}
public SQLPartitionBy parsePartitionBy() {
if (lexer.nextIf(Token.PARTITION)) {
accept(Token.BY);
if (lexer.nextIfIdentifier(FnvHash.Constants.HASH)) {
SQLPartitionBy hashPartition = new SQLPartitionByHash();
if (lexer.nextIf(Token.LPAREN)) {
// e.g. partition by hash(id,name) partitions 16
// TODO: 'partition by hash(id) partitions 4, hash(name) partitions 4' not supported yet
if (lexer.token() != Token.IDENTIFIER) {
throw new ParserException("expect identifier. " + lexer.info());
}
for (; ; ) {
hashPartition.addColumn(this.exprParser.name());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
acceptIdentifier("PARTITIONS");
hashPartition.setPartitionsCount(acceptInteger());
return hashPartition;
} else {
// e.g. partition by hash partitions 16
acceptIdentifier("PARTITIONS");
int numPartitions = acceptInteger();
hashPartition.setPartitionsCount(numPartitions);
return hashPartition;
}
} else if (lexer.nextIfIdentifier(FnvHash.Constants.RANGE)) {
return partitionByRange();
}
}
return null;
}
private SQLPartitionByRange partitionByRange() {
SQLPartitionByRange rangePartition = new SQLPartitionByRange();
accept(Token.LPAREN);
for (; ; ) {
rangePartition.addColumn(this.exprParser.name());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
accept(Token.LPAREN);
for (; ; ) {
rangePartition.addPartition(this.getExprParser().parsePartition());
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
return rangePartition;
}
@Override
public ImpalaExprParser getExprParser() {
return (ImpalaExprParser) exprParser;
}
}

View File

@ -0,0 +1,96 @@
package com.alibaba.druid.sql.dialect.impala.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLPartition;
import com.alibaba.druid.sql.ast.SQLPartitionValue;
import com.alibaba.druid.sql.ast.expr.SQLCharExpr;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLIntegerExpr;
import com.alibaba.druid.sql.dialect.hive.parser.HiveExprParser;
import com.alibaba.druid.sql.dialect.impala.ast.ImpalaSQLPartitionValue;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.util.FnvHash;
public class ImpalaExprParser extends HiveExprParser {
    public ImpalaExprParser(String sql, SQLParserFeature... features) {
        super(new ImpalaLexer(sql, features));
        // Advance once so parsing starts positioned on the first real token.
        this.lexer.nextToken();
        dbType = DbType.impala;
    }

    /**
     * Parses one partition spec inside a Kudu RANGE partition list. Accepted shapes:
     *   PARTITION 10 <= VALUES < 20   (bounded range; operators are < or <=)
     *   PARTITION VALUES < 20         (upper bound only)
     *   PARTITION VALUE = 'x'         (single value)
     *   PARTITION VALUE = (1, 'x')    (value list)
     * The partition name is synthesized from the parsed bounds/values.
     */
    public SQLPartition parsePartition() {
        accept(Token.PARTITION);
        SQLPartition partitionDef = new SQLPartition();
        ImpalaSQLPartitionValue values = new ImpalaSQLPartitionValue();
        SQLName name;
        if (lexer.token() == Token.LITERAL_INT) {
            // Leading integer literal: lower bound of a range spec.
            Number number = lexer.integerValue();
            lexer.nextToken();
            if (lexer.token() == Token.LT || lexer.token() == Token.LTEQ) {
                SQLPartitionValue.Operator leftOperator = getOperator(lexer.token());
                lexer.nextToken();
                values.setLeftOperator(leftOperator);
                values.setLeftBound(number.intValue());
                accept(Token.VALUES);
                // Optional upper bound after VALUES.
                if (lexer.token() == Token.LT || lexer.token() == Token.LTEQ) {
                    SQLPartitionValue.Operator rightOperator = getOperator(lexer.token());
                    lexer.nextToken();
                    values.setRightOperator(rightOperator);
                    values.setRightBound(lexer.integerValue().intValue());
                    accept(Token.LITERAL_INT);
                }
            }
        } else if (lexer.token() == Token.VALUES) {
            // VALUES < n / VALUES <= n: upper bound only.
            accept(Token.VALUES);
            values.setRightOperator(getOperator(lexer.token()));
            lexer.nextToken();
            values.setRightBound(lexer.integerValue().intValue());
            accept(Token.LITERAL_INT);
        } else if (lexer.identifierEquals(FnvHash.Constants.VALUE)) {
            // VALUE = <literal> or VALUE = (<literal>, ...)
            acceptIdentifier("VALUE");
            accept(Token.EQ);
            values.setOperator(SQLPartitionValue.Operator.Equal);
            if (lexer.nextIf(Token.LPAREN)) {
                // for multiple values
                for (; ; ) {
                    if (lexer.token() == Token.LITERAL_INT) {
                        values.addItem(new SQLIntegerExpr(lexer.integerValue().intValue()));
                        lexer.nextToken();
                    } else if (lexer.token() == Token.LITERAL_CHARS) {
                        values.addItem(new SQLCharExpr(lexer.stringVal()));
                        lexer.nextToken();
                    }
                    if (lexer.token() == Token.COMMA) {
                        lexer.nextToken();
                        continue;
                    }
                    break;
                }
                accept(Token.RPAREN);
            } else {
                // for single specific value
                SQLCharExpr charExpr = new SQLCharExpr(lexer.stringVal());
                values.addItem(charExpr);
                lexer.nextToken();
            }
        }
        partitionDef.setValues(values);
        // Name is derived from the bounds so each range partition gets a stable identifier.
        name = new SQLIdentifierExpr(values.constructPartitionName());
        partitionDef.setName(name);
        return partitionDef;
    }

    /** Maps < and <= tokens to partition operators; any other token yields null. */
    private SQLPartitionValue.Operator getOperator(Token token) {
        switch (token) {
            case LT:
                return SQLPartitionValue.Operator.LessThan;
            case LTEQ:
                return SQLPartitionValue.Operator.LessThanEqual;
            default:
                return null;
        }
    }
}

View File

@ -0,0 +1,23 @@
package com.alibaba.druid.sql.dialect.impala.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.hive.parser.HiveLexer;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.SQLParserFeature;
/**
 * Lexer for the Impala dialect. Impala reuses Hive's keyword set, so no
 * keyword override is needed — the original loadKeywords() override merely
 * delegated to super and has been removed as redundant.
 */
public class ImpalaLexer extends HiveLexer {
    public ImpalaLexer(String input) {
        super(input);
        dbType = DbType.impala;
    }

    public ImpalaLexer(String input, SQLParserFeature... features) {
        super(input, features);
        dbType = DbType.impala;
    }
}

View File

@ -0,0 +1,35 @@
package com.alibaba.druid.sql.dialect.impala.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLHint;
import com.alibaba.druid.sql.ast.statement.SQLExprHint;
import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource;
import com.alibaba.druid.sql.dialect.hive.parser.HiveSelectParser;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLSelectListCache;
import com.alibaba.druid.sql.parser.Token;
import java.util.ArrayList;
import java.util.List;
public class ImpalaSelectParser extends HiveSelectParser {
    {
        dbType = DbType.impala;
    }

    public ImpalaSelectParser(SQLExprParser exprParser, SQLSelectListCache selectListCache) {
        super(exprParser, selectListCache);
    }

    /**
     * Reads an optional join hint: either a comment-style hint token or
     * Impala's bracketed hint form "[shuffle]"/"[broadcast]". The (possibly
     * empty) hint list is always attached to the join node.
     */
    @Override
    protected void parseJoinHint(SQLJoinTableSource join) {
        List<SQLHint> joinHints = new ArrayList<>();
        switch (lexer.token()) {
            case HINT:
                this.exprParser.parseHints(joinHints);
                break;
            case LBRACKET:
                lexer.nextToken();
                joinHints.add(new SQLExprHint(expr()));
                accept(Token.RBRACKET);
                break;
            default:
                break;
        }
        join.setHints(joinHints);
    }
}

View File

@ -0,0 +1,30 @@
package com.alibaba.druid.sql.dialect.impala.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.dialect.hive.parser.HiveSelectParser;
import com.alibaba.druid.sql.dialect.hive.parser.HiveStatementParser;
import com.alibaba.druid.sql.parser.SQLCreateTableParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
public class ImpalaStatementParser extends HiveStatementParser {
    {
        dbType = DbType.impala;
    }

    public ImpalaStatementParser(String sql, SQLParserFeature... features) {
        super(new ImpalaExprParser(sql, features));
    }

    public HiveSelectParser createSQLSelectParser() {
        return new ImpalaSelectParser(this.exprParser, selectListCache);
    }

    public SQLCreateTableParser getSQLCreateTableParser() {
        return new ImpalaCreateTableParser(this.exprParser);
    }

    public SQLCreateTableStatement parseCreateTable() {
        // Delegate to the factory method instead of constructing the parser a
        // second time, so both code paths always use the same parser type.
        return getSQLCreateTableParser().parseCreateTable();
    }
}

View File

@ -0,0 +1,38 @@
package com.alibaba.druid.sql.dialect.impala.stmt;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateTableStatement;
/**
 * CREATE TABLE AST node for the Impala dialect. Adds the HDFS caching clause
 * state on top of the Hive statement: CACHED IN 'pool' [WITH REPLICATION = n]
 * or UNCACHED.
 */
public class ImpalaCreateTableStatement extends HiveCreateTableStatement {
    // True when the table was declared UNCACHED.
    private boolean unCached;
    // Cache pool expression from "CACHED IN 'pool'"; null when absent.
    private SQLExpr cachedPool;
    // Replication factor from "WITH REPLICATION = n"; -1 means unspecified.
    private int cachedReplication = -1;

    /** @return true when a cache pool is present, i.e. the table is CACHED. */
    public boolean isCached() {
        return cachedPool != null;
    }

    public SQLExpr getCachedPool() {
        return cachedPool;
    }

    public void setCachedPool(SQLExpr cachedPool) {
        this.cachedPool = cachedPool;
    }

    public int getCachedReplication() {
        return cachedReplication;
    }

    public void setCachedReplication(int cachedReplication) {
        this.cachedReplication = cachedReplication;
    }

    public boolean isUnCached() {
        return unCached;
    }

    public void setUnCached(boolean unCached) {
        this.unCached = unCached;
    }
}

View File

@ -0,0 +1,12 @@
package com.alibaba.druid.sql.dialect.impala.visitor;
import com.alibaba.druid.sql.dialect.impala.stmt.ImpalaCreateTableStatement;
/**
 * Visitor callbacks for Impala-specific AST nodes. Default implementations
 * visit children (visit returns true) and do nothing on endVisit, so
 * implementors override only what they need.
 */
public interface ImpalaASTVisitor {
    default boolean visit(ImpalaCreateTableStatement x) {
        return true;
    }

    default void endVisit(ImpalaCreateTableStatement x) {
    }
}

View File

@ -0,0 +1,149 @@
package com.alibaba.druid.sql.dialect.impala.visitor;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprHint;
import com.alibaba.druid.sql.ast.statement.SQLJoinTableSource;
import com.alibaba.druid.sql.ast.statement.SQLSelectOrderByItem;
import com.alibaba.druid.sql.dialect.hive.visitor.HiveOutputVisitor;
import com.alibaba.druid.sql.dialect.impala.ast.ImpalaSQLPartitionValue;
import com.alibaba.druid.sql.dialect.impala.stmt.ImpalaCreateTableStatement;
import java.util.List;
/**
 * SQL output visitor for the Impala dialect. Extends the Hive visitor with
 * Impala's bracketed join hints, the CACHED/UNCACHED clause, SORT BY, and the
 * Kudu partition-spec printing.
 */
public class ImpalaOutputVisitor extends HiveOutputVisitor implements ImpalaASTVisitor {
    public ImpalaOutputVisitor(StringBuilder appender) {
        super(appender);
        dbType = DbType.impala;
    }

    public ImpalaOutputVisitor(StringBuilder appender, boolean parameterized) {
        super(appender, parameterized);
        dbType = DbType.impala;
    }

    /** Prints join hints: comment hints verbatim, expression hints as "[hint]". */
    @Override
    protected void printJoinHint(SQLJoinTableSource x) {
        if (!x.getHints().isEmpty()) {
            print(' ');
            for (SQLHint joinHint : x.getHints()) {
                if (joinHint instanceof SQLCommentHint) {
                    print0((joinHint).toString());
                } else if (joinHint instanceof SQLExprHint) {
                    print0("[");
                    joinHint.accept(this);
                    print0("]");
                }
            }
        }
    }

    /** Prints CACHED IN 'pool' [WITH REPLICATION = n] and/or UNCACHED. */
    @Override
    protected void printCached(SQLCreateTableStatement x) {
        ImpalaCreateTableStatement createTable = (ImpalaCreateTableStatement) x;
        if (createTable.isCached()) {
            println();
            print0(ucase ? "CACHED IN " : "cached in ");
            createTable.getCachedPool().accept(this);
            if (createTable.getCachedReplication() != -1) {
                // Fix: honor the ucase setting, consistent with every other keyword here
                // (the original always printed this fragment in upper case).
                print0(ucase ? " WITH REPLICATION = " : " with replication = ");
                print0(String.valueOf(createTable.getCachedReplication()));
            }
        }
        if (createTable.isUnCached()) {
            println();
            print0(ucase ? "UNCACHED" : "uncached");
        }
    }

    @Override
    public boolean visit(ImpalaCreateTableStatement x) {
        printCreateTable(x, true, false);
        return false;
    }

    @Override
    public boolean visit(SQLCreateTableStatement x) {
        // Route Impala statements to the dialect-specific overload.
        if (x instanceof ImpalaCreateTableStatement) {
            return visit((ImpalaCreateTableStatement) x);
        }
        return super.visit(x);
    }

    /** Impala uses SORT BY (...) rather than Hive's SORTED BY. */
    @Override
    protected void printSortedBy(List<SQLSelectOrderByItem> sortedBy) {
        if (!sortedBy.isEmpty()) {
            println();
            print0(ucase ? "SORT BY (" : "sort by (");
            printAndAccept(sortedBy, ", ");
            print(')');
        }
    }

    @Override
    protected void printPartitionBy(SQLCreateTableStatement x) {
        SQLPartitionBy partitionBy = x.getPartitioning();
        if (partitionBy == null) {
            return;
        }
        println();
        print0(ucase ? "PARTITION BY " : "partition by ");
        partitionBy.accept(this);
    }

    @Override
    public boolean visit(SQLPartition x) {
        ImpalaSQLPartitionValue values = (ImpalaSQLPartitionValue) x.getValues();
        values.accept(this);
        return false;
    }

    /**
     * Prints an Impala partition spec: either "VALUE = literal" / "VALUE = (list)"
     * or the range form "[lower OP] VALUES [OP upper]" where OP is &lt; or &lt;=.
     */
    @Override
    public boolean visit(SQLPartitionValue x) {
        ImpalaSQLPartitionValue partitionValue = (ImpalaSQLPartitionValue) x;
        print0(ucase ? " PARTITION " : " partition ");
        if (partitionValue.getOperator() == SQLPartitionValue.Operator.Equal) {
            print0(ucase ? "VALUE" : "value");
            print0(" = ");
            if (partitionValue.getItems().size() == 1) {
                // for single specific value
                printExpr(partitionValue.getItems().get(0), parameterized);
            } else {
                print("(");
                printAndAccept(partitionValue.getItems(), ", ", false);
                print(')');
            }
        } else {
            if (partitionValue.getLeftBound() != null) {
                print(partitionValue.getLeftBound());
                printOperator(partitionValue.getLeftOperator());
            }
            print0(ucase ? "VALUES" : "values");
            if (partitionValue.getRightBound() != null) {
                printOperator(partitionValue.getRightOperator());
                print(partitionValue.getRightBound());
            }
        }
        return false;
    }

    private void printOperator(SQLPartitionValue.Operator operator) {
        switch (operator) {
            case LessThan:
                print0(" < ");
                break;
            case LessThanEqual:
                print0(" <= ");
                break;
            case In:
                print0(" IN ");
                break;
            case List:
                print0(" LIST ");
                break;
            default:
                throw new IllegalArgumentException("operator not support");
        }
    }
}

View File

@ -21,7 +21,6 @@ import com.alibaba.druid.sql.ast.SQLIndex;
import com.alibaba.druid.sql.ast.statement.SQLTableConstraint;
import com.alibaba.druid.sql.ast.statement.SQLUnique;
import com.alibaba.druid.sql.ast.statement.SQLUniqueConstraint;
import com.alibaba.druid.sql.dialect.ads.visitor.AdsVisitor;
import com.alibaba.druid.sql.dialect.mysql.visitor.MySqlASTVisitor;
import com.alibaba.druid.sql.visitor.SQLASTVisitor;
@ -34,20 +33,9 @@ public class MySqlKey extends SQLUnique implements SQLUniqueConstraint, SQLTable
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof MySqlASTVisitor) {
accept0((MySqlASTVisitor) visitor);
} else if (visitor instanceof AdsVisitor) {
accept0((AdsVisitor) visitor);
}
}
protected void accept0(AdsVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.getName());
acceptChild(visitor, this.getColumns());
acceptChild(visitor, this.getName());
}
visitor.endVisit(this);
}
protected void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
acceptChild(visitor, this.getName());

View File

@ -24,7 +24,6 @@ import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.ads.visitor.AdsOutputVisitor;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlKey;
import com.alibaba.druid.sql.dialect.mysql.ast.MySqlUnique;
import com.alibaba.druid.sql.dialect.mysql.ast.expr.MySqlExprImpl;
@ -103,44 +102,11 @@ public class MySqlCreateTableStatement extends SQLCreateTableStatement implement
protected void accept0(SQLASTVisitor visitor) {
if (visitor instanceof MySqlASTVisitor) {
accept0((MySqlASTVisitor) visitor);
} else if (visitor instanceof AdsOutputVisitor) {
accept0((AdsOutputVisitor) visitor);
} else {
super.accept0(visitor);
}
}
public void accept0(AdsOutputVisitor visitor) {
if (visitor.visit(this)) {
for (int i = 0; i < hints.size(); i++) {
final SQLCommentHint hint = hints.get(i);
if (hint != null) {
hint.accept(visitor);
}
}
if (tableSource != null) {
tableSource.accept(visitor);
}
for (int i = 0; i < tableElementList.size(); i++) {
final SQLTableElement element = tableElementList.get(i);
if (element != null) {
element.accept(visitor);
}
}
if (like != null) {
like.accept(visitor);
}
if (select != null) {
select.accept(visitor);
}
}
visitor.endVisit(this);
}
public void accept0(MySqlASTVisitor visitor) {
if (visitor.visit(this)) {
for (int i = 0; i < hints.size(); i++) {
@ -262,7 +228,7 @@ public class MySqlCreateTableStatement extends SQLCreateTableStatement implement
super.simplify();
}
public void showCoumns(StringBuilder out) throws IOException {
public void showColumns(StringBuilder out) throws IOException {
this.accept(new MySqlShowColumnOutpuVisitor(out));
}
@ -498,8 +464,8 @@ public class MySqlCreateTableStatement extends SQLCreateTableStatement implement
public void cloneTo(MySqlCreateTableStatement x) {
super.cloneTo(x);
if (partitioning != null) {
x.setPartitioning(partitioning.clone());
if (partitionBy != null) {
x.setPartitionBy(partitionBy.clone());
}
if (localPartitioning != null) {
x.setLocalPartitioning(localPartitioning.clone());

View File

@ -94,12 +94,10 @@ public class MySqlCreateTableParser extends SQLCreateTableParser {
}
accept(Token.CREATE);
if (lexer.identifierEquals("TEMPORARY")) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.GLOBAL_TEMPORARY);
} else if (lexer.identifierEquals("SHADOW")) {
lexer.nextToken();
stmt.setType(SQLCreateTableStatement.Type.SHADOW);
if (lexer.nextIfIdentifier("TEMPORARY")) {
stmt.config(SQLCreateTableStatement.Feature.Temporary);
} else if (lexer.nextIfIdentifier("SHADOW")) {
stmt.config(SQLCreateTableStatement.Feature.Shadow);
}
if (lexer.identifierEquals(FnvHash.Constants.DIMENSION)) {
@ -123,7 +121,7 @@ public class MySqlCreateTableParser extends SQLCreateTableParser {
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExiists(true);
stmt.setIfNotExists(true);
}
stmt.setName(this.exprParser.name());
@ -832,7 +830,7 @@ public class MySqlCreateTableParser extends SQLCreateTableParser {
if (lexer.token() == Token.PARTITION) {
SQLPartitionBy partitionClause = parsePartitionBy();
stmt.setPartitioning(partitionClause);
stmt.setPartitionBy(partitionClause);
continue;
}

View File

@ -28,7 +28,10 @@ import com.alibaba.druid.sql.dialect.mysql.ast.expr.*;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlCreateTableStatement;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
import com.alibaba.druid.util.HexBin;
import com.alibaba.druid.util.MySqlUtils;
import com.alibaba.druid.util.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import java.sql.Types;
import java.util.Arrays;
@ -53,6 +56,8 @@ public class MySqlExprParser extends SQLExprParser {
"COUNT",
"GROUP_CONCAT",
"LISTAGG",
"JSON_ARRAYAGG",
"JSON_OBJECTAGG",
"MAX",
"MIN",
"STD",
@ -60,10 +65,9 @@ public class MySqlExprParser extends SQLExprParser {
"STDDEV_POP",
"STDDEV_SAMP",
"SUM",
"VAR_POP",
"VAR_SAMP",
"VARIANCE",
"JSON_ARRAYAGG",
"JSON_OBJECTAGG",
};
AGGREGATE_FUNCTIONS_CODES = FnvHash.fnv1a_64_lower(strings, true);
@ -163,6 +167,750 @@ public class MySqlExprParser extends SQLExprParser {
this.lexer.nextToken();
}
@Override
protected SQLExpr primaryIdentifierRest(long hash_lower, String ident) {
    // Recognizes MySQL typed-literal prefixes (VARCHAR 'x', JSON '{...}',
    // DECIMAL '1.2', ...) and 0x hex literals. Returns null when the identifier
    // is not one of these forms, so the caller continues normal identifier parsing.
    SQLExpr sqlExpr = null;
    if (hash_lower == FnvHash.Constants.VARCHAR && lexer.token() == Token.LITERAL_CHARS) {
        MySqlCharExpr mysqlChar = new MySqlCharExpr(lexer.stringVal());
        mysqlChar.setType("VARCHAR");
        sqlExpr = mysqlChar;
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.CHAR && lexer.token() == Token.LITERAL_CHARS) {
        MySqlCharExpr mysqlChar = new MySqlCharExpr(lexer.stringVal());
        mysqlChar.setType("CHAR");
        sqlExpr = mysqlChar;
        lexer.nextToken();
    } else if (ident.startsWith("0x") && (ident.length() % 2) == 0) {
        // Even total length => even number of hex digits after the "0x" prefix.
        sqlExpr = new SQLHexExpr(ident.substring(2));
    } else if (hash_lower == FnvHash.Constants.JSON
            && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLJSONExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.DECIMAL
            && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLDecimalExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.DOUBLE
            && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLDoubleExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.FLOAT
            && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLFloatExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.SMALLINT
            && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLSmallIntExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.TINYINT && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLTinyIntExpr(lexer.stringVal());
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.BIGINT && lexer.token() == Token.LITERAL_CHARS) {
        String strVal = lexer.stringVal();
        // NOTE(review): a leading "--" (double unary minus) is stripped before
        // parsing — confirm the lexer can actually deliver such a value here.
        if (strVal.startsWith("--")) {
            strVal = strVal.substring(2);
        }
        sqlExpr = new SQLBigIntExpr(strVal);
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.INTEGER && lexer.token() == Token.LITERAL_CHARS) {
        String strVal = lexer.stringVal();
        if (strVal.startsWith("--")) {
            strVal = strVal.substring(2);
        }
        SQLIntegerExpr integerExpr = SQLIntegerExpr.ofIntOrLong(Long.parseLong(strVal));
        integerExpr.setType("INTEGER");
        sqlExpr = integerExpr;
        lexer.nextToken();
    } else if (hash_lower == FnvHash.Constants.REAL && lexer.token() == Token.LITERAL_CHARS) {
        sqlExpr = new SQLRealExpr(lexer.stringVal());
        lexer.nextToken();
    }
    return sqlExpr;
}
@Override
protected SQLExpr primaryLiteralCharsRest(SQLExpr sqlExpr) {
    // MySQL concatenates adjacent string literals: 'a' 'b' parses as 'ab'.
    lexer.nextTokenValue();
    for (; ; ) {
        Token tok = lexer.token();
        // The original had two byte-identical branches (LITERAL_ALIAS vs
        // LITERAL_CHARS/LITERAL_NCHARS); they are merged into one condition.
        if (tok == Token.LITERAL_ALIAS || tok == Token.LITERAL_CHARS || tok == Token.LITERAL_NCHARS) {
            String concat = ((SQLCharExpr) sqlExpr).getText() + lexer.stringVal();
            lexer.nextTokenValue();
            sqlExpr = new SQLCharExpr(concat);
        } else {
            break;
        }
    }
    return sqlExpr;
}
@Override
protected SQLExpr primaryLiteralNCharsRest(SQLExpr sqlExpr) {
    // Adjacent literals after an N'...' literal are folded into a CONCAT(...)
    // call; the CONCAT node is created lazily on the first continuation.
    SQLMethodInvokeExpr concat = null;
    for (; ; ) {
        String piece;
        if (lexer.token() == Token.LITERAL_ALIAS) {
            // A double-quoted literal: strip the surrounding quote characters.
            String alias = lexer.stringVal();
            lexer.nextToken();
            piece = alias.substring(1, alias.length() - 1);
        } else if (lexer.token() == Token.LITERAL_CHARS || lexer.token() == Token.LITERAL_NCHARS) {
            piece = lexer.stringVal();
            lexer.nextToken();
        } else {
            break;
        }
        if (concat == null) {
            concat = new SQLMethodInvokeExpr("CONCAT");
            concat.addArgument(sqlExpr);
            sqlExpr = concat;
        }
        concat.addArgument(new SQLCharExpr(piece));
    }
    return sqlExpr;
}
@Override
protected SQLExpr bitXorRestSUBGT() {
    // After '->': a string literal is parsed as a bare primary, anything else
    // as a full expression.
    boolean stringLiteral = lexer.token() == Token.LITERAL_CHARS
            || lexer.token() == Token.LITERAL_ALIAS;
    return stringLiteral ? primary() : expr();
}
@Override
protected SQLExpr primarySubLiteralAliasRest() {
    // A LITERAL_ALIAS token in this position is treated as a character literal.
    return new SQLCharExpr(lexer.stringVal());
}
@Override
protected void primaryQues() {
    // After '?', scan the following token in value position.
    lexer.nextTokenValue();
}
@Override
protected SQLExpr primaryDistinct(SQLExpr sqlExpr) {
    // DISTINCT in primary position is only legal as a function-like form
    // "distinct(...)"; anything else is a syntax error at the saved position.
    Lexer.SavePoint savePoint = lexer.mark();
    SQLIdentifierExpr distinctIdent = new SQLIdentifierExpr(lexer.stringVal());
    lexer.nextToken();
    if (lexer.token() == Token.LPAREN) {
        return distinctIdent;
    }
    lexer.reset(savePoint);
    throw new ParserException("ERROR. " + lexer.info());
}
@Override
protected SQLExpr methodRestAllowIdentifierMethodSpecific(String methodName, long hash_lower, SQLMethodInvokeExpr methodInvokeExpr) {
    // Handles function-like syntaxes with special grammars: MATCH(...),
    // EXTRACT(...), POSITION(...), and CONVERT(expr[, type][ USING charset]).
    // Returns null for every other method name so generic parsing proceeds.
    if (hash_lower == FnvHash.Constants.MATCH) {
        return parseMatch();
    } else if (hash_lower == FnvHash.Constants.EXTRACT) {
        return parseExtract();
    } else if (hash_lower == FnvHash.Constants.POSITION) {
        return parsePosition();
    } else if (hash_lower == FnvHash.Constants.CONVERT) {
        methodInvokeExpr = new SQLMethodInvokeExpr(methodName, hash_lower);
        SQLExpr arg0 = this.expr();
        // Fix for using.
        // If the first argument already carried a USING attribute, move it onto
        // the method call itself.
        Object exprUsing = arg0.getAttributes().get("USING");
        if (exprUsing instanceof String) {
            String charset = (String) exprUsing;
            methodInvokeExpr.setUsing(new SQLIdentifierExpr(charset));
            arg0.getAttributes().remove("USING");
        }
        methodInvokeExpr.addArgument(arg0);
        // Optional second argument: a target data type, e.g. CONVERT(x, CHAR).
        if (lexer.token() == Token.COMMA) {
            lexer.nextToken();
            SQLDataType dataType = this.parseDataType();
            SQLDataTypeRefExpr dataTypeRefExpr = new SQLDataTypeRefExpr(dataType);
            methodInvokeExpr.addArgument(dataTypeRefExpr);
        }
        // Optional USING clause: CONVERT(x USING utf8mb4) — also accepts
        // '*' and BINARY in place of a charset name.
        if (lexer.token() == Token.USING || lexer.identifierEquals(FnvHash.Constants.USING)) {
            lexer.nextToken();
            SQLExpr using;
            if (lexer.token() == Token.STAR) {
                lexer.nextToken();
                using = new SQLAllColumnExpr();
            } else if (lexer.token() == Token.BINARY) {
                using = new SQLIdentifierExpr(lexer.stringVal());
                lexer.nextToken();
            } else {
                using = this.primary();
            }
            methodInvokeExpr.setUsing(using);
        }
        accept(Token.RPAREN);
        return primaryRest(methodInvokeExpr);
    }
    return null;
}
@Override
protected void exprListComma() {
    // After a comma in an expression list, scan the next token in value position.
    lexer.nextTokenValue();
}
@Override
protected SQLBinaryOperator orRestGetOrOperator() {
    // '||' is string concatenation only when the PipesAsConcat feature is on;
    // otherwise it is MySQL's logical OR.
    if (isEnabled(SQLParserFeature.PipesAsConcat)) {
        return SQLBinaryOperator.Concat;
    }
    return SQLBinaryOperator.BooleanOr;
}
// Appends the current identifier (the BYTE length qualifier, per this hook's
// name) to the data-type name and advances past it.
protected void parseDataTypeByte(StringBuilder typeName) {
    typeName.append(' ').append(lexer.stringVal());
    lexer.nextToken();
}
@Override
protected void parseDataTypePrecision(StringBuilder typeName) {
    // "DOUBLE PRECISION" style: fold a trailing PRECISION keyword into the
    // type name if present.
    if (lexer.identifierEquals(FnvHash.Constants.PRECISION)) {
        typeName.append(' ').append(lexer.stringVal());
        lexer.nextToken();
    }
}
@Override
protected SQLExpr parseColumnRestDefault() {
    // DEFAULT value: a bare string literal becomes a char expression directly;
    // anything else is parsed as a general (bit-or level) expression.
    if (lexer.token() == Token.LITERAL_CHARS) {
        SQLCharExpr charDefault = new SQLCharExpr(lexer.stringVal());
        lexer.nextToken();
        return charDefault;
    }
    return bitOr();
}
@Override
protected void parseIndexSpecific(SQLIndexDefinition indexDefinition) {
    // After the index keyword, the next token is one of: USING <type>, a
    // HASHMAP/HASH marker (flagged as an ADS-style index via the "ads.index"
    // attribute), or the index name itself.
    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        indexDefinition.getOptions().setIndexType(lexer.stringVal());
        lexer.nextToken();
    } else if (lexer.identifierEquals("HASHMAP")) {
        lexer.nextToken();
        indexDefinition.setHashMapType(true);
        indexDefinition.getParent().putAttribute("ads.index", Boolean.TRUE);
    } else if (lexer.identifierEquals(FnvHash.Constants.HASH)) {
        lexer.nextToken();
        indexDefinition.setHashType(true);
        indexDefinition.getParent().putAttribute("ads.index", Boolean.TRUE);
    } else {
        indexDefinition.setName(name());
    }
}
@Override
protected void parseIndexOptions(SQLIndexDefinition indexDefinition) {
    // Consumes index options in any order (COMMENT, [IN]VISIBLE, GLOBAL/LOCAL,
    // WITH PARSER / WITH ... ANALYZER, LOCK, KEY_BLOCK_SIZE, USING, ALGORITHM,
    // DB/TBPARTITION extensions, PARTITION BY). The labeled loop exits as soon
    // as a token that is not an index option is seen.
    _opts:
    while (true) {
        if (lexer.token() == Token.COMMENT) {
            lexer.nextToken();
            indexDefinition.getOptions().setComment(primary());
        } else if (lexer.identifierEquals("INVISIBLE")) {
            lexer.nextToken();
            indexDefinition.getOptions().setInvisible(true);
        } else if (lexer.identifierEquals("VISIBLE")) {
            lexer.nextToken();
            indexDefinition.getOptions().setVisible(true);
        } else if (lexer.identifierEquals("GLOBAL")) {
            lexer.nextToken();
            indexDefinition.getOptions().setGlobal(true);
        } else if (lexer.identifierEquals("LOCAL")) {
            lexer.nextToken();
            indexDefinition.getOptions().setLocal(true);
        } else if (lexer.token() == Token.HINT && lexer.stringVal().trim().equals("!80000 INVISIBLE")) {
            // MySQL 8.0 version-gated hint form of INVISIBLE.
            lexer.nextToken();
            indexDefinition.getOptions().setInvisible(true);
        } else {
            switch (lexer.token()) {
                case WITH:
                    // Either "WITH PARSER <name>" or the analyzer/dict forms;
                    // the savepoint lets us back out if it is not WITH PARSER.
                    Lexer.SavePoint mark = lexer.mark();
                    lexer.nextToken();
                    if (lexer.identifierEquals("PARSER")) {
                        lexer.nextToken();
                        indexDefinition.getOptions().setParserName(lexer.stringVal());
                        lexer.nextToken();
                        break;
                    }
                    lexer.reset(mark);
                    for (; ; ) {
                        if (lexer.token() == Token.WITH) {
                            lexer.nextToken();
                            // Part from original MySqlCreateTableParser.
                            if (lexer.token() == Token.INDEX) {
                                lexer.nextToken();
                                acceptIdentifier("ANALYZER");
                                indexDefinition.setIndexAnalyzerName(name());
                                continue;
                            } else if (lexer.identifierEquals(FnvHash.Constants.QUERY)) {
                                lexer.nextToken();
                                acceptIdentifier("ANALYZER");
                                indexDefinition.setQueryAnalyzerName(name());
                                continue;
                            } else if (lexer.identifierEquals(FnvHash.Constants.ANALYZER)) {
                                lexer.nextToken();
                                SQLName name = name();
                                indexDefinition.setAnalyzerName(name);
                                break;
                            } else if (lexer.identifierEquals("DICT")) {
                                lexer.nextToken();
                                indexDefinition.setWithDicName(name());
                                continue;
                            }
                        }
                        break;
                    }
                    break;
                case LOCK:
                    // LOCK [=] <mode>
                    lexer.nextToken();
                    if (lexer.token() == Token.EQ) {
                        lexer.nextToken();
                    }
                    indexDefinition.getOptions().setLock(lexer.stringVal());
                    lexer.nextToken();
                    break;
                case IDENTIFIER:
                    if (lexer.identifierEquals(FnvHash.Constants.KEY_BLOCK_SIZE)
                            || lexer.identifierEquals(FnvHash.Constants.BLOCK_SIZE)) {
                        lexer.nextToken();
                        if (lexer.token() == Token.EQ) {
                            lexer.nextToken();
                        }
                        indexDefinition.getOptions().setKeyBlockSize(expr());
                    } else if (lexer.identifierEquals(FnvHash.Constants.USING)) {
                        lexer.nextToken();
                        indexDefinition.getOptions().setIndexType(lexer.stringVal());
                        lexer.nextToken();
                    } else if (lexer.identifierEquals(FnvHash.Constants.ALGORITHM)) {
                        lexer.nextToken();
                        if (lexer.token() == Token.EQ) {
                            lexer.nextToken();
                        }
                        indexDefinition.getOptions().setAlgorithm(lexer.stringVal());
                        lexer.nextToken();
                    } else if (lexer.identifierEquals(FnvHash.Constants.DISTANCEMEASURE)) {
                        // Caution: Not in MySql documents.
                        SQLExpr key = new SQLIdentifierExpr(lexer.stringVal());
                        lexer.nextToken();
                        if (lexer.token() == Token.EQ) {
                            lexer.nextToken();
                        }
                        SQLAssignItem item = new SQLAssignItem(key, primary());
                        if (indexDefinition.getParent() != null) {
                            item.setParent(indexDefinition.getParent());
                        } else {
                            item.setParent(indexDefinition);
                        }
                        // Add both with same object.
                        indexDefinition.getOptions().getOtherOptions().add(item);
                        indexDefinition.getCompatibleOptions().add(item);
                    } else if (lexer.identifierEquals(FnvHash.Constants.DBPARTITION)) {
                        lexer.nextToken();
                        accept(Token.BY);
                        indexDefinition.setDbPartitionBy(primary());
                    } else if (lexer.identifierEquals(FnvHash.Constants.TBPARTITION)) {
                        lexer.nextToken();
                        accept(Token.BY);
                        SQLExpr expr = expr();
                        // Optional STARTWITH ... ENDWITH range folded into a BETWEEN.
                        if (lexer.identifierEquals(FnvHash.Constants.STARTWITH)) {
                            lexer.nextToken();
                            SQLExpr start = primary();
                            acceptIdentifier("ENDWITH");
                            SQLExpr end = primary();
                            expr = new SQLBetweenExpr(expr, start, end);
                        }
                        indexDefinition.setTbPartitionBy(expr);
                    } else if (lexer.identifierEquals(FnvHash.Constants.TBPARTITIONS)) {
                        lexer.nextToken();
                        indexDefinition.setTbPartitions(primary());
                    //} else if (lexer.identifierEquals(FnvHash.Constants.GLOBAL)) {
                    //    lexer.nextToken();
                    //    indexDefinition.setGlobal(true);
                    } else {
                        break _opts;
                    }
                    break;
                case PARTITION:
                    SQLPartitionBy partitionBy = new MySqlCreateTableParser(this).parsePartitionBy();
                    indexDefinition.setPartitioning(partitionBy);
                    break;
                default:
                    break _opts;
            }
        }
    }
}
/**
 * Parses the MySQL-specific continuation of a select item that begins with the
 * identifier {@code ident} (whose lower-case FNV hash is {@code hash_lower}):
 * COLLATE / REGEXP operators, typed literals (TIMESTAMP / DATETIME / BOOLEAN /
 * CHAR), bare CURRENT_* pseudo-functions, and charset-introduced literals such
 * as {@code _utf8'...'} or {@code _gbk X'...'}.
 *
 * <p>Refactor note: the six charset branches previously inlined identical
 * copies of the hex-literal scan and the optional-COLLATE scan; those are now
 * the private helpers {@link #scanCharsetIntroducedHex()} and
 * {@link #parseCharsetCollateOpt()}. Behavior is unchanged.
 *
 * @param ident      identifier already consumed by the caller
 * @param hash_lower FNV hash of the lower-cased identifier
 * @return the parsed expression, or {@code null} when the identifier has no
 *         special continuation and should be treated as a plain identifier
 */
@Override
protected SQLExpr parseSelectItemRest(String ident, long hash_lower) {
    SQLExpr expr = null;
    if (lexer.identifierEquals(FnvHash.Constants.COLLATE)
            && lexer.stringVal().charAt(0) != '`'
    ) {
        // ident COLLATE collation_name
        lexer.nextToken();
        String collate = lexer.stringVal();
        lexer.nextToken();
        SQLBinaryOpExpr binaryExpr = new SQLBinaryOpExpr(
                new SQLIdentifierExpr(ident),
                SQLBinaryOperator.COLLATE,
                new SQLIdentifierExpr(collate), dbType
        );
        expr = binaryExpr;
    } else if (lexer.identifierEquals(FnvHash.Constants.REGEXP)
            && lexer.stringVal().charAt(0) != '`') {
        // ident REGEXP pattern, then let relationalRest pick up any chain.
        lexer.nextToken();
        SQLExpr rightExp = bitOr();
        SQLBinaryOpExpr binaryExpr = new SQLBinaryOpExpr(
                new SQLIdentifierExpr(ident),
                SQLBinaryOperator.RegExp,
                rightExp, dbType
        );
        expr = binaryExpr;
        expr = relationalRest(expr);
    } else if (FnvHash.Constants.TIMESTAMP == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token() == Token.LITERAL_CHARS) {
        // TIMESTAMP 'literal' [AT TIME ZONE 'tz']
        String literal = lexer.stringVal();
        lexer.nextToken();
        SQLTimestampExpr ts = new SQLTimestampExpr(literal);
        expr = ts;
        if (lexer.identifierEquals(FnvHash.Constants.AT)) {
            Lexer.SavePoint mark = lexer.mark();
            lexer.nextToken();
            String timeZone = null;
            if (lexer.identifierEquals(FnvHash.Constants.TIME)) {
                lexer.nextToken();
                if (lexer.identifierEquals(FnvHash.Constants.ZONE)) {
                    lexer.nextToken();
                    timeZone = lexer.stringVal();
                    lexer.nextToken();
                }
            }
            if (timeZone == null) {
                // Not a complete AT TIME ZONE clause: rewind to before AT.
                lexer.reset(mark);
            } else {
                ts.setTimeZone(timeZone);
            }
        }
    } else if (FnvHash.Constants.DATETIME == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token() == Token.LITERAL_CHARS) {
        // DATETIME 'literal'
        String literal = lexer.stringVal();
        lexer.nextToken();
        expr = new SQLDateTimeExpr(literal);
    } else if (FnvHash.Constants.BOOLEAN == hash_lower
            && lexer.stringVal().charAt(0) != '`'
            && lexer.token() == Token.LITERAL_CHARS) {
        // BOOLEAN 'literal'
        String literal = lexer.stringVal();
        lexer.nextToken();
        expr = new SQLBooleanExpr(Boolean.valueOf(literal));
    } else if ((FnvHash.Constants.CHAR == hash_lower || FnvHash.Constants.VARCHAR == hash_lower)
            && lexer.token() == Token.LITERAL_CHARS) {
        // CHAR 'literal' / VARCHAR 'literal'
        String literal = lexer.stringVal();
        lexer.nextToken();
        expr = new SQLCharExpr(literal);
    } else if (FnvHash.Constants.CURRENT_DATE == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        // Bare (no-parentheses) pseudo-function forms below.
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_DATE);
    } else if (FnvHash.Constants.CURRENT_TIMESTAMP == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_TIMESTAMP);
    } else if (FnvHash.Constants.CURRENT_TIME == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURRENT_TIME);
    } else if (FnvHash.Constants.CURDATE == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.CURDATE);
    } else if (FnvHash.Constants.LOCALTIME == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.LOCALTIME);
    } else if (FnvHash.Constants.LOCALTIMESTAMP == hash_lower
            && ident.charAt(0) != '`'
            && lexer.token() != Token.LPAREN) {
        expr = new SQLCurrentTimeExpr(SQLCurrentTimeExpr.Type.LOCALTIMESTAMP);
    } else if ((FnvHash.Constants._LATIN1 == hash_lower)
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            String str = lexer.stringVal();
            lexer.nextToken();
            String collate = parseCharsetCollateOpt();
            expr = new MySqlCharExpr(str, "_latin1", collate);
        } else {
            expr = new MySqlCharExpr(hexString, "_latin1");
        }
    } else if ((FnvHash.Constants._UTF8 == hash_lower || FnvHash.Constants._UTF8MB4 == hash_lower)
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            String str = lexer.stringVal();
            lexer.nextToken();
            String collate = parseCharsetCollateOpt();
            expr = new MySqlCharExpr(str, "_utf8", collate);
        } else {
            // Hex form decodes immediately to a plain character literal.
            expr = new SQLCharExpr(
                    MySqlUtils.utf8(hexString)
            );
        }
    } else if ((FnvHash.Constants._UTF16 == hash_lower || FnvHash.Constants._UCS2 == hash_lower)
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            // Plain literal is re-encoded to hex so both forms store hex.
            String str = lexer.stringVal();
            hexString = HexBin.encode(str.getBytes(MySqlUtils.ASCII));
            lexer.nextToken();
        }
        expr = new MySqlCharExpr(hexString, "_utf16");
    } else if (FnvHash.Constants._UTF32 == hash_lower
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            String str = lexer.stringVal();
            lexer.nextToken();
            expr = new MySqlCharExpr(str, "_utf32");
        } else {
            expr = new SQLCharExpr(
                    MySqlUtils.utf32(hexString)
            );
        }
    } else if (FnvHash.Constants._GBK == hash_lower
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            String str = lexer.stringVal();
            lexer.nextToken();
            expr = new MySqlCharExpr(str, "_gbk");
        } else {
            expr = new SQLCharExpr(
                    MySqlUtils.gbk(hexString)
            );
        }
    } else if (FnvHash.Constants._BIG5 == hash_lower
            && ident.charAt(0) != '`'
    ) {
        String hexString = scanCharsetIntroducedHex();
        if (hexString == null) {
            String str = lexer.stringVal();
            lexer.nextToken();
            expr = new MySqlCharExpr(str, "_big5");
        } else {
            expr = new SQLCharExpr(
                    MySqlUtils.big5(hexString)
            );
        }
    }
    return expr;
}

/**
 * Scans the literal that follows a charset introducer ({@code _utf8} etc.).
 *
 * @return the hex digits when the literal is a LITERAL_HEX token or the
 *         {@code X'..'} form (tokens consumed); {@code null} when the current
 *         token is a plain character literal, which is left unconsumed for
 *         the caller to read via {@code lexer.stringVal()}
 */
private String scanCharsetIntroducedHex() {
    if (lexer.token() == Token.LITERAL_HEX) {
        String hexString = lexer.hexString();
        lexer.nextToken();
        return hexString;
    }
    if (lexer.token() == Token.LITERAL_CHARS) {
        return null;
    }
    // X'hex' form: the identifier X followed by a character literal of digits.
    acceptIdentifier("X");
    String hexString = lexer.stringVal();
    accept(Token.LITERAL_CHARS);
    return hexString;
}

/**
 * Parses an optional COLLATE clause trailing a charset-introduced character
 * literal; the collation may itself be a character literal or an identifier.
 *
 * @return the collation name, or {@code null} when no COLLATE follows
 */
private String parseCharsetCollateOpt() {
    String collate = null;
    if (lexer.identifierEquals(FnvHash.Constants.COLLATE)) {
        lexer.nextToken();
        collate = lexer.stringVal();
        if (lexer.token() == Token.LITERAL_CHARS) {
            lexer.nextToken();
        } else {
            accept(Token.IDENTIFIER);
        }
    }
    return collate;
}
/**
 * Parses a select item that turned out to be a method invocation: advances
 * the lexer past the current token and delegates to methodRest with the
 * accept-LPAREN step disabled.
 */
protected SQLExpr parseSelectItemMethod(SQLExpr expr) {
    lexer.nextTokenValue();
    final SQLExpr methodExpr = this.methodRest(expr, false);
    return methodExpr;
}
/**
 * Decides whether the identifier after a select-item expression is an alias
 * or part of the expression itself. MySQL allows FORCE, SOUNDS, COLLATE and
 * REGEXP both as keywords and as plain alias names, so each is disambiguated
 * by peeking at the following token.
 *
 * @param expr the select-item expression parsed so far (may be extended)
 * @return pair of (alias or null, possibly extended expression)
 */
protected Pair<String, SQLExpr> parseSelectItemIdentifier(SQLExpr expr) {
    String as = null;
    if (lexer.hashLCase() == FnvHash.Constants.FORCE) {
        // "FORCE" is an alias unless followed by PARTITION (a FORCE hint).
        String force = lexer.stringVal();
        Lexer.SavePoint savePoint = lexer.mark();
        lexer.nextToken();
        if (lexer.token() == Token.PARTITION) {
            lexer.reset(savePoint);
            as = null;
        } else {
            as = force;
            if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && as.length() > 1) {
                as = StringUtils.removeNameQuotes(as);
            }
            lexer.nextTokenComma();
        }
    } else if (lexer.hashLCase() == FnvHash.Constants.SOUNDS) {
        // "SOUNDS" is an alias unless followed by LIKE ("x SOUNDS LIKE y").
        String sounds = lexer.stringVal();
        Lexer.SavePoint savePoint = lexer.mark();
        lexer.nextToken();
        if (lexer.token() == Token.LIKE) {
            lexer.reset(savePoint);
            expr = exprRest(expr);
            as = as();
        } else {
            as = sounds;
            if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && as.length() > 1) {
                as = StringUtils.removeNameQuotes(as);
            }
            lexer.nextTokenComma();
        }
    } else if (lexer.hashLCase() == FnvHash.Constants.COLLATE
            && lexer.stringVal().charAt(0) != '`') {
        // Unquoted COLLATE continues the expression; the alias comes after.
        expr = primaryRest(expr);
        as = as();
    } else if (lexer.hashLCase() == FnvHash.Constants.REGEXP
            && lexer.stringVal().charAt(0) != '`') {
        // Unquoted REGEXP continues the expression; the alias comes after.
        expr = exprRest(expr);
        as = as();
    } else {
        // Any other identifier is a plain alias.
        as = lexer.stringVal();
        if (isEnabled(SQLParserFeature.IgnoreNameQuotes) && as.length() > 1) {
            as = StringUtils.removeNameQuotes(as);
        }
        lexer.nextTokenComma();
    }
    return Pair.of(as, expr);
}
/**
 * Quotes a select-item alias for MySQL output when it contains a backtick:
 * each inner backtick is doubled and the whole alias is wrapped in backticks
 * (MySQL's identifier-quoting escape rule). Aliases without backticks are
 * returned unchanged.
 *
 * @param alias raw alias text
 * @return the alias, backtick-quoted when it contains a backtick
 */
@Override
protected String parseSelectItemAlias(String alias) {
    // indexOf replaces the original hand-rolled char scan; String.replace
    // performs the same literal substitution as replaceAll("`", "``")
    // without invoking the regex engine.
    if (alias.indexOf('`') >= 0) {
        alias = '`' + alias.replace("`", "``") + '`';
    }
    return alias;
}
public SQLExpr primary() {
final Token tok = lexer.token();
switch (tok) {

View File

@ -22,15 +22,16 @@ import java.util.HashMap;
import java.util.Map;
import static com.alibaba.druid.sql.parser.CharTypes.isFirstIdentifierChar;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.*;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class MySqlLexer extends Lexer {
public static SymbolTable quoteTable = new SymbolTable(8192);
public static final Keywords DEFAULT_MYSQL_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -66,16 +67,12 @@ public class MySqlLexer extends Lexer {
map.put("RLIKE", Token.RLIKE);
map.put("FULLTEXT", Token.FULLTEXT);
DEFAULT_MYSQL_KEYWORDS = new Keywords(map);
}
{
dbType = DbType.mysql;
return new Keywords(map);
}
public MySqlLexer(char[] input, int inputLength, boolean skipComment) {
super(input, inputLength, skipComment);
super.keywords = DEFAULT_MYSQL_KEYWORDS;
this.dbType = DbType.mysql;
}
public MySqlLexer(String input) {
@ -85,7 +82,7 @@ public class MySqlLexer extends Lexer {
public MySqlLexer(String input, SQLParserFeature... features) {
super(input, true);
this.keepComments = true;
super.keywords = DEFAULT_MYSQL_KEYWORDS;
this.dbType = DbType.mysql;
for (SQLParserFeature feature : features) {
config(feature, true);
@ -96,7 +93,7 @@ public class MySqlLexer extends Lexer {
super(input, skipComment);
this.skipComment = skipComment;
this.keepComments = keepComments;
super.keywords = DEFAULT_MYSQL_KEYWORDS;
this.dbType = DbType.mysql;
}
public void scanSharp() {
@ -459,8 +456,161 @@ public class MySqlLexer extends Lexer {
}
}
@Override
protected final void scanString() {
scanString2();
{
boolean hasSpecial = false;
int startIndex = pos + 1;
int endIndex = -1; // text.indexOf('\'', startIndex);
for (int i = startIndex; i < text.length(); ++i) {
final char ch = text.charAt(i);
if (ch == '\\') {
hasSpecial = true;
continue;
}
if (ch == '\'') {
endIndex = i;
break;
}
}
if (endIndex == -1) {
throw new ParserException("unclosed str. " + info());
}
String stringVal;
if (token == Token.AS) {
stringVal = text.substring(pos, endIndex + 1);
} else {
if (startIndex == endIndex) {
stringVal = "";
} else {
stringVal = text.substring(startIndex, endIndex);
}
}
// hasSpecial = stringVal.indexOf('\\') != -1;
if (!hasSpecial) {
this.stringVal = stringVal;
int pos = endIndex + 1;
char ch = charAt(pos);
if (ch != '\'') {
this.pos = pos;
this.ch = ch;
token = LITERAL_CHARS;
return;
}
}
}
mark = pos;
boolean hasSpecial = false;
for (; ; ) {
if (isEOF()) {
lexError("unclosed.str.lit");
return;
}
ch = charAt(++pos);
if (ch == '\\') {
scanChar();
if (!hasSpecial) {
initBuff(bufPos);
arraycopy(mark + 1, buf, 0, bufPos);
hasSpecial = true;
}
switch (ch) {
case '0':
putChar('\0');
break;
case '\'':
putChar('\'');
break;
case '"':
putChar('"');
break;
case 'b':
putChar('\b');
break;
case 'n':
putChar('\n');
break;
case 'r':
putChar('\r');
break;
case 't':
putChar('\t');
break;
case '\\':
putChar('\\');
break;
case 'Z':
putChar((char) 0x1A); // ctrl + Z
break;
case '%':
putChar('\\');
putChar('%');
break;
case '_':
putChar('\\');
putChar('_');
break;
case 'u':
if ((features & SQLParserFeature.SupportUnicodeCodePoint.mask) != 0) {
char c1 = charAt(++pos);
char c2 = charAt(++pos);
char c3 = charAt(++pos);
char c4 = charAt(++pos);
int intVal = Integer.parseInt(new String(new char[]{c1, c2, c3, c4}), 16);
putChar((char) intVal);
} else {
putChar(ch);
}
break;
default:
putChar(ch);
break;
}
continue;
}
if (ch == '\'') {
scanChar();
if (ch != '\'') {
token = LITERAL_CHARS;
break;
} else {
if (!hasSpecial) {
initBuff(bufPos);
arraycopy(mark + 1, buf, 0, bufPos);
hasSpecial = true;
}
putChar('\'');
continue;
}
}
if (!hasSpecial) {
bufPos++;
continue;
}
if (bufPos == buf.length) {
putChar(ch);
} else {
buf[bufPos++] = ch;
}
}
if (!hasSpecial) {
stringVal = subString(mark + 1, bufPos);
} else {
stringVal = new String(buf, 0, bufPos);
}
}
public void skipFirstHintsOrMultiCommentAndNextToken() {
@ -801,4 +951,29 @@ public class MySqlLexer extends Lexer {
}
return isIdentifierChar(c);
}
/**
 * Configures MySQL-specific dialect switches on top of the base lexer
 * defaults. Each name is a statically imported DialectFeature flag; the
 * flags steer both lexing and downstream parsing decisions.
 */
@Override
protected void initDialectFeature() {
    super.initDialectFeature();
    // Behaviors MySQL enables relative to the generic dialect.
    this.dialectFeature.configFeature(
            NextTokenPrefixN,
            ScanString2PutDoubleBackslash,
            JoinRightTableWith,
            PostNaturalJoin,
            MultipleJoinOn,
            GroupByPostDesc,
            GroupByItemOrder,
            SQLDateExpr,
            PrimaryLbraceOdbcEscape,
            ParseSelectItemPrefixX,
            ParseStatementListUpdatePlanCache,
            ParseStatementListRollbackReturn,
            ParseStatementListCommitReturn,
            ParseDropTableTables,
            AsSequence
    );
    // Disabled for MySQL: '||' is not treated as string concatenation here.
    this.dialectFeature.unconfigFeature(
            AdditiveRestPipesAsConcat
    );
}
}

View File

@ -68,7 +68,7 @@ public class MySqlSelectParser extends SQLSelectParser {
lexer.nextTokenIdent();
if (lexer.hasComment()) {
queryBlock.setCommentsAfaterFrom(lexer.readAndResetComments());
queryBlock.setCommentsAfterFrom(lexer.readAndResetComments());
}
while (lexer.token() == Token.HINT) {
lexer.nextToken();

View File

@ -31,7 +31,6 @@ import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.sql.repository.SchemaObject;
import com.alibaba.druid.sql.visitor.SQLASTOutputVisitor;
import com.alibaba.druid.util.FnvHash;
import com.alibaba.druid.util.JdbcUtils;
import com.alibaba.druid.util.StringUtils;
import java.util.ArrayList;
@ -5079,12 +5078,10 @@ public class MySqlStatementParser extends SQLStatementParser {
} else {
SQLSetStatement stmt = new SQLSetStatement(getDbType());
boolean mariadbSetStatementFlag = false;
if (JdbcUtils.isMysqlDbType(getDbType())) {
if (lexer.identifierEquals("STATEMENT")) {
mariadbSetStatementFlag = true;
lexer.nextToken();
}
}
parseAssignItems(stmt.getItems(), stmt, true);
if (mariadbSetStatementFlag) {
accept(Token.FOR);
@ -9336,4 +9333,198 @@ public class MySqlStatementParser extends SQLStatementParser {
}
return stmt;
}
/**
 * Parses the MySQL/AnalyticDB tail of CREATE MATERIALIZED VIEW: an optional
 * parenthesized list of columns / indexes / constraints, followed by
 * table-level options (DISTRIBUTED BY, INDEX_ALL, ENGINE, PARTITION BY,
 * COMMENT) in any order.
 */
@Override
protected void parseCreateMaterializedViewRest(SQLCreateMaterializedViewStatement stmt) {
    stmt.setDbType(dbType);
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (; ; ) {
            // Captured before the CLUSTERED branch may advance the lexer.
            Token token = lexer.token();
            if (lexer.identifierEquals(FnvHash.Constants.CLUSTERED)) {
                lexer.nextToken();
                if (lexer.token() == Token.KEY) {
                    // CLUSTERED KEY (...)
                    MySqlKey clsKey = new MySqlKey();
                    this.exprParser.parseIndex(clsKey.getIndexDefinition());
                    clsKey.setIndexType("CLUSTERED");
                    clsKey.setParent(stmt);
                    stmt.getTableElementList().add(clsKey);
                    if (lexer.token() == Token.COMMA) {
                        lexer.nextToken();
                        continue;
                    }
                } else if (lexer.token() == Token.INDEX) {
                    // CLUSTERED INDEX (...)
                    MySqlTableIndex idx = new MySqlTableIndex();
                    this.exprParser.parseIndex(idx.getIndexDefinition());
                    idx.setIndexType("CLUSTERED");
                    idx.setParent(stmt);
                    stmt.getTableElementList().add(idx);
                    if (lexer.token() == Token.RPAREN) {
                        break;
                    } else if (lexer.token() == Token.COMMA) {
                        lexer.nextToken();
                        continue;
                    }
                }
            }
            // NOTE(review): 'token' still holds the pre-CLUSTERED token here,
            // so after falling through from the CLUSTERED branch this reads
            // IDENTIFIER — confirm the fall-through into parseColumn is
            // intended for inputs like "CLUSTERED KEY (...))".
            if (token == Token.IDENTIFIER) {
                SQLColumnDefinition column = this.exprParser.parseColumn(stmt);
                stmt.getTableElementList().add((SQLTableElement) column);
            } else if (token == Token.PRIMARY //
                    || token == Token.UNIQUE //
                    || token == Token.CHECK //
                    || token == Token.CONSTRAINT
                    || token == Token.FOREIGN) {
                SQLConstraint constraint = this.exprParser.parseConstaint();
                constraint.setParent(stmt);
                stmt.getTableElementList().add((SQLTableElement) constraint);
            } else if (lexer.token() == (Token.INDEX)) {
                MySqlTableIndex idx = new MySqlTableIndex();
                this.exprParser.parseIndex(idx.getIndexDefinition());
                idx.setParent(stmt);
                stmt.getTableElementList().add(idx);
            } else if (lexer.token() == (Token.KEY)) {
                // KEY can start a column named "key" (when followed by
                // VARCHAR) or a key definition; peek one token and rewind.
                Lexer.SavePoint savePoint = lexer.mark();
                lexer.nextToken();
                boolean isColumn = false;
                if (lexer.identifierEquals(FnvHash.Constants.VARCHAR)) {
                    isColumn = true;
                }
                lexer.reset(savePoint);
                if (isColumn) {
                    stmt.getTableElementList().add(this.exprParser.parseColumn());
                } else {
                    SQLName name = null;
                    if (lexer.token() == Token.IDENTIFIER) {
                        name = this.exprParser.name();
                    }
                    MySqlKey key = new MySqlKey();
                    this.exprParser.parseIndex(key.getIndexDefinition());
                    if (name != null) {
                        key.setName(name);
                    }
                    key.setParent(stmt);
                    stmt.getTableElementList().add(key);
                }
                continue;
            }
            if (lexer.token() == COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }
    // Table-level options; loop until no option keyword matches.
    for (; ; ) {
        if (lexer.identifierEquals(FnvHash.Constants.DISTRIBUTED)) {
            lexer.nextToken();
            accept(Token.BY);
            if (lexer.identifierEquals(FnvHash.Constants.HASH)) {
                // DISTRIBUTED BY HASH(col [, col ...])
                lexer.nextToken();
                accept(Token.LPAREN);
                for (; ; ) {
                    SQLName name = this.exprParser.name();
                    stmt.getDistributedBy().add(name);
                    if (lexer.token() == Token.COMMA) {
                        lexer.nextToken();
                        continue;
                    }
                    break;
                }
                accept(Token.RPAREN);
                stmt.setDistributedByType(new SQLIdentifierExpr("HASH"));
            } else if (lexer.identifierEquals(FnvHash.Constants.DUPLICATE)) {
                // DISTRIBUTED BY DUPLICATE(col [, col ...])
                lexer.nextToken();
                accept(Token.LPAREN);
                for (; ; ) {
                    SQLName name = this.exprParser.name();
                    stmt.getDistributedBy().add(name);
                    if (lexer.token() == Token.COMMA) {
                        lexer.nextToken();
                        continue;
                    }
                    break;
                }
                accept(Token.RPAREN);
                stmt.setDistributedByType(new SQLIdentifierExpr("DUPLICATE"));
            } else if (lexer.identifierEquals(FnvHash.Constants.BROADCAST)) {
                lexer.nextToken();
                stmt.setDistributedByType(new SQLIdentifierExpr("BROADCAST"));
            }
            continue;
        } else if (lexer.identifierEquals("INDEX_ALL")) {
            // INDEX_ALL = 'Y' | 'N' (any other value is rejected)
            lexer.nextToken();
            accept(Token.EQ);
            if (lexer.token() == Token.LITERAL_CHARS) {
                if ("Y".equalsIgnoreCase(lexer.stringVal())) {
                    lexer.nextToken();
                    stmt.addOption("INDEX_ALL", new SQLCharExpr("Y"));
                } else if ("N".equalsIgnoreCase(lexer.stringVal())) {
                    lexer.nextToken();
                    stmt.addOption("INDEX_ALL", new SQLCharExpr("N"));
                } else {
                    throw new ParserException("INDEX_ALL accept parameter ['Y' or 'N'] only.");
                }
            }
            continue;
        } else if (lexer.identifierEquals(FnvHash.Constants.ENGINE)) {
            // ENGINE [=] expr
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLExpr expr = this.exprParser.expr();
            stmt.addOption("ENGINE", expr);
            continue;
        } else if (lexer.token() == Token.PARTITION) {
            SQLPartitionBy partitionBy = this.exprParser.parsePartitionBy();
            stmt.setPartitionBy(partitionBy);
            continue;
        } else if (lexer.token() == Token.COMMENT) {
            // COMMENT [=] expr
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            stmt.setComment(this.exprParser.expr());
            continue;
        }
        break;
    }
}
/**
 * Records a bare FORMAT or PARTITIONS word after EXPLAIN as the explain
 * type and consumes it; any other token is left untouched.
 */
@Override
public void parseExplainFormatPartition(SQLExplainStatement explain) {
    final boolean isFormatOrPartitions = lexer.identifierEquals("FORMAT")
            || lexer.identifierEquals("PARTITIONS");
    if (isFormatOrPartitions) {
        final String typeWord = lexer.stringVal();
        explain.setType(typeWord);
        lexer.nextToken();
    }
}
/**
 * Skips an EXPLAIN option group of the form (FORMAT x) or (TYPE x).
 * NOTE(review): the keyword and its value are consumed but never stored on
 * the statement — confirm that discarding them is intentional.
 */
@Override
public void parseExplainFormatType(SQLExplainStatement explain) {
    if (lexer.token() != Token.LPAREN) {
        return;
    }
    lexer.nextToken();
    if (lexer.identifierEquals("FORMAT") || lexer.identifierEquals("TYPE")) {
        // Skip the keyword and the value token that follows it.
        lexer.nextToken();
        lexer.nextToken();
    }
    accept(Token.RPAREN);
}
}

View File

@ -249,8 +249,8 @@ public class MySqlOutputVisitor extends SQLASTOutputVisitor implements MySqlASTV
}
println();
print0(ucase ? "FROM " : "from ");
if (x.getCommentsAfaterFrom() != null) {
printAfterComments(x.getCommentsAfaterFrom());
if (x.getCommentsAfterFrom() != null) {
printAfterComments(x.getCommentsAfterFrom());
println();
}
printTableSource(from);

View File

@ -54,7 +54,7 @@ public class OdpsCreateTableParser extends SQLCreateTableParser {
accept(Token.NOT);
accept(Token.EXISTS);
stmt.setIfNotExiists(true);
stmt.setIfNotExists(true);
}
stmt.setName(this.exprParser.name());

View File

@ -19,10 +19,13 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLObject;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.SQLAssignItem;
import com.alibaba.druid.sql.ast.statement.SQLColumnDefinition;
import com.alibaba.druid.sql.ast.statement.SQLExternalRecordFormat;
import com.alibaba.druid.sql.ast.statement.SQLSelectItem;
import com.alibaba.druid.sql.ast.statement.SQLSetStatement;
import com.alibaba.druid.sql.dialect.odps.ast.OdpsNewExpr;
import com.alibaba.druid.sql.dialect.odps.ast.OdpsTransformExpr;
import com.alibaba.druid.sql.dialect.odps.ast.OdpsUDTFSQLSelectItem;
@ -65,6 +68,350 @@ public class OdpsExprParser extends SQLExprParser {
}
}
/**
 * ODPS: decides whether a bare AS token in primary position is really a
 * plain identifier named "as". Peeks at the next token: for tokens that can
 * only follow an identifier it commits, for '.' it parses a qualified name,
 * otherwise it rewinds and leaves AS to be handled as a keyword.
 */
@Override
protected SQLExpr primaryAs(SQLExpr sqlExpr) {
    Lexer.SavePoint mark = lexer.mark();
    String str = lexer.stringVal();
    lexer.nextToken();
    switch (lexer.token()) {
        // Tokens after which "as" must have been an identifier.
        case COMMA:
        case RPAREN:
        case AS:
        case EQ:
        case EQEQ:
        case LT:
        case LTEQ:
        case GT:
        case GTEQ:
        case LTGT:
        case SEMI:
            sqlExpr = new SQLIdentifierExpr(str);
            break;
        case DOT:
            // "as.xxx" — qualified name rooted at the identifier "as".
            sqlExpr = primaryRest(
                    new SQLIdentifierExpr(str)
            );
            break;
        default:
            // Genuine AS keyword: restore the lexer position.
            lexer.reset(mark);
            break;
    }
    return sqlExpr;
}
/**
 * ODPS: handles an IN token in primary position. When the following token
 * shows that "in" is being used as a plain identifier, returns it as one;
 * otherwise parses a real "IN (expr [, expr ...])" list expression.
 */
@Override
protected SQLExpr primaryIn(SQLExpr sqlExpr) {
    String str = lexer.stringVal();
    lexer.nextToken();
    switch (lexer.token()) {
        // Tokens after which "in" must have been an identifier.
        case DOT:
        case COMMA:
        case LT:
        case EQ:
        case GT:
        case RPAREN:
        case IS:
        case AS:
            sqlExpr = new SQLIdentifierExpr(str);
            break;
        default:
            break;
    }
    if (sqlExpr != null) {
        return sqlExpr;
    }
    // Real IN expression: first element, then optional remaining list.
    accept(Token.LPAREN);
    SQLInListExpr in = new SQLInListExpr();
    in.setExpr(
            this.expr()
    );
    if (lexer.token() == Token.COMMA) {
        lexer.nextToken();
        this.exprList(in.getTargetList(), in);
    }
    accept(Token.RPAREN);
    sqlExpr = in;
    return sqlExpr;
}
/**
 * ODPS: handles a leading "::" in primary position (e.g. a builtin
 * reference such as ::concat(...)): parses the following primary and, for a
 * method call, attaches an empty owner to preserve the "::" prefix.
 */
@Override
protected SQLExpr primaryColonColon(SQLExpr sqlExpr) {
    lexer.nextToken();
    SQLExpr temp = this.primary();
    if (temp instanceof SQLArrayExpr) {
        sqlExpr = temp;
    } else {
        // NOTE(review): assumes the primary after "::" is a method
        // invocation; any other expression type would throw a
        // ClassCastException here — confirm expected inputs.
        SQLMethodInvokeExpr method = (SQLMethodInvokeExpr) temp;
        method.setOwner(new SQLIdentifierExpr(""));
        sqlExpr = method;
    }
    return sqlExpr;
}
/**
 * Attaches an optional USING clause to a method invocation (ODPS transform
 * syntax); does nothing when the lexer is not positioned on USING.
 */
@Override
protected void methodRestUsing(SQLMethodInvokeExpr methodInvokeExpr) {
    if (!lexer.identifierEquals(FnvHash.Constants.USING)) {
        return;
    }
    lexer.nextToken();
    methodInvokeExpr.setUsing(this.primary());
}
/**
 * ODPS-specific continuation of a property name: a trailing numeric segment,
 * or one more dotted segment after the exact name "odps.sql.mapper".
 *
 * @return the extra segment text, or null when nothing is continued
 */
protected String doRestSpecific(SQLExpr expr) {
    final Token tok = lexer.token();
    if (tok == Token.LITERAL_INT || tok == Token.LITERAL_FLOAT) {
        final String segment = lexer.numberString();
        lexer.nextToken();
        return segment;
    }
    if (tok == Token.DOT && expr.toString().equals("odps.sql.mapper")) {
        lexer.nextToken();
        final String segment = lexer.stringVal();
        lexer.nextToken();
        return segment;
    }
    return null;
}
/** Reads the current token's text as an identifier name and advances the lexer. */
@Override
protected String nameCommon() {
    final String name = lexer.stringVal();
    lexer.nextToken();
    return name;
}
/**
 * Handles a '!' met while parsing the relational rest of {@code expr}:
 * consumes the '!' and delegates to notRationalRest.
 * NOTE(review): the second argument {@code false} mirrors the non-negated
 * path — confirm against notRationalRest's contract.
 */
@Override
protected SQLExpr relationalRestBang(SQLExpr expr) {
    lexer.nextToken();
    return notRationalRest(expr, false);
}
/**
 * Appends a generic type-argument list ("&lt;t1, t2, ...&gt;") to typeName
 * when the lexer sits on '&lt;' (ODPS complex types such as ARRAY&lt;...&gt;
 * and MAP&lt;...&gt;); a no-op otherwise.
 */
@Override
protected void parseDataTypeComplex(StringBuilder typeName) {
    if (lexer.token() != Token.LT || dbType != DbType.odps) {
        return;
    }
    lexer.nextToken();
    typeName.append('<');
    boolean first = true;
    do {
        if (!first) {
            // Consume the separating comma before the next element type.
            lexer.nextToken();
            typeName.append(", ");
        }
        first = false;
        typeName.append(this.parseDataType().toString());
    } while (lexer.token() == Token.COMMA);
    accept(Token.GT);
    typeName.append('>');
}
/**
 * Concatenates consecutive string literals into a column comment; a
 * double-quoted LITERAL_ALIAS has its surrounding quotes stripped first.
 */
@Override
protected void parseColumnCommentLiteralCharsRest(StringBuilder stringVal) {
    while (lexer.token() == Token.LITERAL_ALIAS || lexer.token() == Token.LITERAL_CHARS) {
        String piece = lexer.stringVal();
        if (lexer.token() == Token.LITERAL_ALIAS
                && piece.length() > 2
                && piece.charAt(0) == '"'
                && piece.charAt(piece.length() - 1) == '"') {
            // Strip the enclosing double quotes from an aliased literal.
            piece = piece.substring(1, piece.length() - 1);
        }
        stringVal.append(piece);
        lexer.nextToken();
    }
}
/**
 * ODPS: silently consumes a stray '.' between assignment-item segments.
 */
@Override
protected void parseAssignItemDot() {
    if (lexer.token() == Token.DOT) {
        lexer.nextToken();
    }
}
/**
 * ODPS: consumes and discards an "NC_TO_BE_EXECUTED" marker identifier if
 * it appears in an assignment item.
 */
@Override
protected void parseAssignItemNcToBeExecuted() {
    if (lexer.identifierEquals("NC_TO_BE_EXECUTED")) {
        lexer.nextToken(); // skip
    }
}
/**
 * Parses a parenthesized tblproperties value list into {@code item}.
 *
 * @return true when the list was consumed, false when the lexer is not
 *         positioned on '('
 */
@Override
protected boolean parseAssignItemTblProperties(SQLAssignItem item) {
    if (lexer.token() != Token.LPAREN) {
        return false;
    }
    final SQLListExpr values = new SQLListExpr();
    this.exprList(values.getItems(), values);
    item.setTarget(new SQLIdentifierExpr("tblproperties"));
    item.setValue(values);
    return true;
}
/**
 * Folds a '-' (and an optional trailing identifier) into a property name,
 * supporting hyphenated ODPS setting keys such as "a.b-c"; then continues
 * parsing via primaryRest. Returns the expression unchanged when it is not
 * a property followed by '-'.
 */
@Override
protected SQLExpr parseAssignItemSQLPropertyExprAndSub(SQLExpr sqlExpr) {
    if (!(sqlExpr instanceof SQLPropertyExpr) || lexer.token() != Token.SUB) {
        return sqlExpr;
    }
    final SQLPropertyExpr propertyExpr = (SQLPropertyExpr) sqlExpr;
    final StringBuilder name = new StringBuilder(propertyExpr.getName()).append('-');
    lexer.nextToken();
    if (lexer.token() == Token.IDENTIFIER) {
        name.append(lexer.stringVal());
        lexer.nextToken();
    }
    propertyExpr.setName(name.toString());
    return this.primaryRest(propertyExpr);
}
/**
 * ODPS: repairs property-expression names that the lexer split across
 * tokens ("enab"+"le", "sq"+"l", "s"+"ql") and glues trailing DATEADD()/BY
 * fragments onto the name. These are workarounds for setting keys whose
 * text collides with keywords.
 */
@Override
protected SQLExpr parseAssignItemSQLPropertyExpr(SQLExpr sqlExpr) {
    if (sqlExpr instanceof SQLPropertyExpr) {
        SQLPropertyExpr propertyExpr = (SQLPropertyExpr) sqlExpr;
        if (identifierEquals("DATEADD")) {
            // "name DATEADD" or "name DATEADD()" — fold into the name.
            String func = lexer.stringVal();
            lexer.nextToken();
            if (lexer.token() == Token.LPAREN) {
                lexer.nextToken();
                accept(Token.RPAREN);
                func += "()";
            }
            String name = propertyExpr.getName() + func;
            propertyExpr.setName(name);
        } else if (propertyExpr.getName().equalsIgnoreCase("enab") && identifierEquals("le")) {
            // "enab" + "le" -> "enable"
            String name = propertyExpr.getName() + lexer.stringVal();
            lexer.nextToken();
            propertyExpr.setName(name);
        } else if (propertyExpr.getName().equalsIgnoreCase("sq") && identifierEquals("l")) {
            // "sq" + "l" -> "sql"
            String name = propertyExpr.getName() + lexer.stringVal();
            lexer.nextToken();
            propertyExpr.setName(name);
        } else if (propertyExpr.getName().equalsIgnoreCase("s") && identifierEquals("ql")) {
            // "s" + "ql" -> "sql"; NOTE(review): only this splice and the BY
            // branch continue via primaryRest — confirm the asymmetry with
            // the two branches above is intentional.
            String name = propertyExpr.getName() + lexer.stringVal();
            lexer.nextToken();
            propertyExpr.setName(name);
            sqlExpr = this.primaryRest(propertyExpr);
        } else if (lexer.token() == Token.BY) {
            // "name BY" — keep BY as part of the setting name.
            String name = propertyExpr.getName() + ' ' + lexer.stringVal();
            lexer.nextToken();
            propertyExpr.setName(name);
            sqlExpr = this.primaryRest(propertyExpr);
        }
    }
    return sqlExpr;
}
/**
 * Converts a method-invocation shaped assignment — name(arg1, arg2, ...) —
 * into target = name (qualified when the call has an owner) and value =
 * list of the call's arguments.
 *
 * @return false when {@code sqlExpr} is not a method call (item untouched)
 */
@Override
protected boolean parseAssignItemSQLMethodInvokeExpr(SQLExpr sqlExpr, SQLAssignItem item) {
    if (!(sqlExpr instanceof SQLMethodInvokeExpr)) {
        return false;
    }
    final SQLMethodInvokeExpr call = (SQLMethodInvokeExpr) sqlExpr;
    final SQLExpr owner = call.getOwner();
    item.setTarget(owner == null
            ? new SQLIdentifierExpr(call.getMethodName())
            : new SQLPropertyExpr(owner, call.getMethodName()));
    final SQLListExpr argList = new SQLListExpr();
    for (SQLExpr argument : call.getArguments()) {
        argList.addItem(argument);
    }
    item.setValue(argList);
    return true;
}
/**
 * Consumes the '=' of an assignment item. Inside a SET statement (or when
 * there is no parent) the value side is lexed with nextTokenForSet;
 * otherwise the lexer advances normally.
 */
@Override
protected void parseAssignItemEq(SQLObject parent) {
    if (parent instanceof SQLSetStatement || parent == null) {
        lexer.nextTokenForSet();
    } else {
        lexer.nextToken();
    }
}
/**
 * ODPS: merges following identifier tokens into a bare identifier
 * assignment target — "RUNNING CLUSTER" is special-cased, and any run of
 * identifiers is joined with single spaces into one name.
 */
@Override
protected void parseAssignItemSQLIdentifierExprAndVariant(SQLIdentifierExpr ident) {
    if (lexer.identifierEquals(FnvHash.Constants.CLUSTER)
            && ident.nameHashCode64() == FnvHash.Constants.RUNNING
    ) {
        String str = ident.getName() + " " + lexer.stringVal();
        lexer.nextToken();
        ident.setName(str);
    } else if (lexer.token() == Token.IDENTIFIER) {
        ident.setName(ident.getName() + ' ' + lexer.stringVal());
        lexer.nextToken();
        // Keep absorbing identifiers until a non-identifier token appears.
        while (lexer.token() == Token.IDENTIFIER) {
            ident.setName(ident.getName() + ' ' + lexer.stringVal());
            lexer.nextToken();
        }
    }
}
/**
 * ODPS: merges a following identifier into names "et" or "odps"
 * (multi-word setting keys such as "odps xxx"); no-op otherwise.
 */
@Override
protected void parseAssignItemSQLIdentifierExpr(SQLExpr sqlExpr) {
    if (!(sqlExpr instanceof SQLIdentifierExpr)) {
        return;
    }
    final SQLIdentifierExpr identExpr = (SQLIdentifierExpr) sqlExpr;
    final String name = identExpr.getName();
    final boolean mergeable = name.equalsIgnoreCase("et") || name.equalsIgnoreCase("odps");
    if (mergeable && lexer.token() == Token.IDENTIFIER) {
        final SQLExpr tail = this.primary();
        identExpr.setName(name + ' ' + tail.toString());
    }
}
/**
 * Re-joins dotted version-like values (e.g. "1.2.3") that the lexer split
 * into a number followed by ".x" float fragments: each fragment is appended
 * and the accumulated text becomes an identifier expression.
 */
@Override
protected SQLExpr parseAssignItemOnLiteralFloat(SQLExpr sqlExpr) {
    while (lexer.token() == Token.LITERAL_FLOAT && lexer.numberString().startsWith(".")) {
        final String base;
        if (sqlExpr instanceof SQLNumberExpr) {
            base = ((SQLNumberExpr) sqlExpr).getLiteral();
        } else if (sqlExpr instanceof SQLIdentifierExpr) {
            base = ((SQLIdentifierExpr) sqlExpr).getName();
        } else {
            // Anything else cannot absorb a ".x" fragment; stop here.
            break;
        }
        sqlExpr = new SQLIdentifierExpr(base + lexer.numberString());
        lexer.nextToken();
    }
    return sqlExpr;
}
/**
 * Finishes an assignment item's value. A comma-separated list inside a SET
 * statement becomes a SQLListExpr (stopping early at a following SET
 * keyword in ODPS); otherwise the single expression is stored as-is.
 */
@Override
protected void parseAssignItemOnComma(SQLExpr sqlExpr, SQLAssignItem item, SQLObject parent) {
    if (lexer.token() == Token.COMMA
            && parent instanceof SQLSetStatement) {
        SQLListExpr listExpr = new SQLListExpr();
        listExpr.addItem(sqlExpr);
        sqlExpr.setParent(listExpr);
        do {
            lexer.nextToken();
            // ODPS: a new SET begins the next statement; end the list here.
            if (lexer.token() == Token.SET && dbType == DbType.odps) {
                break;
            }
            SQLExpr listItem = this.expr();
            listItem.setParent(listExpr);
            listExpr.addItem(listItem);
        }
        while (lexer.token() == Token.COMMA);
        item.setValue(listExpr);
    } else {
        item.setValue(sqlExpr);
    }
}
public OdpsExprParser(Lexer lexer) {
super(lexer, DbType.odps);
@ -77,11 +424,6 @@ public class OdpsExprParser extends SQLExprParser {
this.lexer.nextToken();
}
public OdpsExprParser(String sql, boolean skipComments, boolean keepComments) {
this(new OdpsLexer(sql, skipComments, keepComments));
this.lexer.nextToken();
}
protected SQLExpr parseAliasExpr(String alias) {
String chars = alias.substring(1, alias.length() - 1);
return new SQLCharExpr(chars);

View File

@ -16,18 +16,34 @@
package com.alibaba.druid.sql.dialect.odps.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.dialect.hive.parser.HiveLexer;
import com.alibaba.druid.sql.parser.*;
import java.util.HashMap;
import java.util.Map;
import static com.alibaba.druid.sql.parser.CharTypes.*;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.*;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
public class OdpsLexer extends Lexer {
public static final Keywords DEFAULT_ODPS_KEYWORDS;
public class OdpsLexer extends HiveLexer {
public OdpsLexer(String input, SQLParserFeature... features) {
super(input);
static {
init();
dbType = DbType.odps;
this.skipComment = true;
this.keepComments = false;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -48,42 +64,7 @@ public class OdpsLexer extends Lexer {
map.put("QUALIFY", Token.QUALIFY);
map.put("", Token.SEMI);
DEFAULT_ODPS_KEYWORDS = new Keywords(map);
}
public OdpsLexer(String input, SQLParserFeature... features) {
super(input);
init();
dbType = DbType.odps;
super.keywords = DEFAULT_ODPS_KEYWORDS;
this.skipComment = true;
this.keepComments = false;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
public OdpsLexer(String input, boolean skipComment, boolean keepComments) {
super(input, skipComment);
init();
dbType = DbType.odps;
this.skipComment = skipComment;
this.keepComments = keepComments;
super.keywords = DEFAULT_ODPS_KEYWORDS;
}
public OdpsLexer(String input, CommentHandler commentHandler) {
super(input, commentHandler);
init();
dbType = DbType.odps;
super.keywords = DEFAULT_ODPS_KEYWORDS;
return new Keywords(map);
}
private void init() {
@ -101,10 +82,6 @@ public class OdpsLexer extends Lexer {
}
}
public void scanComment() {
scanHiveComment();
}
public void scanIdentifier() {
hashLCase = 0;
hash = 0;
@ -262,7 +239,45 @@ public class OdpsLexer extends Lexer {
scanVariable();
}
protected final void scanString() {
scanString2();
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanSQLTypeBlockComment,
ScanSQLTypeWithSemi,
ScanSQLTypeWithFunction,
ScanSQLTypeWithBegin,
ScanSQLTypeWithAt,
ScanVariableAt,
ScanVariableMoveToSemi,
ScanVariableSkipIdentifiers,
ScanNumberCommonProcess,
ScanHiveCommentDoubleSpace,
QueryRestSemi,
JoinAt,
UDJ,
TwoConsecutiveUnion,
RewriteGroupByCubeRollupToFunction,
PrimaryTwoConsecutiveSet,
ParseAllIdentifier,
PrimaryRestCommaAfterLparen,
InRestSpecificOperation,
ParseAssignItemEqSemiReturn,
ParseAssignItemEqeq,
ParseStatementListLparenContinue,
ParseRevokeFromUser,
ParseCreateSql,
TableAliasConnectWhere,
TableAliasTable,
TableAliasBetween,
TableAliasRest,
AliasLiteralFloat
);
this.dialectFeature.unconfigFeature(
ParseStatementListSelectUnsupportedSyntax,
ScanNumberPrefixB,
ScanAliasU,
AcceptUnion
);
}
}

View File

@ -23,6 +23,7 @@ import com.alibaba.druid.sql.ast.expr.SQLQueryExpr;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.hive.ast.HiveInsert;
import com.alibaba.druid.sql.dialect.hive.ast.HiveMultiInsertStatement;
import com.alibaba.druid.sql.dialect.hive.parser.HiveStatementParser;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveCreateFunctionStatement;
import com.alibaba.druid.sql.dialect.hive.stmt.HiveLoadDataStatement;
import com.alibaba.druid.sql.dialect.mysql.ast.statement.MySqlKillStatement;
@ -30,22 +31,31 @@ import com.alibaba.druid.sql.dialect.odps.ast.*;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
import java.util.ArrayList;
import java.util.List;
import static com.alibaba.druid.sql.parser.Token.COMMA;
import static com.alibaba.druid.sql.parser.Token.IDENTIFIER;
import static com.alibaba.druid.sql.parser.Token.LPAREN;
import static com.alibaba.druid.sql.parser.Token.ON;
import static com.alibaba.druid.sql.parser.Token.OVERWRITE;
import static com.alibaba.druid.sql.parser.Token.PARTITION;
import static com.alibaba.druid.sql.parser.Token.RPAREN;
public class OdpsStatementParser extends SQLStatementParser {
public class OdpsStatementParser extends HiveStatementParser {
public OdpsStatementParser(String sql) {
super(new OdpsExprParser(sql));
dbType = DbType.odps;
}
public OdpsStatementParser(String sql, SQLParserFeature... features) {
super(new OdpsExprParser(sql, features));
dbType = DbType.odps;
}
public OdpsStatementParser(SQLExprParser exprParser) {
super(exprParser);
dbType = DbType.odps;
}
public SQLSelectStatement parseSelect() {
@ -1620,4 +1630,112 @@ public class OdpsStatementParser extends SQLStatementParser {
stmt.setArguments(arguments);
return stmt;
}
@Override
protected boolean alterTableAfterNameRest(SQLAlterTableStatement stmt) {
if (lexer.identifierEquals("MERGE")) {
alterTableMerge(stmt);
} else if ((lexer.identifierEquals(FnvHash.Constants.RANGE)
|| lexer.identifierEquals(FnvHash.Constants.CLUSTERED))
) {
if (lexer.identifierEquals(FnvHash.Constants.RANGE)) {
lexer.nextToken();
acceptIdentifier("CLUSTERED");
stmt.setRange(true);
} else {
lexer.nextToken();
}
accept(Token.BY);
accept(Token.LPAREN);
for (; ; ) {
SQLSelectOrderByItem item = this.exprParser.parseSelectOrderByItem();
stmt.addClusteredByItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
continue;
}
break;
}
accept(Token.RPAREN);
} else if (lexer.identifierEquals(FnvHash.Constants.SORTED)) {
alterTableSorted(stmt);
} else if (dbType == DbType.odps && lexer.token() == Token.NOT) {
lexer.nextToken();
acceptIdentifier("CLUSTERED");
stmt.setNotClustered(true);
} else {
return true;
}
return super.alterTableAfterNameRest(stmt);
}
@Override
protected boolean alterTableSetRest(SQLAlterTableStatement stmt) {
if (lexer.identifierEquals("CHANGELOGS")) {
lexer.nextToken();
OdpsAlterTableSetChangeLogs item = new OdpsAlterTableSetChangeLogs();
item.setValue(this.exprParser.primary());
stmt.addItem(item);
} else if (lexer.identifierEquals("FILEFORMAT")) {
lexer.nextToken();
OdpsAlterTableSetFileFormat item = new OdpsAlterTableSetFileFormat();
item.setValue(this.exprParser.primary());
stmt.addItem(item);
} else {
return super.alterTableSetRest(stmt);
}
return false;
}
@Override
protected void parseCreateMaterializedViewRest(SQLCreateMaterializedViewStatement stmt) {
if (lexer.identifierEquals(FnvHash.Constants.LIFECYCLE)) {
lexer.nextToken();
stmt.setLifyCycle(
this.exprParser.primary()
);
}
if (lexer.token() == Token.PARTITIONED) {
lexer.nextToken();
accept(ON);
accept(LPAREN);
this.exprParser.names(stmt.getPartitionedOn(), stmt);
accept(RPAREN);
}
}
@Override
public void parseUpdateStatementPartition(SQLUpdateStatement updateStatement) {
if (lexer.token() == PARTITION) {
lexer.nextToken();
updateStatement.setPartitions(new ArrayList<>());
this.exprParser.parseAssignItem(updateStatement.getPartitions(), updateStatement);
}
}
@Override
protected void parseUpdateSetComma() {
if (lexer.token() == COMMA) {
lexer.nextToken();
}
}
@Override
public void parseCreateViewAtDataType(SQLColumnDefinition column, SQLName expr) {
if (expr.getSimpleName().startsWith("@")) {
column.setDataType(this.exprParser.parseDataType());
}
}
@Override
protected void parseWithQuerySkip() {
if (lexer.identifierEquals(FnvHash.Constants.STRING)
|| lexer.identifierEquals(FnvHash.Constants.INT)
|| lexer.identifierEquals(FnvHash.Constants.BIGINT)
) {
lexer.nextToken(); // skip
}
}
}

View File

@ -63,6 +63,15 @@ public class OdpsOutputVisitor extends HiveOutputVisitor implements OdpsASTVisit
super(appender, DbType.odps);
}
@Override
public boolean visit(SQLCreateTableStatement x) {
if (x instanceof OdpsCreateTableStatement) {
return visit((OdpsCreateTableStatement) x);
}
return super.visit(x);
}
@Override
public boolean visit(OdpsCreateTableStatement x) {
List<SQLCommentHint> headHints = x.getHeadHintsDirect();
if (headHints != null) {
@ -78,9 +87,7 @@ public class OdpsOutputVisitor extends HiveOutputVisitor implements OdpsASTVisit
print0(ucase ? "CREATE " : "create ");
if (x.isExternal()) {
print0(ucase ? "EXTERNAL " : "external ");
}
printCreateTableFeatures(x);
if (x.isIfNotExists()) {
print0(ucase ? "TABLE IF NOT EXISTS " : "table if not exists ");

View File

@ -314,7 +314,7 @@ public class OracleCreateTableStatement extends SQLCreateTableStatement implemen
this.acceptChild(visitor, tablespace);
this.acceptChild(visitor, select);
this.acceptChild(visitor, storage);
this.acceptChild(visitor, partitioning);
this.acceptChild(visitor, partitionBy);
}
visitor.endVisit(this);
}

View File

@ -211,7 +211,7 @@ public class OracleCreateTableParser extends SQLCreateTableParser {
if (lexer.identifierEquals("RANGE")) {
SQLPartitionByRange partitionByRange = this.getExprParser().partitionByRange();
this.getExprParser().partitionClauseRest(partitionByRange);
stmt.setPartitioning(partitionByRange);
stmt.setPartitionBy(partitionByRange);
continue;
} else if (lexer.identifierEquals("HASH")) {
SQLPartitionByHash partitionByHash = this.getExprParser().partitionByHash();
@ -232,12 +232,12 @@ public class OracleCreateTableParser extends SQLCreateTableParser {
throw new ParserException("TODO : " + lexer.info());
}
}
stmt.setPartitioning(partitionByHash);
stmt.setPartitionBy(partitionByHash);
continue;
} else if (lexer.identifierEquals("LIST")) {
SQLPartitionByList partitionByList = partitionByList();
this.getExprParser().partitionClauseRest(partitionByList);
stmt.setPartitioning(partitionByList);
stmt.setPartitionBy(partitionByList);
continue;
} else {
throw new ParserException("TODO : " + lexer.info());
@ -634,4 +634,9 @@ public class OracleCreateTableParser extends SQLCreateTableParser {
public OracleExprParser getExprParser() {
return (OracleExprParser) exprParser;
}
@Override
protected SQLSelect createTableQueryRest() {
return new OracleSelectParser(this.exprParser).select();
}
}

View File

@ -1834,7 +1834,7 @@ public class OracleExprParser extends SQLExprParser {
return partition;
}
protected SQLPartitionBy parsePartitionBy() {
public SQLPartitionBy parsePartitionBy() {
lexer.nextToken();
accept(Token.BY);
@ -2151,4 +2151,15 @@ public class OracleExprParser extends SQLExprParser {
accept(Token.RPAREN);
return partitionByHash;
}
@Override
protected void parseIdentifySpecific() {
accept(Token.START);
accept(Token.WITH);
}
@Override
protected SQLExpr parseSelectItemRest(String ident, long hash_lower) {
return null;
}
}

View File

@ -15,18 +15,20 @@
*/
package com.alibaba.druid.sql.dialect.oracle.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.*;
import java.util.HashMap;
import java.util.Map;
import static com.alibaba.druid.sql.parser.CharTypes.isIdentifierChar;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.*;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.LayoutCharacters.EOI;
public class OracleLexer extends Lexer {
public static final Keywords DEFAULT_ORACLE_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<>(Keywords.DEFAULT_KEYWORDS.getKeywords());
map.put("BEGIN", Token.BEGIN);
@ -53,7 +55,7 @@ public class OracleLexer extends Lexer {
map.put("MERGE", Token.MERGE);
map.put("MODE", Token.MODE);
// map.put("MODEL", Token.MODEL);
// map.put("MODEL", Token.MODEL);
map.put("NOWAIT", Token.NOWAIT);
map.put("OF", Token.OF);
map.put("PRIOR", Token.PRIOR);
@ -114,26 +116,26 @@ public class OracleLexer extends Lexer {
map.put("", Token.LPAREN);
map.put("", Token.RPAREN);
DEFAULT_ORACLE_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public OracleLexer(char[] input, int inputLength, boolean skipComment) {
super(input, inputLength, skipComment);
super.keywords = DEFAULT_ORACLE_KEYWORDS;
dbType = DbType.oracle;
}
public OracleLexer(String input) {
super(input);
this.skipComment = true;
this.keepComments = true;
super.keywords = DEFAULT_ORACLE_KEYWORDS;
dbType = DbType.oracle;
}
public OracleLexer(String input, SQLParserFeature... features) {
super(input);
this.skipComment = true;
this.keepComments = true;
super.keywords = DEFAULT_ORACLE_KEYWORDS;
dbType = DbType.oracle;
for (SQLParserFeature feature : features) {
config(feature, true);
@ -393,4 +395,16 @@ public class OracleLexer extends Lexer {
}
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanSQLTypeWithBegin,
SQLDateExpr,
PrimaryVariantColon,
CreateTableBodySupplemental,
AsCommaFrom
);
this.dialectFeature.unconfigFeature(SQLTimestampExpr);
}
}

View File

@ -401,8 +401,8 @@ public class OracleOutputVisitor extends SQLASTOutputVisitor implements OracleAS
protected void printFrom(SQLSelectQueryBlock x) {
println();
print0(ucase ? "FROM " : "from ");
if (x.getCommentsAfaterFrom() != null) {
printAfterComments(x.getCommentsAfaterFrom());
if (x.getCommentsAfterFrom() != null) {
printAfterComments(x.getCommentsAfterFrom());
println();
}
SQLTableSource from = x.getFrom();
@ -1725,11 +1725,7 @@ public class OracleOutputVisitor extends SQLASTOutputVisitor implements OracleAS
print0(ucase ? "MONITORING" : "monitoring");
}
if (x.getPartitioning() != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
x.getPartitioning().accept(this);
}
printPartitionBy(x);
if (x.getCluster() != null) {
println();

View File

@ -25,9 +25,8 @@ import static com.alibaba.druid.sql.parser.CharTypes.isIdentifierChar;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class OscarLexer extends Lexer {
public static final Keywords DEFAULT_OSCAR_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -80,13 +79,12 @@ public class OscarLexer extends Lexer {
map.put("TEMPORARY", Token.TEMPORARY);
map.put("TEMP", Token.TEMP);
DEFAULT_OSCAR_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public OscarLexer(String input, SQLParserFeature... features) {
super(input, true);
this.keepComments = true;
super.keywords = DEFAULT_OSCAR_KEYWORDS;
super.dbType = DbType.oscar;
for (SQLParserFeature feature : features) {
config(feature, true);

View File

@ -151,7 +151,7 @@ public class OscarSelectParser extends SQLSelectParser {
for (;;) {
if (lexer.token() == Token.LIMIT) {
SQLLimit limit = new SQLLimit();
SQLLimit limit = getOrInitLimit(queryBlock);
lexer.nextToken();
if (lexer.token() == Token.ALL) {
@ -163,11 +163,7 @@ public class OscarSelectParser extends SQLSelectParser {
queryBlock.setLimit(limit);
} else if (lexer.token() == Token.OFFSET) {
SQLLimit limit = queryBlock.getLimit();
if (limit == null) {
limit = new SQLLimit();
queryBlock.setLimit(limit);
}
SQLLimit limit = getOrInitLimit(queryBlock);
lexer.nextToken();
SQLExpr offset = expr();
limit.setOffset(offset);
@ -253,6 +249,15 @@ public class OscarSelectParser extends SQLSelectParser {
return queryRest(queryBlock, acceptUnion);
}
private SQLLimit getOrInitLimit(SQLSelectQueryBlock queryBlock) {
SQLLimit limit = queryBlock.getLimit();
if (limit == null) {
limit = new SQLLimit();
queryBlock.setLimit(limit);
}
return limit;
}
public SQLTableSource parseTableSourceRest(SQLTableSource tableSource) {
if (lexer.token() == Token.AS && tableSource instanceof SQLExprTableSource) {
lexer.nextToken();

View File

@ -1764,11 +1764,7 @@ public class OscarOutputVisitor extends SQLASTOutputVisitor implements OscarASTV
print0(ucase ? "MONITORING" : "monitoring");
}
if (x.getPartitioning() != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
x.getPartitioning().accept(this);
}
printPartitionBy(x);
if (x.getCluster() != null) {
println();

View File

@ -282,7 +282,7 @@ public class OscarStatementParser extends SQLStatementParser {
return stmt;
}
public OscarDropSchemaStatement parseDropSchema() {
public OscarDropSchemaStatement parseDropSchema(boolean physical) {
OscarDropSchemaStatement stmt = new OscarDropSchemaStatement();
if (lexer.token() == Token.SCHEMA) {

View File

@ -15,6 +15,7 @@
*/
package com.alibaba.druid.sql.dialect.phoenix.parser;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.parser.Keywords;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.SQLParserFeature;
@ -27,9 +28,8 @@ import java.util.Map;
* Created by wenshao on 16/9/13.
*/
public class PhoenixLexer extends Lexer {
public static final Keywords DEFAULT_PHOENIX_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -47,12 +47,12 @@ public class PhoenixLexer extends Lexer {
map.put("MATCHED", Token.MATCHED);
map.put("UPSERT", Token.UPSERT);
DEFAULT_PHOENIX_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public PhoenixLexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_PHOENIX_KEYWORDS;
dbType = DbType.phoenix;
for (SQLParserFeature feature : features) {
config(feature, true);
}

View File

@ -9,6 +9,7 @@ import com.alibaba.druid.sql.ast.SQLPartitionByRange;
import com.alibaba.druid.sql.ast.SQLPartitionOf;
import com.alibaba.druid.sql.ast.expr.SQLBetweenExpr;
import com.alibaba.druid.sql.ast.expr.SQLIntegerExpr;
import com.alibaba.druid.sql.ast.statement.SQLCreateTableStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
@ -29,6 +30,37 @@ public class PGCreateTableParser extends SQLCreateTableParser {
super(exprParser);
}
protected void parseCreateTableRest(SQLCreateTableStatement stmt) {
// For partition of/by for PG
for (int i = 0; i < 2; i++) {
if (lexer.token() == Token.PARTITION) {
Lexer.SavePoint mark = lexer.mark();
lexer.nextToken();
if (Token.OF.equals(lexer.token())) {
lexer.reset(mark);
SQLPartitionOf partitionOf = parsePartitionOf();
stmt.setPartitionOf(partitionOf);
} else if (Token.BY.equals(lexer.token())) {
lexer.reset(mark);
SQLPartitionBy partitionClause = parsePartitionBy();
stmt.setPartitionBy(partitionClause);
}
}
}
if (lexer.nextIf(Token.WITH)) {
accept(Token.LPAREN);
parseAssignItems(stmt.getTableOptions(), stmt, false);
accept(Token.RPAREN);
}
if (lexer.nextIf(Token.TABLESPACE)) {
stmt.setTablespace(
this.exprParser.name()
);
}
}
public SQLPartitionBy parsePartitionBy() {
lexer.nextToken();
accept(Token.BY);

View File

@ -19,10 +19,13 @@ import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.ast.SQLArrayDataType;
import com.alibaba.druid.sql.ast.SQLCurrentTimeExpr;
import com.alibaba.druid.sql.ast.SQLDataType;
import com.alibaba.druid.sql.ast.SQLDataTypeImpl;
import com.alibaba.druid.sql.ast.SQLExpr;
import com.alibaba.druid.sql.ast.expr.*;
import com.alibaba.druid.sql.ast.statement.SQLUpdateSetItem;
import com.alibaba.druid.sql.dialect.postgresql.ast.expr.*;
import com.alibaba.druid.sql.parser.Lexer;
import com.alibaba.druid.sql.parser.ParserException;
import com.alibaba.druid.sql.parser.SQLExprParser;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.Token;
@ -94,6 +97,153 @@ public class PGExprParser extends SQLExprParser {
return new PGSelectParser(this);
}
@Override
protected SQLExpr methodRestAllowIdentifierMethodSpecific(String methodName, long hash_lower, SQLMethodInvokeExpr methodInvokeExpr) {
if (hash_lower == FnvHash.Constants.INT4) {
PGTypeCastExpr castExpr = new PGTypeCastExpr();
castExpr.setExpr(this.expr());
castExpr.setDataType(new SQLDataTypeImpl(methodName));
accept(Token.RPAREN);
return castExpr;
} else if (hash_lower == FnvHash.Constants.VARBIT) {
PGTypeCastExpr castExpr = new PGTypeCastExpr();
SQLExpr len = this.primary();
castExpr.setDataType(new SQLDataTypeImpl(methodName, len));
accept(Token.RPAREN);
castExpr.setExpr(this.expr());
return castExpr;
}
return null;
}
@Override
protected SQLExpr primaryOn(SQLExpr sqlExpr) {
String methodName = lexer.stringVal();
lexer.nextToken();
if (lexer.token() == Token.LPAREN) {
sqlExpr = this.methodRest(new SQLIdentifierExpr(methodName), true);
return sqlExpr;
}
throw new ParserException("ERROR. " + lexer.info());
}
@Override
protected SQLExpr primaryLiteralCharsRest(SQLExpr sqlExpr) {
Lexer.SavePoint savePoint = lexer.mark();
lexer.nextToken();
if (lexer.token() == Token.IDENTIFIER) {
String collate = lexer.stringVal();
if (collate.equalsIgnoreCase("collate")) {
lexer.nextToken();
String collateValue = lexer.stringVal();
if (lexer.token() == Token.IDENTIFIER || lexer.token() == Token.LITERAL_ALIAS || lexer.token() == Token.LITERAL_CHARS) {
((SQLCharExpr) sqlExpr).setCollate(lexer.stringVal());
} else {
throw new ParserException("syntax error. " + lexer.info());
}
} else {
lexer.reset(savePoint);
}
} else {
lexer.reset(savePoint);
}
lexer.nextToken();
return sqlExpr;
}
@Override
protected void parseUpdateSetItemLbracket(SQLUpdateSetItem item) {
SQLExpr column = item.getColumn();
column = this.primaryRest(column);
item.setColumn(column);
}
@Override
public SQLBinaryOperator andRestGetAndOperator() {
return SQLBinaryOperator.PG_And;
}
@Override
protected SQLExpr relationalRestTilde(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = relational();
rightExp = relationalRest(rightExp);
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.POSIX_Regular_Match, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestTildeStar(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = relational();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.POSIX_Regular_Match_Insensitive, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestQues(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = bitOr();
rightExp = relationalRest(rightExp);
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.JSONContains, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestBangTilde(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = relational();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.POSIX_Regular_Not_Match, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestBangTildeStar(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = relational();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.POSIX_Regular_Not_Match_POSIX_Regular_Match_Insensitive, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestTildeEq(SQLExpr expr) {
lexer.nextToken();
SQLExpr rightExp = relational();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.SAME_AS, rightExp, dbType);
}
@Override
protected SQLExpr relationalRestIdentifierSimilar(SQLExpr expr) {
lexer.nextToken();
accept(Token.TO);
SQLExpr rightExp = bitOr();
return new SQLBinaryOpExpr(expr, SQLBinaryOperator.SIMILAR_TO, rightExp, dbType);
}
@Override
protected void parseDataTypeDouble(StringBuilder typeName) {
typeName.append(' ').append(lexer.stringVal());
lexer.nextToken();
}
@Override
protected SQLExpr parseSelectItemRest(String ident, long hash_lower) {
SQLExpr expr;
if (lexer.identifierEquals(FnvHash.Constants.COLLATE)
&& lexer.stringVal().charAt(0) != '`'
) {
lexer.nextToken();
String collate = lexer.stringVal();
lexer.nextToken();
SQLBinaryOpExpr binaryExpr = new SQLBinaryOpExpr(
new SQLIdentifierExpr(ident),
SQLBinaryOperator.COLLATE,
new SQLIdentifierExpr(collate), dbType
);
expr = binaryExpr;
} else {
expr = super.parseSelectItemRest(ident, hash_lower);
}
return expr;
}
public SQLExpr primary() {
if (lexer.token() == Token.ARRAY) {
String ident = lexer.stringVal();

View File

@ -22,12 +22,13 @@ import java.util.HashMap;
import java.util.Map;
import static com.alibaba.druid.sql.parser.CharTypes.isIdentifierChar;
import static com.alibaba.druid.sql.parser.DialectFeature.LexerFeature.*;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.*;
import static com.alibaba.druid.sql.parser.Token.LITERAL_CHARS;
public class PGLexer extends Lexer {
public static final Keywords DEFAULT_PG_KEYWORDS;
static {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -73,14 +74,13 @@ public class PGLexer extends Lexer {
map.put("INTERVAL", Token.INTERVAL);
map.put("LANGUAGE", Token.LANGUAGE);
DEFAULT_PG_KEYWORDS = new Keywords(map);
return new Keywords(map);
}
public PGLexer(String input, SQLParserFeature... features) {
super(input, true);
this.keepComments = true;
super.keywords = DEFAULT_PG_KEYWORDS;
super.dbType = DbType.postgresql;
dbType = DbType.postgresql;
for (SQLParserFeature feature : features) {
config(feature, true);
}
@ -230,4 +230,39 @@ public class PGLexer extends Lexer {
stringVal = addSymbol();
token = Token.VARIANT;
}
protected void nextTokenQues() {
if (ch == '?') {
scanChar();
if (ch == '|') {
scanChar();
token = Token.QUESQUESBAR;
} else {
token = Token.QUESQUES;
}
} else if (ch == '|') {
scanChar();
if (ch == '|') {
unscan();
token = Token.QUES;
} else {
token = Token.QUESBAR;
}
} else if (ch == '&') {
scanChar();
token = Token.QUESAMP;
} else {
token = Token.QUES;
}
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(
ScanVariableGreaterThan,
SQLDateExpr,
ParseStatementListWhen
);
}
}

View File

@ -26,11 +26,12 @@ import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.*;
import com.alibaba.druid.sql.parser.*;
import com.alibaba.druid.util.FnvHash;
import com.alibaba.druid.util.JdbcUtils;
import java.util.ArrayList;
import java.util.List;
import static com.alibaba.druid.sql.parser.Token.RESTRICT;
public class PGSQLStatementParser extends SQLStatementParser {
public static final String TIME_ZONE = "TIME ZONE";
public static final String TIME = "TIME";
@ -282,7 +283,7 @@ public class PGSQLStatementParser extends SQLStatementParser {
return stmt;
}
public PGDropSchemaStatement parseDropSchema() {
public PGDropSchemaStatement parseDropSchema(boolean physical) {
PGDropSchemaStatement stmt = new PGDropSchemaStatement();
if (lexer.token() == Token.SCHEMA) {
@ -774,7 +775,7 @@ public class PGSQLStatementParser extends SQLStatementParser {
lexer.nextToken();
values.add(this.exprParser.primary());
lexer.nextToken();
} else if (JdbcUtils.isPgsqlDbType(dbType) && ("schema".equalsIgnoreCase(parameter) || "names".equalsIgnoreCase(parameter))) {
} else if ("schema".equalsIgnoreCase(parameter) || "names".equalsIgnoreCase(parameter)) {
paramExpr = new SQLIdentifierExpr(parameter);
lexer.nextToken();
String value = lexer.stringVal();
@ -1106,4 +1107,40 @@ public class PGSQLStatementParser extends SQLStatementParser {
stmt.setPassword(this.exprParser.primary());
return stmt;
}
@Override
protected boolean alterTableAfterNameRest(SQLAlterTableStatement stmt) {
if (lexer.identifierEquals("CHANGEOWNER") && lexer.identifierEquals("OWNER")) {
alterTableOwner(stmt);
} else {
return true;
}
return false;
}
@Override
protected void alterTableAlterComma() {
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
}
}
@Override
public void parseAlterDropRest(SQLAlterTableStatement stmt, SQLAlterTableDropColumnItem item) {
item.getColumns().add(this.exprParser.name());
if (lexer.token() == Token.CASCADE) {
item.setCascade(true);
lexer.nextToken();
}
if (RESTRICT == lexer.token()) {
item.setRestrict(true);
lexer.nextToken();
}
stmt.addItem(item);
if (lexer.token() == Token.COMMA) {
lexer.nextToken();
}
}
}

View File

@ -18,11 +18,7 @@ package com.alibaba.druid.sql.dialect.postgresql.parser;
import com.alibaba.druid.sql.ast.*;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLSizeExpr;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.ast.statement.SQLSelectQuery;
import com.alibaba.druid.sql.ast.statement.SQLSelectQueryBlock;
import com.alibaba.druid.sql.ast.statement.SQLTableSampling;
import com.alibaba.druid.sql.ast.statement.SQLTableSource;
import com.alibaba.druid.sql.ast.statement.*;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.PGFunctionTableSource;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.PGSelectQueryBlock;
import com.alibaba.druid.sql.dialect.postgresql.ast.stmt.PGSelectQueryBlock.IntoOption;
@ -142,7 +138,7 @@ public class PGSelectParser extends SQLSelectParser {
for (; ; ) {
if (lexer.token() == Token.LIMIT) {
SQLLimit limit = new SQLLimit();
SQLLimit limit = getOrInitLimit(queryBlock);
lexer.nextToken();
if (lexer.token() == Token.ALL) {
@ -152,13 +148,9 @@ public class PGSelectParser extends SQLSelectParser {
limit.setRowCount(expr());
}
queryBlock.setLimit(limit);
} else if (lexer.token() == Token.OFFSET) {
SQLLimit limit = queryBlock.getLimit();
if (limit == null) {
limit = new SQLLimit();
queryBlock.setLimit(limit);
}
SQLLimit limit = getOrInitLimit(queryBlock);
lexer.nextToken();
SQLExpr offset = expr();
limit.setOffset(offset);
@ -244,6 +236,15 @@ public class PGSelectParser extends SQLSelectParser {
return queryRest(queryBlock, acceptUnion);
}
private SQLLimit getOrInitLimit(SQLSelectQueryBlock queryBlock) {
SQLLimit limit = queryBlock.getLimit();
if (limit == null) {
limit = new SQLLimit();
queryBlock.setLimit(limit);
}
return limit;
}
public SQLTableSource parseTableSourceRest(SQLTableSource tableSource) {
if (lexer.token() == Token.AS && tableSource instanceof SQLExprTableSource) {
lexer.nextToken();

View File

@ -370,15 +370,24 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
public boolean visit(PGFunctionTableSource x) {
x.getExpr().accept(this);
if (x.getAlias() != null) {
print0(ucase ? " AS " : " as ");
print0(x.getAlias());
String alias = x.getAlias();
List<SQLParameter> parameters = x.getParameters();
if (alias != null || !x.getParameters().isEmpty()) {
print0(ucase ? " AS" : " as");
}
if (x.getParameters().size() > 0) {
if (alias != null) {
print(' ');
print0(alias);
}
if (!parameters.isEmpty()) {
incrementIndent();
println();
print('(');
printAndAccept(x.getParameters(), ", ");
printAndAccept(parameters, ", ");
print(')');
decrementIndent();
}
return false;
@ -1397,8 +1406,8 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
println();
print0(ucase ? "FROM " : "from ");
if (x.getFrom() != null) {
if (x.getCommentsAfaterFrom() != null) {
printAfterComments(x.getCommentsAfaterFrom());
if (x.getCommentsAfterFrom() != null) {
printAfterComments(x.getCommentsAfterFrom());
println();
}
x.getFrom().accept(this);
@ -1912,11 +1921,7 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
print0(ucase ? "MONITORING" : "monitoring");
}
if (x.getPartitioning() != null) {
println();
print0(ucase ? "PARTITION BY " : "partition by ");
x.getPartitioning().accept(this);
}
printPartitionBy(x);
if (x.getCluster() != null) {
println();
@ -2875,4 +2880,15 @@ public class PGOutputVisitor extends SQLASTOutputVisitor implements PGASTVisitor
protected boolean legacyCube() {
return true;
}
protected void printTableOption(SQLExpr name, SQLExpr value, int index) {
if (index != 0) {
print(",");
println();
}
String key = name.toString();
print0(key);
print0(" = ");
value.accept(this);
}
}

View File

@ -66,7 +66,7 @@ public class PrestoCreateTableParser extends SQLCreateTableParser {
accept(Token.NOT);
accept(Token.EXISTS);
createTable.setIfNotExiists(true);
createTable.setIfNotExists(true);
}
createTable.setName(this.exprParser.name());

View File

@ -9,10 +9,11 @@ import com.alibaba.druid.sql.parser.Token;
import java.util.HashMap;
import java.util.Map;
public class PrestoLexer extends Lexer {
public static final Keywords DEFAULT_PHOENIX_KEYWORDS;
import static com.alibaba.druid.sql.parser.DialectFeature.ParserFeature.SQLDateExpr;
static {
public class PrestoLexer extends Lexer {
@Override
protected Keywords loadKeywords() {
Map<String, Token> map = new HashMap<String, Token>();
map.putAll(Keywords.DEFAULT_KEYWORDS.getKeywords());
@ -33,18 +34,20 @@ public class PrestoLexer extends Lexer {
map.put("IF", Token.IF);
DEFAULT_PHOENIX_KEYWORDS = new Keywords(map);
}
{
dbType = DbType.presto;
return new Keywords(map);
}
public PrestoLexer(String input, SQLParserFeature... features) {
super(input);
super.keywords = DEFAULT_PHOENIX_KEYWORDS;
this.dbType = DbType.presto;
for (SQLParserFeature feature : features) {
config(feature, true);
}
}
@Override
protected void initDialectFeature() {
super.initDialectFeature();
this.dialectFeature.configFeature(SQLDateExpr);
}
}

View File

@ -21,6 +21,7 @@ import com.alibaba.druid.sql.ast.SQLName;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.ast.expr.SQLIdentifierExpr;
import com.alibaba.druid.sql.ast.expr.SQLPropertyExpr;
import com.alibaba.druid.sql.ast.statement.SQLExplainStatement;
import com.alibaba.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.druid.sql.ast.statement.SQLInsertInto;
import com.alibaba.druid.sql.ast.statement.SQLSelect;
@ -511,4 +512,30 @@ public class PrestoStatementParser extends SQLStatementParser {
return stmt;
}
@Override
public void parseCreateTableSupportSchema() {
if (lexer.token() == Token.SCHEMA) {
lexer.nextToken();
} else {
accept(Token.DATABASE);
}
}
@Override
public void parseExplainFormatType(SQLExplainStatement explain) {
if (lexer.token() == Token.LPAREN) {
lexer.nextToken();
if (lexer.identifierEquals("FORMAT")) {
lexer.nextToken();
lexer.nextToken();
} else if (lexer.identifierEquals("TYPE")) {
lexer.nextToken();
lexer.nextToken();
}
accept(Token.RPAREN);
}
}
}

Some files were not shown because too many files have changed in this diff Show More