add keyword config for spark/hive (#6390)
Java CI / Test JDK ${{ matrix.java }}, ${{ matrix.os }} (11, ubuntu-latest) (push) Waiting to run Details
Java CI / Test JDK ${{ matrix.java }}, ${{ matrix.os }} (17, ubuntu-latest) (push) Waiting to run Details
Java CI / Test JDK ${{ matrix.java }}, ${{ matrix.os }} (21, ubuntu-latest) (push) Waiting to run Details
Java CI / Test JDK ${{ matrix.java }}, ${{ matrix.os }} (8, ubuntu-latest) (push) Waiting to run Details

* add dbtype adb_mysql

* Presto/Trino parser fix

* add CTE visit for insertNode

* add keyword config for spark/hive
This commit is contained in:
Muhong_Yang 2025-04-01 20:40:20 +08:00 committed by GitHub
parent ddf756e6dd
commit 568a1a6ec8
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 655 additions and 0 deletions

View File

@ -0,0 +1,160 @@
ALL
ALTER
AND
ARRAY
AS
AUTHORIZATION
BETWEEN
BIGINT
BINARY
BOOLEAN
BOTH
BY
CASE
CAST
CHAR
COLUMN
CONF
CREATE
CROSS
CUBE
CURRENT
CURRENT_DATE
CURRENT_TIMESTAMP
CURSOR
DATABASE
DATE
DECIMAL
DELETE
DESCRIBE
DISTINCT
DOUBLE
DROP
ELSE
END
EXCHANGE
EXISTS
EXTENDED
EXTERNAL
FALSE
FETCH
FLOAT
FOLLOWING
FOR
FROM
FULL
FUNCTION
GRANT
GROUP
GROUPING
HAVING
IF
IMPORT
IN
INNER
INSERT
INT
INTERSECT
INTERVAL
INTO
IS
JOIN
LATERAL
LEFT
LESS
LIKE
LOCAL
MACRO
MAP
MORE
NONE
NOT
NULL
OF
ON
OR
ORDER
OUT
OUTER
OVER
PARTIALSCAN
PARTITION
PERCENT
PRECEDING
PRESERVE
PROCEDURE
RANGE
READS
REDUCE
REVOKE
RIGHT
ROLLUP
ROW
ROWS
SELECT
SET
SMALLINT
TABLE
TABLESAMPLE
THEN
TIMESTAMP
TO
TRANSFORM
TRIGGER
TRUE
TRUNCATE
UNBOUNDED
UNION
UNIQUEJOIN
UPDATE
USER
USING
UTC_TMESTAMP
VALUES
VARCHAR
WHEN
WHERE
WINDOW
WITH
COMMIT
ONLY
REGEXP
RLIKE
ROLLBACK
START
CACHE
CONSTRAINT
FOREIGN
PRIMARY
REFERENCES
EXCEPT
EXTRACT
FLOOR
GROUPING
INTEGER
MINUS
PRECISION
MERGE
ANY
APPLICATION
DEC
NUMERIC
SYNC
TIME
TIMESTAMPLOCALTZ
UNIQUE
COMPACTIONID
CONNECTOR
CONNECTORS
CONVERT
DDL
FORCE
LEADING
OLDER
PKFK_JOIN
PREPARE
QUALIFY
REAL
SOME
THAN
TRAILING

View File

@ -0,0 +1,160 @@
ALL
ALTER
AND
ARRAY
AS
AUTHORIZATION
BETWEEN
BIGINT
BINARY
BOOLEAN
BOTH
BY
CASE
CAST
CHAR
COLUMN
CONF
CREATE
CROSS
CUBE
CURRENT
CURRENT_DATE
CURRENT_TIMESTAMP
CURSOR
DATABASE
DATE
DECIMAL
DELETE
DESCRIBE
DISTINCT
DOUBLE
DROP
ELSE
END
EXCHANGE
EXISTS
EXTENDED
EXTERNAL
FALSE
FETCH
FLOAT
FOLLOWING
FOR
FROM
FULL
FUNCTION
GRANT
GROUP
GROUPING
HAVING
IF
IMPORT
IN
INNER
INSERT
INT
INTERSECT
INTERVAL
INTO
IS
JOIN
LATERAL
LEFT
LESS
LIKE
LOCAL
MACRO
MAP
MORE
NONE
NOT
NULL
OF
ON
OR
ORDER
OUT
OUTER
OVER
PARTIALSCAN
PARTITION
PERCENT
PRECEDING
PRESERVE
PROCEDURE
RANGE
READS
REDUCE
REVOKE
RIGHT
ROLLUP
ROW
ROWS
SELECT
SET
SMALLINT
TABLE
TABLESAMPLE
THEN
TIMESTAMP
TO
TRANSFORM
TRIGGER
TRUE
TRUNCATE
UNBOUNDED
UNION
UNIQUEJOIN
UPDATE
USER
USING
UTC_TMESTAMP
VALUES
VARCHAR
WHEN
WHERE
WINDOW
WITH
COMMIT
ONLY
REGEXP
RLIKE
ROLLBACK
START
CACHE
CONSTRAINT
FOREIGN
PRIMARY
REFERENCES
EXCEPT
EXTRACT
FLOOR
GROUPING
INTEGER
MINUS
PRECISION
MERGE
ANY
APPLICATION
DEC
NUMERIC
SYNC
TIME
TIMESTAMPLOCALTZ
UNIQUE
COMPACTIONID
CONNECTOR
CONNECTORS
CONVERT
DDL
FORCE
LEADING
OLDER
PKFK_JOIN
PREPARE
QUALIFY
REAL
SOME
THAN
TRAILING

View File

@ -0,0 +1,139 @@
ALL
ALTER
AND
ANY
ARRAY
AS
AT
AUTHORIZATION
BETWEEN
BIGINT
BINARY
BOOLEAN
BOTH
BY
CASE
CAST
CHAR
CHARACTER
CHECK
CONSTRAINT
CREATE
CROSS
CUBE
CURRENT
CURRENT_DATE
CURRENT_TIME
CURRENT_TIMESTAMP
CURRENT_USER
DATE
DEC
DECIMAL
DELETE
DESC
DESCRIBE
DISTINCT
DOUBLE
DROP
ELSE
END
ESCAPE
EXCEPT
EXISTS
EXTRACT
FALSE
FETCH
FILTER
FLOAT
FOR
FOREIGN
FROM
FULL
FUNCTION
GENERATED
GLOBAL
GRANT
GROUP
GROUPING
HAVING
IF
IN
INNER
INSERT
INT
INTEGER
INTERSECT
INTERVAL
INTO
IS
JOIN
LATERAL
LEADING
LEFT
LIKE
LOCAL
NOT
NULL
NUMERIC
OF
OFFSET
ON
ONLY
OR
ORDER
OUTER
OVERLAPS
OVERLAY
PARTITION
PERCENTILE_CONT
PERCENTILE_DISC
POSITION
PRIMARY
PROCEDURE
RANGE
REAL
REFERENCES
REVOKE
RIGHT
ROLLBACK
ROLLUP
ROW
ROWS
SELECT
SESSION_USER
SET
SMALLINT
SOME
START
STRING
STRUCT
SUBSTR
SUBSTRING
SYSTEM_TIME
SYSTEM_VERSION
TABLE
TABLESAMPLE
THEN
TIME
TIMESTAMP
TINYINT
TO
TRAILING
TRANSFORM
TRUE
TRUNCATE
UNION
UNIQUE
UNKNOWN
UNPIVOT
UPDATE
USER
USING
VALUES
VARCHAR
VIEW
WHEN
WHERE
WINDOW
WITH
WITHIN

View File

@ -0,0 +1,139 @@
ALL
ALTER
AND
ANY
ARRAY
AS
AT
AUTHORIZATION
BETWEEN
BIGINT
BINARY
BOOLEAN
BOTH
BY
CASE
CAST
CHAR
CHARACTER
CHECK
CONSTRAINT
CREATE
CROSS
CUBE
CURRENT
CURRENT_DATE
CURRENT_TIME
CURRENT_TIMESTAMP
CURRENT_USER
DATE
DEC
DECIMAL
DELETE
DESC
DESCRIBE
DISTINCT
DOUBLE
DROP
ELSE
END
ESCAPE
EXCEPT
EXISTS
EXTRACT
FALSE
FETCH
FILTER
FLOAT
FOR
FOREIGN
FROM
FULL
FUNCTION
GENERATED
GLOBAL
GRANT
GROUP
GROUPING
HAVING
IF
IN
INNER
INSERT
INT
INTEGER
INTERSECT
INTERVAL
INTO
IS
JOIN
LATERAL
LEADING
LEFT
LIKE
LOCAL
NOT
NULL
NUMERIC
OF
OFFSET
ON
ONLY
OR
ORDER
OUTER
OVERLAPS
OVERLAY
PARTITION
PERCENTILE_CONT
PERCENTILE_DISC
POSITION
PRIMARY
PROCEDURE
RANGE
REAL
REFERENCES
REVOKE
RIGHT
ROLLBACK
ROLLUP
ROW
ROWS
SELECT
SESSION_USER
SET
SMALLINT
SOME
START
STRING
STRUCT
SUBSTR
SUBSTRING
SYSTEM_TIME
SYSTEM_VERSION
TABLE
TABLESAMPLE
THEN
TIME
TIMESTAMP
TINYINT
TO
TRAILING
TRANSFORM
TRUE
TRUNCATE
UNION
UNIQUE
UNKNOWN
UNPIVOT
UPDATE
USER
USING
VALUES
VARCHAR
VIEW
WHEN
WHERE
WINDOW
WITH
WITHIN

View File

@ -0,0 +1,28 @@
package com.alibaba.druid.bvt.sql.hive;
import com.alibaba.druid.DbType;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLParserUtils;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.sql.visitor.VisitorFeature;
import junit.framework.TestCase;
import org.junit.Assert;
public class HiveKeywordsTest extends TestCase {
    /**
     * Verifies the Hive reserved-keyword configuration: {@code TIMESTAMPLOCALTZ}
     * is a Hive keyword, so when it is used as a column alias the output
     * visitor must quote it with backticks.
     */
    public void test_keywords() {
        DbType dbType = DbType.hive;
        // Alias deliberately collides with the TIMESTAMPLOCALTZ keyword entry
        // from the hive keyword config file added in this change.
        String sql = "select 1 as TIMESTAMPLOCALTZ";
        SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(
                sql,
                dbType,
                SQLParserFeature.IgnoreNameQuotes);
        SQLStatement stmt = parser.parseStatement();
        // The lexer must have consumed the whole statement; leftover tokens
        // would indicate the keyword broke parsing.
        assertEquals(Token.EOF, parser.getLexer().token());
        String result = SQLUtils.toSQLString(stmt, dbType, null, VisitorFeature.OutputNameQuote).trim();
        // Keyword alias must be emitted back-quoted.
        String expectedSql = "SELECT 1 AS `TIMESTAMPLOCALTZ`";
        assertEquals(expectedSql, result);
    }
}

View File

@ -0,0 +1,29 @@
package com.alibaba.druid.bvt.sql.spark;
import com.alibaba.druid.DbType;
import com.alibaba.druid.bvt.sql.SQLResourceTest;
import com.alibaba.druid.sql.SQLUtils;
import com.alibaba.druid.sql.ast.SQLStatement;
import com.alibaba.druid.sql.parser.SQLParserFeature;
import com.alibaba.druid.sql.parser.SQLParserUtils;
import com.alibaba.druid.sql.parser.SQLStatementParser;
import com.alibaba.druid.sql.parser.Token;
import com.alibaba.druid.sql.visitor.VisitorFeature;
import junit.framework.TestCase;
import org.junit.Assert;
import org.junit.Test;
public class SparkKeywordsTest extends TestCase {
    /**
     * Verifies the Spark reserved-keyword configuration: {@code AUTHORIZATION}
     * is a Spark keyword, so when it is used as a column alias the output
     * visitor must quote it with backticks while preserving its original case.
     */
    public void test_keywords() {
        DbType dbType = DbType.spark;
        // Alias deliberately collides with the AUTHORIZATION keyword entry
        // from the spark keyword config file added in this change.
        String sql = "select 1 as authorization";
        SQLStatementParser parser = SQLParserUtils.createSQLStatementParser(
                sql,
                dbType,
                SQLParserFeature.IgnoreNameQuotes);
        SQLStatement stmt = parser.parseStatement();
        // The lexer must have consumed the whole statement; leftover tokens
        // would indicate the keyword broke parsing.
        assertEquals(Token.EOF, parser.getLexer().token());
        String result = SQLUtils.toSQLString(stmt, dbType, null, VisitorFeature.OutputNameQuote).trim();
        // Keyword alias must be emitted back-quoted, original lower case kept.
        String expectedSql = "SELECT 1 AS `authorization`";
        assertEquals(expectedSql, result);
    }
}