Support symbolic boolean operators for OR and AND
SpEL supports logical operators for boolean expressions broadly consistent with standard Java syntax, but logical AND and logical OR have so far been available only as the textual operators 'and' and 'or'; the symbolic forms && and || were not recognized. The SpEL tokenizer has now been modified to recognize && and || as symbolic boolean operators, and the parser has been modified to accept either the textual or the symbolic form.

Issue: SPR-9614
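With this change the symbolic and textual operators are interchangeable. A minimal usage sketch, mirroring the new test added further down in this commit (the class name is illustrative):

    import org.springframework.expression.spel.standard.SpelExpression;
    import org.springframework.expression.spel.standard.SpelExpressionParser;

    public class SymbolicBooleanOperatorExample {
        public static void main(String[] args) {
            SpelExpressionParser parser = new SpelExpressionParser();

            // Symbolic forms, newly recognized by the tokenizer
            SpelExpression expr = parser.parseRaw("true && (true || false)");
            System.out.println(expr.getValue(Boolean.class));  // true

            // Textual forms continue to work as before
            expr = parser.parseRaw("true and (true or false)");
            System.out.println(expr.getValue(Boolean.class));  // true
        }
    }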
commit 58e6214b7b
parent 6249539426
@@ -169,7 +169,7 @@ class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
     //logicalOrExpression : logicalAndExpression (OR^ logicalAndExpression)*;
     private SpelNodeImpl eatLogicalOrExpression() {
         SpelNodeImpl expr = eatLogicalAndExpression();
-        while (peekIdentifierToken("or")) {
+        while (peekIdentifierToken("or") || peekToken(TokenKind.SYMBOLIC_OR)) {
             Token t = nextToken(); //consume OR
             SpelNodeImpl rhExpr = eatLogicalAndExpression();
             checkRightOperand(t,rhExpr);
@@ -181,7 +181,7 @@ class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
     // logicalAndExpression : relationalExpression (AND^ relationalExpression)*;
     private SpelNodeImpl eatLogicalAndExpression() {
         SpelNodeImpl expr = eatRelationalExpression();
-        while (peekIdentifierToken("and")) {
+        while (peekIdentifierToken("and") || peekToken(TokenKind.SYMBOLIC_AND)) {
             Token t = nextToken();// consume 'AND'
             SpelNodeImpl rhExpr = eatRelationalExpression();
             checkRightOperand(t,rhExpr);
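A note on precedence implied by the grammar comments above: logicalOrExpression is built from logicalAndExpression operands, so && binds more tightly than ||, as in Java. A small sketch of the resulting grouping (expression strings chosen for illustration, not taken from the commit's tests):

    import org.springframework.expression.spel.standard.SpelExpressionParser;

    public class SymbolicOperatorPrecedenceSketch {
        public static void main(String[] args) {
            SpelExpressionParser parser = new SpelExpressionParser();

            // Parsed as (false && true) || true, because && is handled one level below ||
            System.out.println(parser.parseRaw("false && true || true").getValue(Boolean.class));   // true

            // Parentheses force the other grouping
            System.out.println(parser.parseRaw("false && (true || true)").getValue(Boolean.class)); // false
        }
    }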
@@ -432,7 +432,6 @@ class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
         }
     }
 
-
     //startNode
     // : parenExpr | literal
     // | type
@@ -513,7 +512,7 @@ class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
     private boolean maybeEatNullReference() {
         if (peekToken(TokenKind.IDENTIFIER)) {
             Token nullToken = peekToken();
-            if (!nullToken.stringValue().toLowerCase().equals("null")) {
+            if (!nullToken.stringValue().equalsIgnoreCase("null")) {
                 return false;
             }
             nextToken();
@@ -805,7 +804,6 @@ class InternalSpelExpressionParser extends TemplateAwareExpressionParser {
         return t.kind==TokenKind.SELECT || t.kind==TokenKind.SELECT_FIRST || t.kind==TokenKind.SELECT_LAST;
     }
 
-
     private boolean moreTokens() {
         return tokenStreamPointer<tokenStream.size();
     }
@@ -1,5 +1,5 @@
 /*
- * Copyright 2002-2009 the original author or authors.
+ * Copyright 2002-2012 the original author or authors.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -29,7 +29,7 @@ enum TokenKind {
     DIV("/"), GE(">="), GT(">"), LE("<="), LT("<"), EQ("=="), NE("!="),
     MOD("%"), NOT("!"), ASSIGN("="), INSTANCEOF("instanceof"), MATCHES("matches"), BETWEEN("between"),
     SELECT("?["), POWER("^"),
-    ELVIS("?:"), SAFE_NAVI("?."), BEAN_REF("@")
+    ELVIS("?:"), SAFE_NAVI("?."), BEAN_REF("@"), SYMBOLIC_OR("||"), SYMBOLIC_AND("&&")
     ;
 
     char[] tokenChars;
@@ -38,7 +38,7 @@ class Tokenizer {
     int pos;
     int max;
     List<Token> tokens = new ArrayList<Token>();
 
     public Tokenizer(String inputdata) {
         this.expressionString = inputdata;
         this.toProcess = (inputdata+"\0").toCharArray();
@@ -46,7 +46,7 @@ class Tokenizer {
         this.pos = 0;
         process();
     }
 
     public void process() {
         while (pos<max) {
             char ch = toProcess[pos];
@@ -128,6 +128,16 @@ class Tokenizer {
                 pushCharToken(TokenKind.ASSIGN);
             }
             break;
+        case '&':
+            if (isTwoCharToken(TokenKind.SYMBOLIC_AND)) {
+                pushPairToken(TokenKind.SYMBOLIC_AND);
+            }
+            break;
+        case '|':
+            if (isTwoCharToken(TokenKind.SYMBOLIC_OR)) {
+                pushPairToken(TokenKind.SYMBOLIC_OR);
+            }
+            break;
         case '?':
             if (isTwoCharToken(TokenKind.SELECT)) {
                 pushPairToken(TokenKind.SELECT);
@@ -195,12 +205,11 @@ class Tokenizer {
             }
         }
     }
 
     public List<Token> getTokens() {
         return tokens;
     }
 
-
     // STRING_LITERAL: '\''! (APOS|~'\'')* '\''!;
     private void lexQuotedStringLiteral() {
         int start = pos;
@@ -223,7 +232,7 @@ class Tokenizer {
         pos++;
         tokens.add(new Token(TokenKind.LITERAL_STRING, subarray(start,pos), start, pos));
     }
 
     // DQ_STRING_LITERAL: '"'! (~'"')* '"'!;
     private void lexDoubleQuotedStringLiteral() {
         int start = pos;
@@ -241,8 +250,7 @@ class Tokenizer {
         pos++;
         tokens.add(new Token(TokenKind.LITERAL_STRING, subarray(start,pos), start, pos));
     }
 
-
     // REAL_LITERAL :
     // ('.' (DECIMAL_DIGIT)+ (EXPONENT_PART)? (REAL_TYPE_SUFFIX)?) |
     // ((DECIMAL_DIGIT)+ '.' (DECIMAL_DIGIT)+ (EXPONENT_PART)? (REAL_TYPE_SUFFIX)?) |
@@ -256,7 +264,7 @@ class Tokenizer {
     // fragment REAL_TYPE_SUFFIX : 'F' | 'f' | 'D' | 'd';
     // INTEGER_LITERAL
     // : (DECIMAL_DIGIT)+ (INTEGER_TYPE_SUFFIX)?;
 
     private void lexNumericLiteral(boolean firstCharIsZero) {
         boolean isReal = false;
         int start = pos;
@@ -353,10 +361,10 @@ class Tokenizer {
             }
         }
     }
 
     // if this is changed, it must remain sorted
     private static final String[] alternativeOperatorNames = { "DIV","EQ","GE","GT","LE","LT","MOD","NE","NOT"};
 
     private void lexIdentifier() {
         int start = pos;
         do {
@@ -375,7 +383,7 @@ class Tokenizer {
         }
         tokens.add(new Token(TokenKind.IDENTIFIER,subarray,start,pos));
     }
 
     private void pushIntToken(char[] data,boolean isLong, int start, int end) {
         if (isLong) {
             tokens.add(new Token(TokenKind.LITERAL_LONG,data, start, end));
@@ -398,7 +406,7 @@ class Tokenizer {
             tokens.add(new Token(TokenKind.LITERAL_HEXINT, data, start, end));
         }
     }
 
     private void pushRealToken(char[] data, boolean isFloat, int start, int end) {
         if (isFloat) {
             tokens.add(new Token(TokenKind.LITERAL_REAL_FLOAT, data, start, end));
@@ -406,13 +414,13 @@ class Tokenizer {
             tokens.add(new Token(TokenKind.LITERAL_REAL, data, start, end));
         }
     }
 
     private char[] subarray(int start, int end) {
         char[] result = new char[end - start];
         System.arraycopy(toProcess, start, result, 0, end - start);
         return result;
     }
 
     /**
      * Check if this might be a two character token.
      */
@@ -421,7 +429,7 @@ class Tokenizer {
         Assert.isTrue(toProcess[pos] == kind.tokenChars[0]);
         return toProcess[pos+1] == kind.tokenChars[1];
     }
 
     /**
      * Push a token of just one character in length.
      */
@@ -429,7 +437,7 @@ class Tokenizer {
         tokens.add(new Token(kind,pos,pos+1));
         pos++;
     }
 
     /**
      * Push a token of two characters in length.
      */
@@ -437,7 +445,7 @@ class Tokenizer {
         tokens.add(new Token(kind,pos,pos+2));
         pos+=2;
     }
 
     private void pushOneCharOrTwoCharToken(TokenKind kind, int pos, char[] data) {
         tokens.add(new Token(kind,data,pos,pos+kind.getLength()));
     }
@@ -446,7 +454,7 @@ class Tokenizer {
     private boolean isIdentifier(char ch) {
         return isAlphabetic(ch) || isDigit(ch) || ch=='_' || ch=='$';
     }
 
     private boolean isChar(char a,char b) {
         char ch = toProcess[pos];
         return ch==a || ch==b;
@@ -467,7 +475,7 @@ class Tokenizer {
     private boolean isSign(char ch) {
         return ch=='+' || ch=='-';
     }
 
     private boolean isDigit(char ch) {
         if (ch>255) {
             return false;
@@ -481,14 +489,14 @@ class Tokenizer {
         }
         return (flags[ch] & IS_ALPHA)!=0;
     }
 
     private boolean isHexadecimalDigit(char ch) {
         if (ch>255) {
             return false;
         }
         return (flags[ch] & IS_HEXDIGIT)!=0;
     }
 
     private static final byte flags[] = new byte[256];
     private static final byte IS_DIGIT=0x01;
     private static final byte IS_HEXDIGIT=0x02;
@@ -70,7 +70,7 @@ public class SpelParserTests {
         assertEquals(5, expr.getValue());
         expr = parser.parseRaw("2 + 3");
         assertEquals(5, expr.getValue());
-        expr = parser.parseRaw("2\n+ 3");
+        expr = parser.parseRaw("2\n+\t3");
         assertEquals(5, expr.getValue());
         expr = parser.parseRaw("2\r\n+\t3");
         assertEquals(5, expr.getValue());
@@ -229,6 +229,24 @@ public class SpelParserTests {
         assertEquals(Boolean.FALSE, expr.getValue(Boolean.class));
     }
 
+    @Test
+    public void booleanOperators_symbolic_spr9614() throws EvaluationException, ParseException {
+        SpelExpression expr = new SpelExpressionParser().parseRaw("true");
+        assertEquals(Boolean.TRUE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("false");
+        assertEquals(Boolean.FALSE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("false && false");
+        assertEquals(Boolean.FALSE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("true && (true || false)");
+        assertEquals(Boolean.TRUE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("true && true || false");
+        assertEquals(Boolean.TRUE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("!true");
+        assertEquals(Boolean.FALSE, expr.getValue(Boolean.class));
+        expr = new SpelExpressionParser().parseRaw("!(false || true)");
+        assertEquals(Boolean.FALSE, expr.getValue(Boolean.class));
+    }
+
     @Test
     public void stringLiterals() throws EvaluationException, ParseException {
         SpelExpression expr = new SpelExpressionParser().parseRaw("'howdy'");
@@ -8,7 +8,8 @@ Changes in version 3.2 M2 (2012-08-xx)
 
 * spring-test module now depends on junit:junit-dep (SPR-6966)
 * now inferring return type of generic factory methods (SPR-9493)
-* SpEL Tokenizer now supports methods on integers (SPR-9612)
+* SpEL now supports method invocations on integers (SPR-9612)
+* SpEL now supports symbolic boolean operators for OR and AND (SPR-9614)
 * introduced support for case-insensitive null literals in SpEL expressions (SPR-9613)
 * now using BufferedInputStream in SimpleMetaDataReader to double performance (SPR-9528)
 * introduced "repeatCount" property in Quartz SimpleTriggerFactoryBean (SPR-9521)