{
private static int parseAcks(String acksString) {
try {
- return acksString.trim().toLowerCase().equals("all") ? -1 : Integer.parseInt(acksString.trim());
+ return acksString.trim().equalsIgnoreCase("all") ? -1 : Integer.parseInt(acksString.trim());
} catch (NumberFormatException e) {
throw new ConfigException("Invalid configuration value for 'acks': " + acksString);
}
diff --git a/clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java b/clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java
index 1df55d98dc3..3a562ce9288 100644
--- a/clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java
+++ b/clients/src/main/java/org/apache/kafka/common/config/ConfigDef.java
@@ -24,6 +24,7 @@ import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
@@ -891,7 +892,7 @@ public class ConfigDef {
b.append(def.documentation);
b.append("");
b.append("");
- b.append(def.type.toString().toLowerCase());
+ b.append(def.type.toString().toLowerCase(Locale.ROOT));
b.append(" | ");
b.append("");
if (def.hasDefault()) {
@@ -908,7 +909,7 @@ public class ConfigDef {
b.append(def.validator != null ? def.validator.toString() : "");
b.append(" | ");
b.append("");
- b.append(def.importance.toString().toLowerCase());
+ b.append(def.importance.toString().toLowerCase(Locale.ROOT));
b.append(" | ");
b.append("\n");
}
@@ -937,7 +938,7 @@ public class ConfigDef {
b.append("\n\n");
}
b.append(" * Type: ");
- b.append(def.type.toString().toLowerCase());
+ b.append(def.type.toString().toLowerCase(Locale.ROOT));
b.append("\n");
if (def.defaultValue != null) {
b.append(" * Default: ");
@@ -951,7 +952,7 @@ public class ConfigDef {
b.append("\n");
}
b.append(" * Importance: ");
- b.append(def.importance.toString().toLowerCase());
+ b.append(def.importance.toString().toLowerCase(Locale.ROOT));
b.append("\n\n");
}
return b.toString();
diff --git a/clients/src/main/java/org/apache/kafka/common/metrics/stats/Rate.java b/clients/src/main/java/org/apache/kafka/common/metrics/stats/Rate.java
index 9dfc457b091..971b7b648bb 100644
--- a/clients/src/main/java/org/apache/kafka/common/metrics/stats/Rate.java
+++ b/clients/src/main/java/org/apache/kafka/common/metrics/stats/Rate.java
@@ -13,6 +13,7 @@
package org.apache.kafka.common.metrics.stats;
import java.util.List;
+import java.util.Locale;
import java.util.concurrent.TimeUnit;
import org.apache.kafka.common.metrics.MeasurableStat;
@@ -48,7 +49,7 @@ public class Rate implements MeasurableStat {
}
public String unitName() {
- return unit.name().substring(0, unit.name().length() - 2).toLowerCase();
+ return unit.name().substring(0, unit.name().length() - 2).toLowerCase(Locale.ROOT);
}
@Override
diff --git a/clients/src/main/java/org/apache/kafka/common/protocol/SecurityProtocol.java b/clients/src/main/java/org/apache/kafka/common/protocol/SecurityProtocol.java
index 905c67044dd..d5fbed71d22 100644
--- a/clients/src/main/java/org/apache/kafka/common/protocol/SecurityProtocol.java
+++ b/clients/src/main/java/org/apache/kafka/common/protocol/SecurityProtocol.java
@@ -21,6 +21,7 @@ import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.Set;
@@ -85,7 +86,7 @@ public enum SecurityProtocol {
/** Case insensitive lookup by protocol name */
public static SecurityProtocol forName(String name) {
- return SecurityProtocol.valueOf(name.toUpperCase());
+ return SecurityProtocol.valueOf(name.toUpperCase(Locale.ROOT));
}
/**
diff --git a/clients/src/test/java/org/apache/kafka/test/MockConsumerInterceptor.java b/clients/src/test/java/org/apache/kafka/test/MockConsumerInterceptor.java
index 0c187cb79a8..cff12a3e419 100644
--- a/clients/src/test/java/org/apache/kafka/test/MockConsumerInterceptor.java
+++ b/clients/src/test/java/org/apache/kafka/test/MockConsumerInterceptor.java
@@ -29,6 +29,7 @@ import org.apache.kafka.common.config.ConfigException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
+import java.util.Locale;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
@@ -59,7 +60,7 @@ public class MockConsumerInterceptor implements ConsumerInterceptor<String, String> {
case NoCompressionCodec.name => NoCompressionCodec
case GZIPCompressionCodec.name => GZIPCompressionCodec
case SnappyCompressionCodec.name => SnappyCompressionCodec
@@ -43,10 +45,10 @@ object BrokerCompressionCodec {
val brokerCompressionCodecs = List(UncompressedCodec, SnappyCompressionCodec, LZ4CompressionCodec, GZIPCompressionCodec, ProducerCompressionCodec)
val brokerCompressionOptions = brokerCompressionCodecs.map(codec => codec.name)
- def isValid(compressionType: String): Boolean = brokerCompressionOptions.contains(compressionType.toLowerCase())
+ def isValid(compressionType: String): Boolean = brokerCompressionOptions.contains(compressionType.toLowerCase(Locale.ROOT))
def getCompressionCodec(compressionType: String): CompressionCodec = {
- compressionType.toLowerCase match {
+ compressionType.toLowerCase(Locale.ROOT) match {
case UncompressedCodec.name => NoCompressionCodec
case _ => CompressionCodec.getCompressionCodec(compressionType)
}
diff --git a/core/src/main/scala/kafka/tools/ConsoleConsumer.scala b/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
index 50add72c977..e9a43f2bf42 100755
--- a/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
+++ b/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
@@ -349,9 +349,9 @@ class DefaultMessageFormatter extends MessageFormatter {
override def init(props: Properties) {
if (props.containsKey("print.timestamp"))
- printTimestamp = props.getProperty("print.timestamp").trim.toLowerCase.equals("true")
+ printTimestamp = props.getProperty("print.timestamp").trim.equalsIgnoreCase("true")
if (props.containsKey("print.key"))
- printKey = props.getProperty("print.key").trim.toLowerCase.equals("true")
+ printKey = props.getProperty("print.key").trim.equalsIgnoreCase("true")
if (props.containsKey("key.separator"))
keySeparator = props.getProperty("key.separator").getBytes
if (props.containsKey("line.separator"))
diff --git a/core/src/main/scala/kafka/tools/ConsoleProducer.scala b/core/src/main/scala/kafka/tools/ConsoleProducer.scala
index 0116a9666fd..e6476015f19 100644
--- a/core/src/main/scala/kafka/tools/ConsoleProducer.scala
+++ b/core/src/main/scala/kafka/tools/ConsoleProducer.scala
@@ -295,11 +295,11 @@ object ConsoleProducer {
override def init(inputStream: InputStream, props: Properties) {
topic = props.getProperty("topic")
if (props.containsKey("parse.key"))
- parseKey = props.getProperty("parse.key").trim.toLowerCase.equals("true")
+ parseKey = props.getProperty("parse.key").trim.equalsIgnoreCase("true")
if (props.containsKey("key.separator"))
keySeparator = props.getProperty("key.separator")
if (props.containsKey("ignore.error"))
- ignoreError = props.getProperty("ignore.error").trim.toLowerCase.equals("true")
+ ignoreError = props.getProperty("ignore.error").trim.equalsIgnoreCase("true")
reader = new BufferedReader(new InputStreamReader(inputStream))
}
diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala
index 673d84e33a4..026fbaedce3 100755
--- a/core/src/main/scala/kafka/utils/Log4jController.scala
+++ b/core/src/main/scala/kafka/utils/Log4jController.scala
@@ -20,6 +20,7 @@ package kafka.utils
import org.apache.log4j.{Logger, Level, LogManager}
import java.util
+import java.util.Locale
object Log4jController {
@@ -81,7 +82,7 @@ private class Log4jController extends Log4jControllerMBean {
def setLogLevel(loggerName: String, level: String) = {
val log = newLogger(loggerName)
if (!loggerName.trim.isEmpty && !level.trim.isEmpty && log != null) {
- log.setLevel(Level.toLevel(level.toUpperCase))
+ log.setLevel(Level.toLevel(level.toUpperCase(Locale.ROOT)))
true
}
else false
diff --git a/core/src/main/scala/kafka/utils/Os.scala b/core/src/main/scala/kafka/utils/Os.scala
index 6574f08d918..0100a0ab696 100644
--- a/core/src/main/scala/kafka/utils/Os.scala
+++ b/core/src/main/scala/kafka/utils/Os.scala
@@ -17,7 +17,9 @@
package kafka.utils
+import java.util.Locale
+
object Os {
- val name = System.getProperty("os.name").toLowerCase
+ val name = System.getProperty("os.name").toLowerCase(Locale.ROOT)
val isWindows = name.startsWith("windows")
}
\ No newline at end of file
diff --git a/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala
index 349f7ad5077..8dbb80be531 100644
--- a/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala
+++ b/core/src/test/scala/integration/kafka/api/PlaintextConsumerTest.scala
@@ -35,6 +35,7 @@ import org.junit.Test
import scala.collection.JavaConverters._
import scala.collection.mutable.Buffer
+import java.util.Locale
/* We have some tests in this class instead of `BaseConsumerTest` in order to keep the build time under control. */
class PlaintextConsumerTest extends BaseConsumerTest {
@@ -606,7 +607,7 @@ class PlaintextConsumerTest extends BaseConsumerTest {
for (i <- 0 until numRecords) {
val record = records.get(i)
assertEquals(s"key $i", new String(record.key()))
- assertEquals(s"value $i$appendStr".toUpperCase, new String(record.value()))
+ assertEquals(s"value $i$appendStr".toUpperCase(Locale.ROOT), new String(record.value()))
}
// commit sync and verify onCommit is called
diff --git a/examples/src/main/java/kafka/examples/KafkaConsumerProducerDemo.java b/examples/src/main/java/kafka/examples/KafkaConsumerProducerDemo.java
index 414a6f7deea..c1541bfc627 100644
--- a/examples/src/main/java/kafka/examples/KafkaConsumerProducerDemo.java
+++ b/examples/src/main/java/kafka/examples/KafkaConsumerProducerDemo.java
@@ -18,7 +18,7 @@ package kafka.examples;
public class KafkaConsumerProducerDemo {
public static void main(String[] args) {
- boolean isAsync = args.length == 0 || !args[0].trim().toLowerCase().equals("sync");
+ boolean isAsync = args.length == 0 || !args[0].trim().equalsIgnoreCase("sync");
Producer producerThread = new Producer(KafkaProperties.TOPIC, isAsync);
producerThread.start();
diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountDemo.java
index c12977f8d43..5b52803f807 100644
--- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountDemo.java
+++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountDemo.java
@@ -29,6 +29,7 @@ import org.apache.kafka.streams.kstream.KeyValueMapper;
import org.apache.kafka.streams.kstream.ValueMapper;
import java.util.Arrays;
+import java.util.Locale;
import java.util.Properties;
/**
@@ -63,7 +64,7 @@ public class WordCountDemo {
.flatMapValues(new ValueMapper<String, Iterable<String>>() {
@Override
public Iterable<String> apply(String value) {
- return Arrays.asList(value.toLowerCase().split(" "));
+ return Arrays.asList(value.toLowerCase(Locale.getDefault()).split(" "));
}
}).map(new KeyValueMapper<String, String, KeyValue<String, String>>() {
@Override
diff --git a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java
index a5cddfd005e..34c35b7c2fc 100644
--- a/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java
+++ b/streams/examples/src/main/java/org/apache/kafka/streams/examples/wordcount/WordCountProcessorDemo.java
@@ -30,6 +30,7 @@ import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.KeyValueStore;
import org.apache.kafka.streams.state.Stores;
+import java.util.Locale;
import java.util.Properties;
/**
@@ -63,7 +64,7 @@ public class WordCountProcessorDemo {
@Override
public void process(String dummy, String line) {
- String[] words = line.toLowerCase().split(" ");
+ String[] words = line.toLowerCase(Locale.getDefault()).split(" ");
for (String word : words) {
Integer oldValue = this.kvStore.get(word);
diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamFlatMapValuesTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamFlatMapValuesTest.java
index a904cb15c62..63f56364f57 100644
--- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamFlatMapValuesTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamFlatMapValuesTest.java
@@ -26,6 +26,7 @@ import org.apache.kafka.test.MockProcessorSupplier;
import org.junit.Test;
import java.util.ArrayList;
+import java.util.Locale;
import static org.junit.Assert.assertEquals;
@@ -42,7 +43,7 @@ public class KStreamFlatMapValuesTest {
@Override
public Iterable<String> apply(String value) {
ArrayList<String> result = new ArrayList<String>();
- result.add(value.toLowerCase());
+ result.add(value.toLowerCase(Locale.ROOT));
result.add(value);
return result;
}
diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamForeachTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamForeachTest.java
index 65737790bcf..d0a182d8159 100644
--- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamForeachTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KStreamForeachTest.java
@@ -26,6 +26,7 @@ import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.test.KStreamTestDriver;
import org.junit.Test;
import java.util.List;
+import java.util.Locale;
import java.util.ArrayList;
import java.util.Arrays;
@@ -60,7 +61,7 @@ public class KStreamForeachTest {
new ForeachAction<Integer, String>() {
@Override
public void apply(Integer key, String value) {
- actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase()));
+ actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase(Locale.ROOT)));
}
};
diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableForeachTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableForeachTest.java
index 4b612a52114..27a51146070 100644
--- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableForeachTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/KTableForeachTest.java
@@ -26,6 +26,7 @@ import org.apache.kafka.streams.kstream.KTable;
import org.apache.kafka.test.KStreamTestDriver;
import org.junit.Test;
import java.util.List;
+import java.util.Locale;
import java.util.ArrayList;
import java.util.Arrays;
@@ -60,7 +61,7 @@ public class KTableForeachTest {
new ForeachAction<Integer, String>() {
@Override
public void apply(Integer key, String value) {
- actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase()));
+ actualRecords.add(new KeyValue<>(key * 2, value.toUpperCase(Locale.ROOT)));
}
};