MINOR: replace hard-coded utf-8 with StandardCharsets.UTF_8 (#10079)

Reviewers: Chia-Ping Tsai <chia7712@gmail.com>
This commit is contained in:
dengziming 2021-02-09 10:06:01 +08:00 committed by GitHub
parent 1f508ea8c3
commit 3769bc21b5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 49 additions and 39 deletions

View File

@ -19,6 +19,7 @@ package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
@ -26,7 +27,7 @@ import java.util.Map;
* value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
*/
public class StringDeserializer implements Deserializer<String> {
private String encoding = "UTF8";
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {

View File

@ -19,6 +19,7 @@ package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
@ -26,7 +27,7 @@ import java.util.Map;
* value.serializer.encoding or serializer.encoding. The first two take precedence over the last.
*/
public class StringSerializer implements Serializer<String> {
private String encoding = "UTF8";
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {

View File

@ -19,6 +19,7 @@ package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
@ -27,7 +28,7 @@ import java.util.UUID;
* the property key.deserializer.encoding, value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
*/
public class UUIDDeserializer implements Deserializer<UUID> {
private String encoding = "UTF8";
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {

View File

@ -19,6 +19,7 @@ package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
@ -27,7 +28,7 @@ import java.util.UUID;
* the property key.deserializer.encoding, value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
*/
public class UUIDSerializer implements Serializer<UUID> {
private String encoding = "UTF8";
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {

View File

@ -2096,7 +2096,7 @@ public class SaslAuthenticatorTest {
private void authenticateUsingSaslPlainAndCheckConnection(String node, boolean enableSaslAuthenticateHeader) throws Exception {
// Authenticate using PLAIN username/password
String authString = "\u0000" + TestJaasConfig.USERNAME + "\u0000" + TestJaasConfig.PASSWORD;
ByteBuffer authBuf = ByteBuffer.wrap(authString.getBytes("UTF-8"));
ByteBuffer authBuf = ByteBuffer.wrap(Utils.utf8(authString));
if (enableSaslAuthenticateHeader) {
SaslAuthenticateRequestData data = new SaslAuthenticateRequestData().setAuthBytes(authBuf.array());
SaslAuthenticateRequest request = new SaslAuthenticateRequest.Builder(data).build();

View File

@ -21,6 +21,7 @@ import org.apache.kafka.common.utils.Bytes;
import org.junit.jupiter.api.Test;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@ -92,7 +93,7 @@ public class SerializationTest {
@Test
public void stringSerdeShouldSupportDifferentEncodings() {
String str = "my string";
List<String> encodings = Arrays.asList("UTF8", "UTF-16");
List<String> encodings = Arrays.asList(StandardCharsets.UTF_8.name(), StandardCharsets.UTF_16.name());
for (String encoding : encodings) {
try (Serde<String> serDeser = getStringSerde(encoding)) {

View File

@ -21,6 +21,7 @@ import org.apache.kafka.common.config.ConfigDef.Importance;
import org.apache.kafka.common.config.ConfigDef.Type;
import org.apache.kafka.common.config.ConfigDef.Width;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
@ -29,7 +30,7 @@ import java.util.Map;
public class StringConverterConfig extends ConverterConfig {
public static final String ENCODING_CONFIG = "converter.encoding";
public static final String ENCODING_DEFAULT = "UTF8";
public static final String ENCODING_DEFAULT = StandardCharsets.UTF_8.name();
private static final String ENCODING_DOC = "The name of the Java character set to use for encoding strings as byte arrays.";
private static final String ENCODING_DISPLAY = "Encoding";

View File

@ -16,15 +16,16 @@
*/
package org.apache.kafka.connect.storage;
import org.apache.kafka.common.utils.Utils;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaAndValue;
import org.junit.jupiter.api.Test;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
public class StringConverterTest {
@ -34,13 +35,13 @@ public class StringConverterTest {
private StringConverter converter = new StringConverter();
@Test
public void testStringToBytes() throws UnsupportedEncodingException {
assertArrayEquals(SAMPLE_STRING.getBytes("UTF8"), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING));
public void testStringToBytes() {
assertArrayEquals(Utils.utf8(SAMPLE_STRING), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING));
}
@Test
public void testNonStringToBytes() throws UnsupportedEncodingException {
assertArrayEquals("true".getBytes("UTF8"), converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
public void testNonStringToBytes() {
assertArrayEquals(Utils.utf8("true"), converter.fromConnectData(TOPIC, Schema.BOOLEAN_SCHEMA, true));
}
@Test
@ -49,14 +50,14 @@ public class StringConverterTest {
}
@Test
public void testToBytesIgnoresSchema() throws UnsupportedEncodingException {
assertArrayEquals("true".getBytes("UTF8"), converter.fromConnectData(TOPIC, null, true));
public void testToBytesIgnoresSchema() {
assertArrayEquals(Utils.utf8("true"), converter.fromConnectData(TOPIC, null, true));
}
@Test
public void testToBytesNonUtf8Encoding() throws UnsupportedEncodingException {
converter.configure(Collections.singletonMap("converter.encoding", "UTF-16"), true);
assertArrayEquals(SAMPLE_STRING.getBytes("UTF-16"), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING));
public void testToBytesNonUtf8Encoding() {
converter.configure(Collections.singletonMap("converter.encoding", StandardCharsets.UTF_16.name()), true);
assertArrayEquals(SAMPLE_STRING.getBytes(StandardCharsets.UTF_16), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING));
}
@Test
@ -74,9 +75,9 @@ public class StringConverterTest {
}
@Test
public void testBytesToStringNonUtf8Encoding() throws UnsupportedEncodingException {
converter.configure(Collections.singletonMap("converter.encoding", "UTF-16"), true);
SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes("UTF-16"));
public void testBytesToStringNonUtf8Encoding() {
converter.configure(Collections.singletonMap("converter.encoding", StandardCharsets.UTF_16.name()), true);
SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes(StandardCharsets.UTF_16));
assertEquals(Schema.OPTIONAL_STRING_SCHEMA, data.schema());
assertEquals(SAMPLE_STRING, data.value());
}
@ -85,13 +86,13 @@ public class StringConverterTest {
// The following simply verify that the delegation works.
@Test
public void testStringHeaderValueToBytes() throws UnsupportedEncodingException {
assertArrayEquals(SAMPLE_STRING.getBytes("UTF8"), converter.fromConnectHeader(TOPIC, "hdr", Schema.STRING_SCHEMA, SAMPLE_STRING));
public void testStringHeaderValueToBytes() {
assertArrayEquals(Utils.utf8(SAMPLE_STRING), converter.fromConnectHeader(TOPIC, "hdr", Schema.STRING_SCHEMA, SAMPLE_STRING));
}
@Test
public void testNonStringHeaderValueToBytes() throws UnsupportedEncodingException {
assertArrayEquals("true".getBytes("UTF8"), converter.fromConnectHeader(TOPIC, "hdr", Schema.BOOLEAN_SCHEMA, true));
public void testNonStringHeaderValueToBytes() {
assertArrayEquals(Utils.utf8("true"), converter.fromConnectHeader(TOPIC, "hdr", Schema.BOOLEAN_SCHEMA, true));
}
@Test

View File

@ -36,7 +36,6 @@ import org.junit.jupiter.api.Test;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URISyntaxException;
@ -128,8 +127,8 @@ public class JsonConverterTest {
@Test
public void bytesToConnect() throws UnsupportedEncodingException {
ByteBuffer reference = ByteBuffer.wrap("test-string".getBytes("UTF-8"));
public void bytesToConnect() {
ByteBuffer reference = ByteBuffer.wrap(Utils.utf8("test-string"));
String msg = "{ \"schema\": { \"type\": \"bytes\" }, \"payload\": \"dGVzdC1zdHJpbmc=\" }";
SchemaAndValue schemaAndValue = converter.toConnectData(TOPIC, msg.getBytes());
ByteBuffer converted = ByteBuffer.wrap((byte[]) schemaAndValue.value());

View File

@ -646,7 +646,7 @@ public abstract class AbstractHerder implements Herder, TaskStatus.Listener, Con
ByteArrayOutputStream output = new ByteArrayOutputStream();
try {
t.printStackTrace(new PrintStream(output, false, StandardCharsets.UTF_8.name()));
return output.toString("UTF-8");
return output.toString(StandardCharsets.UTF_8.name());
} catch (UnsupportedEncodingException e) {
return null;
}

View File

@ -183,7 +183,7 @@ public class DeadLetterQueueReporter implements ErrorReporter {
private byte[] stacktrace(Throwable error) {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
PrintStream stream = new PrintStream(bos, true, "UTF-8");
PrintStream stream = new PrintStream(bos, true, StandardCharsets.UTF_8.name());
error.printStackTrace(stream);
bos.close();
return bos.toByteArray();

View File

@ -470,7 +470,7 @@ public class RestServerTest {
private String executePut(String host, int port, String endpoint, String jsonBody) throws IOException {
HttpPut request = new HttpPut(endpoint);
StringEntity entity = new StringEntity(jsonBody, "UTF-8");
StringEntity entity = new StringEntity(jsonBody, StandardCharsets.UTF_8.name());
entity.setContentType("application/json");
request.setEntity(entity);
CloseableHttpClient httpClient = HttpClients.createMinimal();

View File

@ -18,6 +18,7 @@
package kafka.serializer
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import kafka.utils.VerifiableProperties
@ -44,9 +45,9 @@ class DefaultDecoder(props: VerifiableProperties = null) extends Decoder[Array[B
class StringDecoder(props: VerifiableProperties = null) extends Decoder[String] {
val encoding =
if(props == null)
"UTF8"
StandardCharsets.UTF_8.name()
else
props.getString("serializer.encoding", "UTF8")
props.getString("serializer.encoding", StandardCharsets.UTF_8.name())
def fromBytes(bytes: Array[Byte]): String = {
new String(bytes, encoding)

View File

@ -19,6 +19,8 @@
import org.gradle.api.internal.project.IsolatedAntBuilder
import java.nio.charset.StandardCharsets
apply plugin: RatPlugin
class RatTask extends DefaultTask {
@ -83,7 +85,7 @@ class RatTask extends DefaultTask {
}
def origEncoding = System.getProperty("file.encoding")
try {
System.setProperty("file.encoding", "UTF-8") //affects the output of the ant rat task
System.setProperty("file.encoding", StandardCharsets.UTF_8.name()) //affects the output of the ant rat task
generateXmlReport(reportDir)
printUnknownFiles()
generateHtmlReport()

View File

@ -37,6 +37,7 @@ import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@ -81,8 +82,8 @@ public class StreamsConfigTest {
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put("key.deserializer.encoding", "UTF8");
props.put("value.deserializer.encoding", "UTF-16");
props.put("key.deserializer.encoding", StandardCharsets.UTF_8.name());
props.put("value.deserializer.encoding", StandardCharsets.UTF_16.name());
streamsConfig = new StreamsConfig(props);
}
@ -208,8 +209,8 @@ public class StreamsConfigTest {
@Test
public void defaultSerdeShouldBeConfigured() {
final Map<String, Object> serializerConfigs = new HashMap<>();
serializerConfigs.put("key.serializer.encoding", "UTF8");
serializerConfigs.put("value.serializer.encoding", "UTF-16");
serializerConfigs.put("key.serializer.encoding", StandardCharsets.UTF_8.name());
serializerConfigs.put("value.serializer.encoding", StandardCharsets.UTF_16.name());
final Serializer<String> serializer = Serdes.String().serializer();
final String str = "my string for testing";

View File

@ -79,7 +79,7 @@ public class ProducerPerformance {
throw new IllegalArgumentException("File does not exist or empty file provided.");
}
String[] payloadList = new String(Files.readAllBytes(path), "UTF-8").split(payloadDelimiter);
String[] payloadList = new String(Files.readAllBytes(path), StandardCharsets.UTF_8).split(payloadDelimiter);
System.out.println("Number of messages read: " + payloadList.length);