diff --git a/clients/src/main/java/org/apache/kafka/common/header/internals/RecordHeaders.java b/clients/src/main/java/org/apache/kafka/common/header/internals/RecordHeaders.java
index e6f05c1ab68..7137f723d8f 100644
--- a/clients/src/main/java/org/apache/kafka/common/header/internals/RecordHeaders.java
+++ b/clients/src/main/java/org/apache/kafka/common/header/internals/RecordHeaders.java
@@ -109,7 +109,7 @@ public class RecordHeaders implements Headers {
     }
 
     public Header[] toArray() {
-        return headers.isEmpty() ? Record.EMPTY_HEADERS : headers.toArray(new Header[headers.size()]);
+        return headers.isEmpty() ? Record.EMPTY_HEADERS : headers.toArray(new Header[0]);
     }
 
     private void checkKey(String key) {
diff --git a/clients/src/main/java/org/apache/kafka/common/metrics/stats/Frequencies.java b/clients/src/main/java/org/apache/kafka/common/metrics/stats/Frequencies.java
index a3d7d25a6e1..36daea6ca70 100644
--- a/clients/src/main/java/org/apache/kafka/common/metrics/stats/Frequencies.java
+++ b/clients/src/main/java/org/apache/kafka/common/metrics/stats/Frequencies.java
@@ -63,7 +63,7 @@ public class Frequencies extends SampledStat implements CompoundStat {
         if (frequencies.isEmpty()) {
             throw new IllegalArgumentException("Must specify at least one metric name");
         }
-        Frequency[] frequencyArray = frequencies.toArray(new Frequency[frequencies.size()]);
+        Frequency[] frequencyArray = frequencies.toArray(new Frequency[0]);
         return new Frequencies(2, 0.0, 1.0, frequencyArray);
     }
 
diff --git a/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslClient.java b/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslClient.java
index 6ee2e31d90f..ac486b7bf41 100644
--- a/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslClient.java
+++ b/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslClient.java
@@ -241,7 +241,7 @@ public class ScramSaslClient implements SaslClient {
         @Override
         public String[] getMechanismNames(Map<String, ?> props) {
             Collection<String> mechanisms = ScramMechanism.mechanismNames();
-            return mechanisms.toArray(new String[mechanisms.size()]);
+            return mechanisms.toArray(new String[0]);
         }
     }
 }
diff --git a/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslServer.java b/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslServer.java
index f9bce80360e..1cc5b89309d 100644
--- a/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslServer.java
+++ b/clients/src/main/java/org/apache/kafka/common/security/scram/internals/ScramSaslServer.java
@@ -259,7 +259,7 @@ public class ScramSaslServer implements SaslServer {
         @Override
         public String[] getMechanismNames(Map<String, ?> props) {
             Collection<String> mechanisms = ScramMechanism.mechanismNames();
-            return mechanisms.toArray(new String[mechanisms.size()]);
+            return mechanisms.toArray(new String[0]);
         }
     }
 }
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java
index 408f72e1dd9..136c616a410 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/RestServer.java
@@ -133,7 +133,7 @@ public class RestServer {
             }
         }
 
-        jettyServer.setConnectors(connectors.toArray(new Connector[connectors.size()]));
+        jettyServer.setConnectors(connectors.toArray(new Connector[0]));
 
         if (adminListeners != null && !adminListeners.isEmpty()) {
             for (String adminListener : adminListeners) {
@@ -300,7 +300,7 @@ public class RestServer {
         contextHandlers.add(new DefaultHandler());
         contextHandlers.add(requestLogHandler);
 
-        handlers.setHandlers(contextHandlers.toArray(new Handler[]{}));
+        handlers.setHandlers(contextHandlers.toArray(new Handler[0]));
         try {
             context.start();
         } catch (Exception e) {
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
index 6b391d96ada..bf22bb6d670 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/util/SSLUtils.java
@@ -121,7 +121,7 @@ public class SSLUtils {
     @SuppressWarnings("unchecked")
     protected static void configureSslContextFactoryAlgorithms(SslContextFactory ssl, Map<String, Object> sslConfigValues) {
         List<String> sslEnabledProtocols = (List<String>) getOrDefault(sslConfigValues, SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG, Arrays.asList(COMMA_WITH_WHITESPACE.split(SslConfigs.DEFAULT_SSL_ENABLED_PROTOCOLS)));
-        ssl.setIncludeProtocols(sslEnabledProtocols.toArray(new String[sslEnabledProtocols.size()]));
+        ssl.setIncludeProtocols(sslEnabledProtocols.toArray(new String[0]));
 
         String sslProvider = (String) sslConfigValues.get(SslConfigs.SSL_PROVIDER_CONFIG);
         if (sslProvider != null)
@@ -131,7 +131,7 @@ public class SSLUtils {
 
         List<String> sslCipherSuites = (List<String>) sslConfigValues.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG);
         if (sslCipherSuites != null)
-            ssl.setIncludeCipherSuites(sslCipherSuites.toArray(new String[sslCipherSuites.size()]));
+            ssl.setIncludeCipherSuites(sslCipherSuites.toArray(new String[0]));
 
         ssl.setKeyManagerFactoryAlgorithm((String) getOrDefault(sslConfigValues, SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG, SslConfigs.DEFAULT_SSL_KEYMANGER_ALGORITHM));
 
diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableAbstractJoinValueGetterSupplier.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableAbstractJoinValueGetterSupplier.java
index f2de67f4385..924452ddfd3 100644
--- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableAbstractJoinValueGetterSupplier.java
+++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableAbstractJoinValueGetterSupplier.java
@@ -37,7 +37,7 @@ public abstract class KTableKTableAbstractJoinValueGetterSupplier
         final Set<String> stores = new HashSet<>(storeNames1.length + storeNames2.length);
         Collections.addAll(stores, storeNames1);
         Collections.addAll(stores, storeNames2);
-        return stores.toArray(new String[stores.size()]);
+        return stores.toArray(new String[0]);
     }
 }
diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableJoinMerger.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableJoinMerger.java
index c669fb219f7..3ca0160d242 100644
--- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableJoinMerger.java
+++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/KTableKTableJoinMerger.java
@@ -70,7 +70,7 @@ public class KTableKTableJoinMerger implements KTableProcessorSupplier
                 final Set<String> stores = new HashSet<>(storeNames1.length + storeNames2.length);
                 Collections.addAll(stores, storeNames1);
                 Collections.addAll(stores, storeNames2);
-                return stores.toArray(new String[stores.size()]);
+                return stores.toArray(new String[0]);
             }
         };
     }
diff --git a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/StreamSourceNode.java b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/StreamSourceNode.java
index fa585f16650..f4f9842995e 100644
--- a/streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/StreamSourceNode.java
+++ b/streams/src/main/java/org/apache/kafka/streams/kstream/internals/graph/StreamSourceNode.java
@@ -84,7 +84,7 @@ public class StreamSourceNode extends SourceGraphNode {
                                       consumedInternal().timestampExtractor(),
                                       consumedInternal().keyDeserializer(),
                                       consumedInternal().valueDeserializer(),
-                                      topicNames().toArray(new String[topicNames().size()]));
+                                      topicNames().toArray(new String[0]));
         }
     }
diff --git a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java
index f6e06e421c5..80e9a347851 100644
--- a/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/kstream/internals/TimeWindowTest.java
@@ -128,7 +128,7 @@ public class TimeWindowTest {
         final TimeWindows windows = TimeWindows.of(ofMillis(12L)).advanceBy(ofMillis(5L));
         final Map<Long, TimeWindow> matched = windows.windowsFor(21L);
 
-        final Long[] expected = matched.keySet().toArray(new Long[matched.size()]);
+        final Long[] expected = matched.keySet().toArray(new Long[0]);
         assertEquals(expected[0].longValue(), 10L);
         assertEquals(expected[1].longValue(), 15L);
         assertEquals(expected[2].longValue(), 20L);
diff --git a/tools/src/main/java/org/apache/kafka/trogdor/coordinator/CoordinatorClient.java b/tools/src/main/java/org/apache/kafka/trogdor/coordinator/CoordinatorClient.java
index ba40e7bc0d6..078dbbcd158 100644
--- a/tools/src/main/java/org/apache/kafka/trogdor/coordinator/CoordinatorClient.java
+++ b/tools/src/main/java/org/apache/kafka/trogdor/coordinator/CoordinatorClient.java
@@ -174,7 +174,7 @@ public class CoordinatorClient {
 
     public TasksResponse tasks(TasksRequest request) throws Exception {
         UriBuilder uriBuilder = UriBuilder.fromPath(url("/coordinator/tasks"));
-        uriBuilder.queryParam("taskId", (Object[]) request.taskIds().toArray(new String[0]));
+        uriBuilder.queryParam("taskId", request.taskIds().toArray(new Object[0]));
         uriBuilder.queryParam("firstStartMs", request.firstStartMs());
         uriBuilder.queryParam("lastStartMs", request.lastStartMs());
         uriBuilder.queryParam("firstEndMs", request.firstEndMs());
diff --git a/tools/src/main/java/org/apache/kafka/trogdor/fault/DegradedNetworkFaultWorker.java b/tools/src/main/java/org/apache/kafka/trogdor/fault/DegradedNetworkFaultWorker.java
index 292c18ded45..d071d12a845 100644
--- a/tools/src/main/java/org/apache/kafka/trogdor/fault/DegradedNetworkFaultWorker.java
+++ b/tools/src/main/java/org/apache/kafka/trogdor/fault/DegradedNetworkFaultWorker.java
@@ -109,19 +109,19 @@ public class DegradedNetworkFaultWorker implements TaskWorker {
             List<String> delay = new ArrayList<>();
             rootHandler(networkDevice, delay::add);
             netemDelay(delayMs, deviationMs, delay::add);
-            platform.runCommand(delay.toArray(new String[]{}));
+            platform.runCommand(delay.toArray(new String[0]));
 
             if (rateLimitKbps > 0) {
                 List<String> rate = new ArrayList<>();
                 childHandler(networkDevice, rate::add);
                 tbfRate(rateLimitKbps, rate::add);
-                platform.runCommand(rate.toArray(new String[]{}));
+                platform.runCommand(rate.toArray(new String[0]));
             }
         } else if (rateLimitKbps > 0) {
             List<String> rate = new ArrayList<>();
             rootHandler(networkDevice, rate::add);
             tbfRate(rateLimitKbps, rate::add);
-            platform.runCommand(rate.toArray(new String[]{}));
+            platform.runCommand(rate.toArray(new String[0]));
         } else {
             log.warn("Not applying any rate limiting or latency");
         }
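
Context for reviewers, not part of the diff above: every hunk swaps the pre-sized `toArray(new T[collection.size()])` call for the zero-sized `toArray(new T[0])` idiom. Passing a zero-length array lets the collection allocate the correctly sized result internally, which modern JVMs optimize at least as well as the pre-sized form, and it avoids the window on concurrent collections where size() can change between sizing the array and copying into it. A minimal standalone sketch of the two forms (class and variable names here are invented for illustration; only Collection.toArray is real API):

    import java.util.Arrays;
    import java.util.List;

    public class ToArrayIdiomExample {
        public static void main(String[] args) {
            List<String> values = Arrays.asList("alpha", "beta", "gamma");

            // Old form replaced throughout the patch: pre-sizes the destination array.
            String[] preSized = values.toArray(new String[values.size()]);

            // New form used by the patch: the collection sizes the array itself.
            String[] zeroSized = values.toArray(new String[0]);

            System.out.println(Arrays.equals(preSized, zeroSized)); // true
        }
    }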