MINOR: use enum map for error counts map (#19314)

Java provides EnumMap, a specialised Map implementation for use with enum
keys, which can offer performance improvements over a general-purpose HashMap.

https://docs.oracle.com/javase/8/docs/api/java/util/EnumMap.html

I have updated the Java code where possible to use an EnumMap rather
than a HashMap, and ran the unit tests under the requests directory.

Reviewers: Matthias J. Sax <matthias@confluent.io>, Lianet Magrans
<lmagrans@confluent.io>, Chia-Ping Tsai <chia7712@gmail.com>
This commit is contained in:
lorcan 2025-04-08 19:01:02 +01:00 committed by GitHub
parent dcf6f9d4c9
commit 434b0d39ae
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
55 changed files with 112 additions and 100 deletions

View File

@ -25,7 +25,7 @@ import org.apache.kafka.common.protocol.SendBuilder;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -71,14 +71,14 @@ public abstract class AbstractResponse implements AbstractRequestResponse {
}
protected static Map<Errors, Integer> errorCounts(Collection<Errors> errors) {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (Errors error : errors)
updateErrorCounts(errorCounts, error);
return errorCounts;
}
protected static Map<Errors, Integer> apiErrorCounts(Map<?, ApiError> errors) {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (ApiError apiError : errors.values())
updateErrorCounts(errorCounts, apiError.error());
return errorCounts;

View File

@ -27,6 +27,7 @@ import org.apache.kafka.common.quota.ClientQuotaEntity;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -74,7 +75,7 @@ public class AlterClientQuotasResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.entries().forEach(entry ->
updateErrorCounts(counts, Errors.forCode(entry.errorCode()))
);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class AlterPartitionReassignmentsResponse extends AbstractResponse {
@ -62,7 +62,7 @@ public class AlterPartitionReassignmentsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.responses().forEach(topicResponse ->

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class AlterPartitionResponse extends AbstractResponse {
@ -42,7 +42,7 @@ public class AlterPartitionResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.topics().forEach(topicResponse -> topicResponse.partitions().forEach(partitionResponse ->
updateErrorCounts(counts, Errors.forCode(partitionResponse.errorCode()))

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -60,7 +60,7 @@ public class AlterReplicaLogDirsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.results().forEach(topicResult ->
topicResult.partitions().forEach(partitionResult ->
updateErrorCounts(errorCounts, Errors.forCode(partitionResult.errorCode()))));

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class AlterShareGroupOffsetsResponse extends AbstractResponse {
@ -37,7 +37,7 @@ public class AlterShareGroupOffsetsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.responses().forEach(topic -> topic.partitions().forEach(partitionResponse ->
updateErrorCounts(counts, Errors.forCode(partitionResponse.errorCode()))
));

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -49,7 +49,7 @@ public class BeginQuorumEpochResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new HashMap<>();
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
errors.put(Errors.forCode(data.errorCode()), 1);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class BrokerHeartbeatResponse extends AbstractResponse {
@ -51,7 +51,7 @@ public class BrokerHeartbeatResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
errorCounts.put(Errors.forCode(data.errorCode()), 1);
return errorCounts;
}

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class BrokerRegistrationResponse extends AbstractResponse {
@ -51,7 +51,7 @@ public class BrokerRegistrationResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
errorCounts.put(Errors.forCode(data.errorCode()), 1);
return errorCounts;
}

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -53,7 +53,7 @@ public class ConsumerGroupDescribeResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.groups().forEach(
group -> updateErrorCounts(counts, Errors.forCode(group.errorCode()))
);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class CreatePartitionsResponse extends AbstractResponse {
@ -42,7 +42,7 @@ public class CreatePartitionsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.results().forEach(result ->
updateErrorCounts(counts, Errors.forCode(result.errorCode()))
);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class CreateTopicsResponse extends AbstractResponse {
@ -67,7 +67,7 @@ public class CreateTopicsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.topics().forEach(result ->
updateErrorCounts(counts, Errors.forCode(result.errorCode()))
);

View File

@ -23,6 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
@ -69,7 +70,7 @@ public class DeleteGroupsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.results().forEach(result ->
updateErrorCounts(counts, Errors.forCode(result.errorCode()))
);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class DeleteRecordsResponse extends AbstractResponse {
@ -63,7 +63,7 @@ public class DeleteRecordsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.topics().forEach(topicResponses ->
topicResponses.partitions().forEach(response ->
updateErrorCounts(errorCounts, Errors.forCode(response.errorCode()))

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class DeleteShareGroupOffsetsResponse extends AbstractResponse {
@ -41,7 +41,7 @@ public class DeleteShareGroupOffsetsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.responses().forEach(
topicResult -> topicResult.partitions().forEach(
partitionResult -> updateErrorCounts(counts, Errors.forCode(partitionResult.errorCode()))

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
@ -62,7 +62,7 @@ public class DeleteTopicsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.responses().forEach(result ->
updateErrorCounts(counts, Errors.forCode(result.errorCode()))
);

View File

@ -25,7 +25,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
import java.util.Objects;
import java.util.function.Function;
@ -241,7 +241,7 @@ public class DescribeConfigsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.results().forEach(response ->
updateErrorCounts(errorCounts, Errors.forCode(response.errorCode()))
);

View File

@ -26,7 +26,7 @@ import org.apache.kafka.common.utils.Utils;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@ -139,7 +139,7 @@ public class DescribeGroupsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.groups().forEach(describedGroup ->
updateErrorCounts(errorCounts, Errors.forCode(describedGroup.errorCode())));
return errorCounts;

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
@ -56,7 +56,7 @@ public class DescribeLogDirsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
errorCounts.put(Errors.forCode(data.errorCode()), 1);
data.results().forEach(result ->
updateErrorCounts(errorCounts, Errors.forCode(result.errorCode()))

View File

@ -24,7 +24,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -48,7 +48,7 @@ public class DescribeQuorumResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new HashMap<>();
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
errors.put(Errors.forCode(data.errorCode()), 1);

View File

@ -25,6 +25,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -77,7 +78,7 @@ public class DescribeShareGroupOffsetsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
groupLevelErrors.values().forEach(exception -> updateErrorCounts(counts, Errors.forException(exception)));
for (DescribeShareGroupOffsetsResponseGroup group : data.groups()) {
group.topics().forEach(topic ->

View File

@ -26,7 +26,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ -61,7 +61,7 @@ public class DescribeTopicPartitionsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.topics().forEach(topicResponse -> {
topicResponse.partitions().forEach(p -> updateErrorCounts(errorCounts, Errors.forCode(p.errorCode())));
updateErrorCounts(errorCounts, Errors.forCode(topicResponse.errorCode()));

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class DescribeTransactionsResponse extends AbstractResponse {
@ -41,7 +41,7 @@ public class DescribeTransactionsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (TransactionState transactionState : data.transactionStates()) {
Errors error = Errors.forCode(transactionState.errorCode());
updateErrorCounts(errorCounts, error);

View File

@ -25,6 +25,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -69,7 +70,7 @@ public class ElectLeadersResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.replicaElectionResults().forEach(result ->
result.partitionResult().forEach(partitionResult ->

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -49,7 +49,7 @@ public class EndQuorumEpochResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new HashMap<>();
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
errors.put(Errors.forCode(data.errorCode()), 1);

View File

@ -31,7 +31,7 @@ import org.apache.kafka.common.record.Records;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
@ -129,7 +129,7 @@ public class FetchResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
updateErrorCounts(errorCounts, error());
data.responses().forEach(topicResponse ->
topicResponse.partitions().forEach(partition ->

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
import java.util.Optional;
@ -37,7 +37,7 @@ public final class FetchSnapshotResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new HashMap<>();
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
Errors topLevelError = Errors.forCode(data.errorCode());
if (topLevelError != Errors.NONE) {

View File

@ -26,7 +26,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -98,7 +98,7 @@ public class FindCoordinatorResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
if (!data.coordinators().isEmpty()) {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (Coordinator coordinator : data.coordinators()) {
updateErrorCounts(errorCounts, Errors.forCode(coordinator.errorCode()));
}

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class GetTelemetrySubscriptionsResponse extends AbstractResponse {
@ -42,7 +42,7 @@ public class GetTelemetrySubscriptionsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
return counts;
}

View File

@ -26,6 +26,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -73,7 +74,7 @@ public class IncrementalAlterConfigsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.responses().forEach(response ->
updateErrorCounts(counts, Errors.forCode(response.errorCode()))
);

View File

@ -26,7 +26,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
@ -45,7 +45,7 @@ public class InitializeShareGroupStateResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.results().forEach(
result -> result.partitions().forEach(
partitionResult -> updateErrorCounts(counts, Errors.forCode(partitionResult.errorCode()))

View File

@ -24,7 +24,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -133,7 +133,7 @@ public class LeaveGroupResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> combinedErrorCounts = new HashMap<>();
Map<Errors, Integer> combinedErrorCounts = new EnumMap<>(Errors.class);
// Top level error.
updateErrorCounts(combinedErrorCounts, Errors.forCode(data.errorCode()));

View File

@ -27,7 +27,7 @@ import org.apache.kafka.common.record.RecordBatch;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
@ -80,7 +80,7 @@ public class ListOffsetsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
topics().forEach(topic ->
topic.partitions().forEach(partition ->
updateErrorCounts(errorCounts, Errors.forCode(partition.errorCode()))

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class ListTransactionsResponse extends AbstractResponse {
@ -39,7 +39,7 @@ public class ListTransactionsResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
updateErrorCounts(errorCounts, Errors.forCode(data.errorCode()));
return errorCounts;
}

View File

@ -33,6 +33,7 @@ import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@ -126,7 +127,7 @@ public class MetadataResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.topics().forEach(metadata -> {
metadata.partitions().forEach(p -> updateErrorCounts(errorCounts, Errors.forCode(p.errorCode())));
updateErrorCounts(errorCounts, Errors.forCode(metadata.errorCode()));

View File

@ -24,7 +24,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@ -140,7 +140,7 @@ public class OffsetDeleteResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.topics().forEach(topic ->
topic.partitions().forEach(partition ->

View File

@ -31,6 +31,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -325,7 +326,7 @@ public class OffsetFetchResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
if (!groupLevelErrors.isEmpty()) {
// built response with v8 or above
for (Map.Entry<String, Errors> entry : groupLevelErrors.entrySet()) {

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
import static org.apache.kafka.common.record.RecordBatch.NO_PARTITION_LEADER_EPOCH;
@ -57,7 +57,7 @@ public class OffsetsForLeaderEpochResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.topics().forEach(topic ->
topic.partitions().forEach(partition ->
updateErrorCounts(errorCounts, Errors.forCode(partition.errorCode()))));

View File

@ -27,7 +27,7 @@ import org.apache.kafka.common.record.RecordBatch;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -151,7 +151,7 @@ public class ProduceResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
data.responses().forEach(t -> t.partitionResponses().forEach(p -> updateErrorCounts(errorCounts, Errors.forCode(p.errorCode()))));
return errorCounts;
}

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class PushTelemetryResponse extends AbstractResponse {
@ -42,7 +42,7 @@ public class PushTelemetryResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
return counts;
}

View File

@ -27,7 +27,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
@ -46,7 +46,7 @@ public class ReadShareGroupStateResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.results().forEach(
result -> result.partitions().forEach(
partitionResult -> updateErrorCounts(counts, Errors.forCode(partitionResult.errorCode()))

View File

@ -26,7 +26,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
@ -64,7 +64,7 @@ public class ShareAcknowledgeResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.responses().forEach(
topic -> topic.partitions().forEach(

View File

@ -31,7 +31,7 @@ import org.apache.kafka.common.record.Records;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
@ -71,7 +71,7 @@ public class ShareFetchResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
updateErrorCounts(counts, Errors.forCode(data.errorCode()));
data.responses().forEach(
topic -> topic.partitions().forEach(

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -53,7 +53,7 @@ public class ShareGroupDescribeResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.groups().forEach(
group -> updateErrorCounts(counts, Errors.forCode(group.errorCode()))
);

View File

@ -22,7 +22,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -53,7 +53,7 @@ public class StreamsGroupDescribeResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
HashMap<Errors, Integer> counts = new HashMap<>();
Map<Errors, Integer> counts = new EnumMap<>(Errors.class);
data.groups().forEach(
group -> updateErrorCounts(counts, Errors.forCode(group.errorCode()))
);

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
public class UnregisterBrokerResponse extends AbstractResponse {
@ -51,7 +51,7 @@ public class UnregisterBrokerResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
if (data.errorCode() != 0) {
errorCounts.put(Errors.forCode(data.errorCode()), 1);
}

View File

@ -24,7 +24,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
import java.util.Set;
@ -51,7 +51,7 @@ public class UpdateFeaturesResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
updateErrorCounts(errorCounts, Errors.forCode(data.errorCode()));
for (UpdatableFeatureResult result : data.results()) {
updateErrorCounts(errorCounts, Errors.forCode(result.errorCode()));

View File

@ -23,7 +23,7 @@ import org.apache.kafka.common.protocol.ByteBufferAccessor;
import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Map;
/**
@ -49,7 +49,7 @@ public class VoteResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errors = new HashMap<>();
Map<Errors, Integer> errors = new EnumMap<>(Errors.class);
errors.put(Errors.forCode(data.errorCode()), 1);

View File

@ -27,6 +27,7 @@ import org.apache.kafka.common.protocol.Errors;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -115,7 +116,7 @@ public class WriteTxnMarkersResponse extends AbstractResponse {
@Override
public Map<Errors, Integer> errorCounts() {
Map<Errors, Integer> errorCounts = new HashMap<>();
Map<Errors, Integer> errorCounts = new EnumMap<>(Errors.class);
for (WritableTxnMarkerResult marker : data.markers()) {
for (WritableTxnMarkerTopicResult topic : marker.topics()) {
for (WritableTxnMarkerPartitionResult partitionResult : topic.partitions())

View File

@ -32,6 +32,7 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
@ -57,7 +58,7 @@ public class AddPartitionsToTxnResponseTest {
@BeforeEach
public void setUp() {
expectedErrorCounts = new HashMap<>();
expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(errorOne, 1);
expectedErrorCounts.put(errorTwo, 1);
@ -107,7 +108,7 @@ public class AddPartitionsToTxnResponseTest {
.setThrottleTimeMs(throttleTimeMs);
AddPartitionsToTxnResponse response = new AddPartitionsToTxnResponse(data);
Map<Errors, Integer> newExpectedErrorCounts = new HashMap<>();
Map<Errors, Integer> newExpectedErrorCounts = new EnumMap<>(Errors.class);
newExpectedErrorCounts.put(Errors.NONE, 1); // top level error
newExpectedErrorCounts.put(errorOne, 2);
newExpectedErrorCounts.put(errorTwo, 1);

View File

@ -24,6 +24,7 @@ import org.apache.kafka.common.protocol.Errors;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
@ -60,7 +61,7 @@ public class DeleteGroupsResponseTest {
expectedErrors.put(GROUP_ID_2, Errors.GROUP_AUTHORIZATION_FAILED);
assertEquals(expectedErrors, DELETE_GROUPS_RESPONSE.errors());
Map<Errors, Integer> expectedErrorCounts = new HashMap<>();
Map<Errors, Integer> expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(Errors.NONE, 1);
expectedErrorCounts.put(Errors.GROUP_AUTHORIZATION_FAILED, 1);
assertEquals(expectedErrorCounts, DELETE_GROUPS_RESPONSE.errorCounts());

View File

@ -31,7 +31,7 @@ import org.junit.jupiter.params.ParameterizedTest;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
@ -62,7 +62,7 @@ public class LeaveGroupResponseTest {
@Test
public void testConstructorWithMemberResponses() {
Map<Errors, Integer> expectedErrorCounts = new HashMap<>();
Map<Errors, Integer> expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(Errors.NONE, 1); // top level
expectedErrorCounts.put(Errors.UNKNOWN_MEMBER_ID, 1);
expectedErrorCounts.put(Errors.FENCED_INSTANCE_ID, 1);

View File

@ -30,6 +30,7 @@ import org.junit.jupiter.api.Test;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
@ -54,7 +55,7 @@ public class OffsetCommitResponseTest {
@BeforeEach
public void setUp() {
expectedErrorCounts = new HashMap<>();
expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(errorOne, 1);
expectedErrorCounts.put(errorTwo, 1);

View File

@ -23,6 +23,7 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
@ -50,7 +51,7 @@ public class WriteTxnMarkersResponseTest {
@Test
public void testConstructor() {
Map<Errors, Integer> expectedErrorCounts = new HashMap<>();
Map<Errors, Integer> expectedErrorCounts = new EnumMap<>(Errors.class);
expectedErrorCounts.put(Errors.UNKNOWN_PRODUCER_ID, 1);
expectedErrorCounts.put(Errors.INVALID_PRODUCER_EPOCH, 1);
WriteTxnMarkersResponse response = new WriteTxnMarkersResponse(errorMap);

View File

@ -24,7 +24,7 @@ import org.apache.kafka.server.metrics.KafkaMetricsGroup;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Meter;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Objects;
@ -82,7 +82,7 @@ public class RequestMetrics {
private final Map<String, String> tags;
private final ConcurrentMap<Short, Meter> requestRateInternal = new ConcurrentHashMap<>();
private final ConcurrentMap<DeprecatedRequestRateKey, Meter> deprecatedRequestRateInternal = new ConcurrentHashMap<>();
private final Map<Errors, ErrorMeter> errorMeters = new HashMap<>();
private final Map<Errors, ErrorMeter> errorMeters = new EnumMap<>(Errors.class);
public RequestMetrics(String name) {
this.name = name;