mirror of https://github.com/apache/kafka.git
MINOR: Clean up group-coordinator (#19008)
Given that we now support Java 17 on our brokers, this PR replaces the use of:

- `Collections.singletonList()` and `Collections.emptyList()` with `List.of()`
- `Collections.singletonMap()` and `Collections.emptyMap()` with `Map.of()`
- `Collections.singleton()` and `Collections.emptySet()` with `Set.of()`

Reviewers: Chia-Ping Tsai <chia7712@gmail.com>
parent e0c77140b2
commit 3c73c9bdc1
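To make the pattern concrete, here is a minimal, hypothetical sketch of the substitution applied throughout the diff below; the class and method names are illustrative only and do not come from the Kafka code base:

```java
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Illustrative only: the pre-Java-9 Collections factories next to their Java 9+ replacements.
class CollectionsCleanupSketch {

    // Before: java.util.Collections factory methods.
    static List<String> emptyMembersBefore() {
        return Collections.emptyList();
    }

    static Map<String, String> singleTagBefore(String key, String value) {
        return Collections.singletonMap(key, value);
    }

    static Set<String> singleTopicBefore(String topic) {
        return Collections.singleton(topic);
    }

    // After: List.of / Map.of / Set.of, usable now that the brokers require Java 17.
    // Both variants return immutable collections; the of() factories additionally reject null arguments.
    static List<String> emptyMembersAfter() {
        return List.of();
    }

    static Map<String, String> singleTagAfter(String key, String value) {
        return Map.of(key, value);
    }

    static Set<String> singleTopicAfter(String topic) {
        return Set.of(topic);
    }
}
```

As visible in the hunks below, the diff also converts `.collect(Collectors.toList())` chains to `Stream#toList()` (available since Java 16), which likewise returns an unmodifiable list, and drops the now-unused `java.util.Collections` and `java.util.stream.Collectors` imports.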
@ -477,7 +477,7 @@ public class GroupCoordinatorRecordHelpers {
|
||||||
.setGeneration(0)
|
.setGeneration(0)
|
||||||
.setLeader(null)
|
.setLeader(null)
|
||||||
.setCurrentStateTimestamp(group.currentStateTimestampOrDefault())
|
.setCurrentStateTimestamp(group.currentStateTimestampOrDefault())
|
||||||
.setMembers(Collections.emptyList()),
|
.setMembers(List.of()),
|
||||||
GROUP_METADATA_VALUE_VERSION
|
GROUP_METADATA_VALUE_VERSION
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
|
|
@ -115,7 +115,6 @@ import java.util.concurrent.ExecutionException;
|
||||||
import java.util.concurrent.Executors;
|
import java.util.concurrent.Executors;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
import java.util.function.IntSupplier;
|
import java.util.function.IntSupplier;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import static org.apache.kafka.coordinator.common.runtime.CoordinatorOperationExceptionHelper.handleOperationException;
|
import static org.apache.kafka.coordinator.common.runtime.CoordinatorOperationExceptionHelper.handleOperationException;
|
||||||
|
|
||||||
|
@ -400,7 +399,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
return CompletableFuture.completedFuture(
|
return CompletableFuture.completedFuture(
|
||||||
new StreamsGroupHeartbeatResult(
|
new StreamsGroupHeartbeatResult(
|
||||||
new StreamsGroupHeartbeatResponseData().setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()),
|
new StreamsGroupHeartbeatResponseData().setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -419,7 +418,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
new StreamsGroupHeartbeatResponseData()
|
new StreamsGroupHeartbeatResponseData()
|
||||||
.setErrorCode(error.code())
|
.setErrorCode(error.code())
|
||||||
.setErrorMessage(message),
|
.setErrorMessage(message),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
),
|
),
|
||||||
log
|
log
|
||||||
));
|
));
|
||||||
|
@ -633,7 +632,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
.setMemberId(member.memberId())
|
.setMemberId(member.memberId())
|
||||||
.setGroupInstanceId(member.groupInstanceId())
|
.setGroupInstanceId(member.groupInstanceId())
|
||||||
.setErrorCode(Errors.UNKNOWN_MEMBER_ID.code()))
|
.setErrorCode(Errors.UNKNOWN_MEMBER_ID.code()))
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
return new LeaveGroupResponseData()
|
return new LeaveGroupResponseData()
|
||||||
.setMembers(memberResponses);
|
.setMembers(memberResponses);
|
||||||
} else {
|
} else {
|
||||||
|
@ -671,7 +670,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
exception -> {
|
exception -> {
|
||||||
exception = Errors.maybeUnwrapException(exception);
|
exception = Errors.maybeUnwrapException(exception);
|
||||||
if (exception instanceof NotCoordinatorException) {
|
if (exception instanceof NotCoordinatorException) {
|
||||||
return Collections.emptyList();
|
return List.of();
|
||||||
} else {
|
} else {
|
||||||
throw new CompletionException(exception);
|
throw new CompletionException(exception);
|
||||||
}
|
}
|
||||||
|
@ -714,7 +713,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
||||||
.add(groupId);
|
.add(groupId);
|
||||||
} else {
|
} else {
|
||||||
futures.add(CompletableFuture.completedFuture(Collections.singletonList(
|
futures.add(CompletableFuture.completedFuture(List.of(
|
||||||
new ConsumerGroupDescribeResponseData.DescribedGroup()
|
new ConsumerGroupDescribeResponseData.DescribedGroup()
|
||||||
.setGroupId(null)
|
.setGroupId(null)
|
||||||
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
||||||
|
@ -766,7 +765,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
||||||
.add(groupId);
|
.add(groupId);
|
||||||
} else {
|
} else {
|
||||||
futures.add(CompletableFuture.completedFuture(Collections.singletonList(
|
futures.add(CompletableFuture.completedFuture(List.of(
|
||||||
new StreamsGroupDescribeResponseData.DescribedGroup()
|
new StreamsGroupDescribeResponseData.DescribedGroup()
|
||||||
.setGroupId(null)
|
.setGroupId(null)
|
||||||
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
||||||
|
@ -817,7 +816,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
.computeIfAbsent(topicPartitionFor(groupId), __ -> new ArrayList<>())
|
||||||
.add(groupId);
|
.add(groupId);
|
||||||
} else {
|
} else {
|
||||||
futures.add(CompletableFuture.completedFuture(Collections.singletonList(
|
futures.add(CompletableFuture.completedFuture(List.of(
|
||||||
new ShareGroupDescribeResponseData.DescribedGroup()
|
new ShareGroupDescribeResponseData.DescribedGroup()
|
||||||
.setGroupId(null)
|
.setGroupId(null)
|
||||||
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
||||||
|
@ -866,7 +865,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
groupIds.forEach(groupId -> {
|
groupIds.forEach(groupId -> {
|
||||||
// For backwards compatibility, we support DescribeGroups for the empty group id.
|
// For backwards compatibility, we support DescribeGroups for the empty group id.
|
||||||
if (groupId == null) {
|
if (groupId == null) {
|
||||||
futures.add(CompletableFuture.completedFuture(Collections.singletonList(
|
futures.add(CompletableFuture.completedFuture(List.of(
|
||||||
new DescribeGroupsResponseData.DescribedGroup()
|
new DescribeGroupsResponseData.DescribedGroup()
|
||||||
.setGroupId(null)
|
.setGroupId(null)
|
||||||
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
.setErrorCode(Errors.INVALID_GROUP_ID.code())
|
||||||
|
@ -1149,7 +1148,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
topicPartitionFor(request.groupId()),
|
topicPartitionFor(request.groupId()),
|
||||||
Duration.ofMillis(config.offsetCommitTimeoutMs()),
|
Duration.ofMillis(config.offsetCommitTimeoutMs()),
|
||||||
coordinator -> new CoordinatorResult<>(
|
coordinator -> new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
coordinator.fetchOffsets(request, Long.MAX_VALUE)
|
coordinator.fetchOffsets(request, Long.MAX_VALUE)
|
||||||
)
|
)
|
||||||
).exceptionally(exception -> handleOffsetFetchException(
|
).exceptionally(exception -> handleOffsetFetchException(
|
||||||
|
@ -1204,7 +1203,7 @@ public class GroupCoordinatorService implements GroupCoordinator {
|
||||||
topicPartitionFor(request.groupId()),
|
topicPartitionFor(request.groupId()),
|
||||||
Duration.ofMillis(config.offsetCommitTimeoutMs()),
|
Duration.ofMillis(config.offsetCommitTimeoutMs()),
|
||||||
coordinator -> new CoordinatorResult<>(
|
coordinator -> new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
coordinator.fetchAllOffsets(request, Long.MAX_VALUE)
|
coordinator.fetchAllOffsets(request, Long.MAX_VALUE)
|
||||||
)
|
)
|
||||||
).exceptionally(exception -> handleOffsetFetchException(
|
).exceptionally(exception -> handleOffsetFetchException(
|
||||||
|
|
|
@ -456,7 +456,7 @@ public class GroupMetadataManager {
|
||||||
* Package private for testing.
|
* Package private for testing.
|
||||||
*/
|
*/
|
||||||
static final CoordinatorResult<Void, CoordinatorRecord> EMPTY_RESULT =
|
static final CoordinatorResult<Void, CoordinatorRecord> EMPTY_RESULT =
|
||||||
new CoordinatorResult<>(Collections.emptyList(), CompletableFuture.completedFuture(null), false);
|
new CoordinatorResult<>(List.of(), CompletableFuture.completedFuture(null), false);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The share group partition assignor.
|
* The share group partition assignor.
|
||||||
|
@ -574,7 +574,7 @@ public class GroupMetadataManager {
|
||||||
return groupStream
|
return groupStream
|
||||||
.filter(combinedFilter)
|
.filter(combinedFilter)
|
||||||
.map(group -> group.asListedGroup(committedOffset))
|
.map(group -> group.asListedGroup(committedOffset))
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -701,7 +701,7 @@ public class GroupMetadataManager {
|
||||||
.setProtocolData(group.protocolName().get())
|
.setProtocolData(group.protocolName().get())
|
||||||
.setMembers(group.allMembers().stream()
|
.setMembers(group.allMembers().stream()
|
||||||
.map(member -> member.describe(group.protocolName().get()))
|
.map(member -> member.describe(group.protocolName().get()))
|
||||||
.collect(Collectors.toList())
|
.toList()
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
|
@ -711,7 +711,7 @@ public class GroupMetadataManager {
|
||||||
.setProtocolType(group.protocolType().orElse(""))
|
.setProtocolType(group.protocolType().orElse(""))
|
||||||
.setMembers(group.allMembers().stream()
|
.setMembers(group.allMembers().stream()
|
||||||
.map(ClassicGroupMember::describeNoMetadata)
|
.map(ClassicGroupMember::describeNoMetadata)
|
||||||
.collect(Collectors.toList())
|
.toList()
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -1792,7 +1792,7 @@ public class GroupMetadataManager {
|
||||||
.map(keyValue -> new ConsumerGroupHeartbeatResponseData.TopicPartitions()
|
.map(keyValue -> new ConsumerGroupHeartbeatResponseData.TopicPartitions()
|
||||||
.setTopicId(keyValue.getKey())
|
.setTopicId(keyValue.getKey())
|
||||||
.setPartitions(new ArrayList<>(keyValue.getValue())))
|
.setPartitions(new ArrayList<>(keyValue.getValue())))
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<ShareGroupHeartbeatResponseData.TopicPartitions> fromShareGroupAssignmentMap(
|
private List<ShareGroupHeartbeatResponseData.TopicPartitions> fromShareGroupAssignmentMap(
|
||||||
|
@ -1802,7 +1802,7 @@ public class GroupMetadataManager {
|
||||||
.map(keyValue -> new ShareGroupHeartbeatResponseData.TopicPartitions()
|
.map(keyValue -> new ShareGroupHeartbeatResponseData.TopicPartitions()
|
||||||
.setTopicId(keyValue.getKey())
|
.setTopicId(keyValue.getKey())
|
||||||
.setPartitions(new ArrayList<>(keyValue.getValue())))
|
.setPartitions(new ArrayList<>(keyValue.getValue())))
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -2038,7 +2038,7 @@ public class GroupMetadataManager {
|
||||||
group,
|
group,
|
||||||
memberId,
|
memberId,
|
||||||
-1,
|
-1,
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
true,
|
true,
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
|
@ -2048,7 +2048,7 @@ public class GroupMetadataManager {
|
||||||
memberId,
|
memberId,
|
||||||
-1,
|
-1,
|
||||||
instanceId,
|
instanceId,
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
isUnknownMember,
|
isUnknownMember,
|
||||||
true,
|
true,
|
||||||
records
|
records
|
||||||
|
@ -2145,7 +2145,7 @@ public class GroupMetadataManager {
|
||||||
if (downgrade) {
|
if (downgrade) {
|
||||||
convertToClassicGroup(
|
convertToClassicGroup(
|
||||||
group,
|
group,
|
||||||
Collections.emptySet(),
|
Set.of(),
|
||||||
updatedMember,
|
updatedMember,
|
||||||
records
|
records
|
||||||
);
|
);
|
||||||
|
@ -2727,7 +2727,7 @@ public class GroupMetadataManager {
|
||||||
if (exception != null) {
|
if (exception != null) {
|
||||||
log.error("[GroupId {}] Couldn't update regular expression due to: {}",
|
log.error("[GroupId {}] Couldn't update regular expression due to: {}",
|
||||||
groupId, exception.getMessage());
|
groupId, exception.getMessage());
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
if (log.isDebugEnabled()) {
|
if (log.isDebugEnabled()) {
|
||||||
|
@ -3197,11 +3197,11 @@ public class GroupMetadataManager {
|
||||||
// We will write a member epoch of -2 for this departing static member.
|
// We will write a member epoch of -2 for this departing static member.
|
||||||
ConsumerGroupMember leavingStaticMember = new ConsumerGroupMember.Builder(member)
|
ConsumerGroupMember leavingStaticMember = new ConsumerGroupMember.Builder(member)
|
||||||
.setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
|
.setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
|
||||||
.setPartitionsPendingRevocation(Collections.emptyMap())
|
.setPartitionsPendingRevocation(Map.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.singletonList(newConsumerGroupCurrentAssignmentRecord(group.groupId(), leavingStaticMember)),
|
List.of(newConsumerGroupCurrentAssignmentRecord(group.groupId(), leavingStaticMember)),
|
||||||
new ConsumerGroupHeartbeatResponseData()
|
new ConsumerGroupHeartbeatResponseData()
|
||||||
.setMemberId(member.memberId())
|
.setMemberId(member.memberId())
|
||||||
.setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
|
.setMemberEpoch(LEAVE_GROUP_STATIC_MEMBER_EPOCH)
|
||||||
|
@ -3266,7 +3266,7 @@ public class GroupMetadataManager {
|
||||||
) {
|
) {
|
||||||
if (members.isEmpty()) {
|
if (members.isEmpty()) {
|
||||||
// No members to fence. Don't bump the group epoch.
|
// No members to fence. Don't bump the group epoch.
|
||||||
return new CoordinatorResult<>(Collections.emptyList(), response);
|
return new CoordinatorResult<>(List.of(), response);
|
||||||
}
|
}
|
||||||
|
|
||||||
List<CoordinatorRecord> records = new ArrayList<>();
|
List<CoordinatorRecord> records = new ArrayList<>();
|
||||||
|
@ -3563,7 +3563,7 @@ public class GroupMetadataManager {
|
||||||
groupId, memberId);
|
groupId, memberId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -3596,7 +3596,7 @@ public class GroupMetadataManager {
|
||||||
groupId, memberId);
|
groupId, memberId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -3629,7 +3629,7 @@ public class GroupMetadataManager {
|
||||||
groupId, memberId);
|
groupId, memberId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -3715,7 +3715,7 @@ public class GroupMetadataManager {
|
||||||
} else {
|
} else {
|
||||||
log.debug("[GroupId {}] Ignoring rebalance timeout for {} because the member " +
|
log.debug("[GroupId {}] Ignoring rebalance timeout for {} because the member " +
|
||||||
"left the epoch {}.", groupId, memberId, memberEpoch);
|
"left the epoch {}.", groupId, memberId, memberEpoch);
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
} catch (GroupIdNotFoundException ex) {
|
} catch (GroupIdNotFoundException ex) {
|
||||||
log.debug("[GroupId {}] Could not fence {}} because the group does not exist.",
|
log.debug("[GroupId {}] Could not fence {}} because the group does not exist.",
|
||||||
|
@ -3725,7 +3725,7 @@ public class GroupMetadataManager {
|
||||||
groupId, memberId);
|
groupId, memberId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3758,7 +3758,7 @@ public class GroupMetadataManager {
|
||||||
} else {
|
} else {
|
||||||
log.debug("[GroupId {}] Ignoring rebalance timeout for {} because the member " +
|
log.debug("[GroupId {}] Ignoring rebalance timeout for {} because the member " +
|
||||||
"is not in epoch {} anymore.", groupId, memberId, memberEpoch);
|
"is not in epoch {} anymore.", groupId, memberId, memberEpoch);
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
}
|
}
|
||||||
} catch (GroupIdNotFoundException ex) {
|
} catch (GroupIdNotFoundException ex) {
|
||||||
log.debug("[GroupId {}] Could not fence {}} because the group does not exist.",
|
log.debug("[GroupId {}] Could not fence {}} because the group does not exist.",
|
||||||
|
@ -3768,7 +3768,7 @@ public class GroupMetadataManager {
|
||||||
groupId, memberId);
|
groupId, memberId);
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList());
|
return new CoordinatorResult<>(List.of());
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -3935,7 +3935,7 @@ public class GroupMetadataManager {
|
||||||
if (streamsGroup.topology().isPresent()) {
|
if (streamsGroup.topology().isPresent()) {
|
||||||
oldSubscribedTopicNames = streamsGroup.topology().get().requiredTopics();
|
oldSubscribedTopicNames = streamsGroup.topology().get().requiredTopics();
|
||||||
} else {
|
} else {
|
||||||
oldSubscribedTopicNames = Collections.emptySet();
|
oldSubscribedTopicNames = Set.of();
|
||||||
}
|
}
|
||||||
if (value != null) {
|
if (value != null) {
|
||||||
StreamsTopology topology = StreamsTopology.fromRecord(value);
|
StreamsTopology topology = StreamsTopology.fromRecord(value);
|
||||||
|
@ -3943,7 +3943,7 @@ public class GroupMetadataManager {
|
||||||
Set<String> newSubscribedTopicNames = topology.requiredTopics();
|
Set<String> newSubscribedTopicNames = topology.requiredTopics();
|
||||||
updateGroupsByTopics(groupId, oldSubscribedTopicNames, newSubscribedTopicNames);
|
updateGroupsByTopics(groupId, oldSubscribedTopicNames, newSubscribedTopicNames);
|
||||||
} else {
|
} else {
|
||||||
updateGroupsByTopics(groupId, oldSubscribedTopicNames, Collections.emptySet());
|
updateGroupsByTopics(groupId, oldSubscribedTopicNames, Set.of());
|
||||||
streamsGroup.setTopology(null);
|
streamsGroup.setTopology(null);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -4039,7 +4039,7 @@ public class GroupMetadataManager {
|
||||||
*/
|
*/
|
||||||
public Set<String> groupsSubscribedToTopic(String topicName) {
|
public Set<String> groupsSubscribedToTopic(String topicName) {
|
||||||
Set<String> groups = groupsByTopics.get(topicName);
|
Set<String> groups = groupsByTopics.get(topicName);
|
||||||
return groups != null ? groups : Collections.emptySet();
|
return groups != null ? groups : Set.of();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -4180,7 +4180,7 @@ public class GroupMetadataManager {
|
||||||
});
|
});
|
||||||
group.setSubscriptionMetadata(subscriptionMetadata);
|
group.setSubscriptionMetadata(subscriptionMetadata);
|
||||||
} else {
|
} else {
|
||||||
group.setSubscriptionMetadata(Collections.emptyMap());
|
group.setSubscriptionMetadata(Map.of());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4287,8 +4287,8 @@ public class GroupMetadataManager {
|
||||||
ConsumerGroupMember newMember = new ConsumerGroupMember.Builder(oldMember)
|
ConsumerGroupMember newMember = new ConsumerGroupMember.Builder(oldMember)
|
||||||
.setMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
.setMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
||||||
.setPreviousMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
.setPreviousMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
||||||
.setAssignedPartitions(Collections.emptyMap())
|
.setAssignedPartitions(Map.of())
|
||||||
.setPartitionsPendingRevocation(Collections.emptyMap())
|
.setPartitionsPendingRevocation(Map.of())
|
||||||
.build();
|
.build();
|
||||||
group.updateMember(newMember);
|
group.updateMember(newMember);
|
||||||
}
|
}
|
||||||
|
@ -4395,7 +4395,7 @@ public class GroupMetadataManager {
|
||||||
});
|
});
|
||||||
streamsGroup.setPartitionMetadata(partitionMetadata);
|
streamsGroup.setPartitionMetadata(partitionMetadata);
|
||||||
} else {
|
} else {
|
||||||
streamsGroup.setPartitionMetadata(Collections.emptyMap());
|
streamsGroup.setPartitionMetadata(Map.of());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4657,7 +4657,7 @@ public class GroupMetadataManager {
|
||||||
);
|
);
|
||||||
group.setSubscriptionMetadata(subscriptionMetadata);
|
group.setSubscriptionMetadata(subscriptionMetadata);
|
||||||
} else {
|
} else {
|
||||||
group.setSubscriptionMetadata(Collections.emptyMap());
|
group.setSubscriptionMetadata(Map.of());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -4735,7 +4735,7 @@ public class GroupMetadataManager {
|
||||||
ShareGroupMember newMember = new ShareGroupMember.Builder(oldMember)
|
ShareGroupMember newMember = new ShareGroupMember.Builder(oldMember)
|
||||||
.setMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
.setMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
||||||
.setPreviousMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
.setPreviousMemberEpoch(LEAVE_GROUP_MEMBER_EPOCH)
|
||||||
.setAssignedPartitions(Collections.emptyMap())
|
.setAssignedPartitions(Map.of())
|
||||||
.build();
|
.build();
|
||||||
group.updateMember(newMember);
|
group.updateMember(newMember);
|
||||||
}
|
}
|
||||||
|
@ -5410,7 +5410,7 @@ public class GroupMetadataManager {
|
||||||
// for the current generation.
|
// for the current generation.
|
||||||
responseFuture.complete(new JoinGroupResponseData()
|
responseFuture.complete(new JoinGroupResponseData()
|
||||||
.setMembers(group.isLeader(memberId) ?
|
.setMembers(group.isLeader(memberId) ?
|
||||||
group.currentClassicGroupMembers() : Collections.emptyList())
|
group.currentClassicGroupMembers() : List.of())
|
||||||
.setMemberId(memberId)
|
.setMemberId(memberId)
|
||||||
.setGenerationId(group.generationId())
|
.setGenerationId(group.generationId())
|
||||||
.setProtocolName(group.protocolName().orElse(null))
|
.setProtocolName(group.protocolName().orElse(null))
|
||||||
|
@ -5455,7 +5455,7 @@ public class GroupMetadataManager {
|
||||||
// For followers with no actual change to their metadata, just return group information
|
// For followers with no actual change to their metadata, just return group information
|
||||||
// for the current generation which will allow them to issue SyncGroup.
|
// for the current generation which will allow them to issue SyncGroup.
|
||||||
responseFuture.complete(new JoinGroupResponseData()
|
responseFuture.complete(new JoinGroupResponseData()
|
||||||
.setMembers(Collections.emptyList())
|
.setMembers(List.of())
|
||||||
.setMemberId(memberId)
|
.setMemberId(memberId)
|
||||||
.setGenerationId(group.generationId())
|
.setGenerationId(group.generationId())
|
||||||
.setProtocolName(group.protocolName().orElse(null))
|
.setProtocolName(group.protocolName().orElse(null))
|
||||||
|
@ -5562,8 +5562,8 @@ public class GroupMetadataManager {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
List<CoordinatorRecord> records = Collections.singletonList(GroupCoordinatorRecordHelpers.newGroupMetadataRecord(
|
List<CoordinatorRecord> records = List.of(GroupCoordinatorRecordHelpers.newGroupMetadataRecord(
|
||||||
group, Collections.emptyMap()));
|
group, Map.of()));
|
||||||
|
|
||||||
return new CoordinatorResult<>(records, appendFuture, false);
|
return new CoordinatorResult<>(records, appendFuture, false);
|
||||||
|
|
||||||
|
@ -5573,7 +5573,7 @@ public class GroupMetadataManager {
|
||||||
|
|
||||||
// Complete the awaiting join group response future for all the members after rebalancing
|
// Complete the awaiting join group response future for all the members after rebalancing
|
||||||
group.allMembers().forEach(member -> {
|
group.allMembers().forEach(member -> {
|
||||||
List<JoinGroupResponseData.JoinGroupResponseMember> members = Collections.emptyList();
|
List<JoinGroupResponseData.JoinGroupResponseMember> members = List.of();
|
||||||
if (group.isLeader(member.memberId())) {
|
if (group.isLeader(member.memberId())) {
|
||||||
members = group.currentClassicGroupMembers();
|
members = group.currentClassicGroupMembers();
|
||||||
}
|
}
|
||||||
|
@ -6204,7 +6204,7 @@ public class GroupMetadataManager {
|
||||||
boolean isLeader = group.isLeader(newMemberId);
|
boolean isLeader = group.isLeader(newMemberId);
|
||||||
|
|
||||||
group.completeJoinFuture(newMember, new JoinGroupResponseData()
|
group.completeJoinFuture(newMember, new JoinGroupResponseData()
|
||||||
.setMembers(isLeader ? group.currentClassicGroupMembers() : Collections.emptyList())
|
.setMembers(isLeader ? group.currentClassicGroupMembers() : List.of())
|
||||||
.setMemberId(newMemberId)
|
.setMemberId(newMemberId)
|
||||||
.setGenerationId(group.generationId())
|
.setGenerationId(group.generationId())
|
||||||
.setProtocolName(group.protocolName().orElse(null))
|
.setProtocolName(group.protocolName().orElse(null))
|
||||||
|
@ -6224,7 +6224,7 @@ public class GroupMetadataManager {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
List<CoordinatorRecord> records = Collections.singletonList(
|
List<CoordinatorRecord> records = List.of(
|
||||||
GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, groupAssignment)
|
GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, groupAssignment)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -6371,7 +6371,7 @@ public class GroupMetadataManager {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
List<CoordinatorRecord> records = Collections.singletonList(
|
List<CoordinatorRecord> records = List.of(
|
||||||
GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, assignment)
|
GroupCoordinatorRecordHelpers.newGroupMetadataRecord(group, assignment)
|
||||||
);
|
);
|
||||||
return new CoordinatorResult<>(records, appendFuture, false);
|
return new CoordinatorResult<>(records, appendFuture, false);
|
||||||
|
@ -6433,7 +6433,7 @@ public class GroupMetadataManager {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
return new CoordinatorResult<>(Collections.emptyList(), appendFuture, false);
|
return new CoordinatorResult<>(List.of(), appendFuture, false);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -6586,14 +6586,14 @@ public class GroupMetadataManager {
|
||||||
switch (group.currentState()) {
|
switch (group.currentState()) {
|
||||||
case EMPTY:
|
case EMPTY:
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
new HeartbeatResponseData().setErrorCode(Errors.UNKNOWN_MEMBER_ID.code())
|
new HeartbeatResponseData().setErrorCode(Errors.UNKNOWN_MEMBER_ID.code())
|
||||||
);
|
);
|
||||||
|
|
||||||
case PREPARING_REBALANCE:
|
case PREPARING_REBALANCE:
|
||||||
rescheduleClassicGroupMemberHeartbeat(group, group.member(request.memberId()));
|
rescheduleClassicGroupMemberHeartbeat(group, group.member(request.memberId()));
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
new HeartbeatResponseData().setErrorCode(Errors.REBALANCE_IN_PROGRESS.code())
|
new HeartbeatResponseData().setErrorCode(Errors.REBALANCE_IN_PROGRESS.code())
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -6604,7 +6604,7 @@ public class GroupMetadataManager {
|
||||||
// normal heartbeat requests and reset the timer
|
// normal heartbeat requests and reset the timer
|
||||||
rescheduleClassicGroupMemberHeartbeat(group, group.member(request.memberId()));
|
rescheduleClassicGroupMemberHeartbeat(group, group.member(request.memberId()));
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
new HeartbeatResponseData()
|
new HeartbeatResponseData()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -6687,7 +6687,7 @@ public class GroupMetadataManager {
|
||||||
}
|
}
|
||||||
|
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
new HeartbeatResponseData().setErrorCode(error.code())
|
new HeartbeatResponseData().setErrorCode(error.code())
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -6827,7 +6827,7 @@ public class GroupMetadataManager {
|
||||||
) throws UnknownMemberIdException {
|
) throws UnknownMemberIdException {
|
||||||
if (group.isInState(DEAD)) {
|
if (group.isInState(DEAD)) {
|
||||||
return new CoordinatorResult<>(
|
return new CoordinatorResult<>(
|
||||||
Collections.emptyList(),
|
List.of(),
|
||||||
new LeaveGroupResponseData()
|
new LeaveGroupResponseData()
|
||||||
.setErrorCode(COORDINATOR_NOT_AVAILABLE.code())
|
.setErrorCode(COORDINATOR_NOT_AVAILABLE.code())
|
||||||
);
|
);
|
||||||
|
@ -6897,7 +6897,7 @@ public class GroupMetadataManager {
|
||||||
List<String> validLeaveGroupMembers = memberResponses.stream()
|
List<String> validLeaveGroupMembers = memberResponses.stream()
|
||||||
.filter(response -> response.errorCode() == Errors.NONE.code())
|
.filter(response -> response.errorCode() == Errors.NONE.code())
|
||||||
.map(MemberResponse::memberId)
|
.map(MemberResponse::memberId)
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
|
|
||||||
String reason = "explicit `LeaveGroup` request for (" + String.join(", ", validLeaveGroupMembers) + ") members.";
|
String reason = "explicit `LeaveGroup` request for (" + String.join(", ", validLeaveGroupMembers) + ") members.";
|
||||||
CoordinatorResult<Void, CoordinatorRecord> coordinatorResult = EMPTY_RESULT;
|
CoordinatorResult<Void, CoordinatorRecord> coordinatorResult = EMPTY_RESULT;
|
||||||
|
|
|
@ -55,7 +55,6 @@ import org.apache.kafka.timeline.TimelineHashSet;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -799,7 +798,7 @@ public class OffsetMetadataManager {
|
||||||
} catch (GroupIdNotFoundException ex) {
|
} catch (GroupIdNotFoundException ex) {
|
||||||
return new OffsetFetchResponseData.OffsetFetchResponseGroup()
|
return new OffsetFetchResponseData.OffsetFetchResponseGroup()
|
||||||
.setGroupId(request.groupId())
|
.setGroupId(request.groupId())
|
||||||
.setTopics(Collections.emptyList());
|
.setTopics(List.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
final List<OffsetFetchResponseData.OffsetFetchResponseTopics> topicResponses = new ArrayList<>();
|
final List<OffsetFetchResponseData.OffsetFetchResponseTopics> topicResponses = new ArrayList<>();
|
||||||
|
|
|
@ -28,7 +28,6 @@ import org.apache.kafka.coordinator.group.api.assignor.SubscriptionType;
|
||||||
import org.apache.kafka.coordinator.group.modern.MemberAssignmentImpl;
|
import org.apache.kafka.coordinator.group.modern.MemberAssignmentImpl;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
@ -306,7 +305,7 @@ public class RangeAssignor implements ConsumerGroupPartitionAssignor {
|
||||||
SubscribedTopicDescriber subscribedTopicDescriber
|
SubscribedTopicDescriber subscribedTopicDescriber
|
||||||
) throws PartitionAssignorException {
|
) throws PartitionAssignorException {
|
||||||
if (groupSpec.memberIds().isEmpty()) {
|
if (groupSpec.memberIds().isEmpty()) {
|
||||||
return new GroupAssignment(Collections.emptyMap());
|
return new GroupAssignment(Map.of());
|
||||||
} else if (groupSpec.subscriptionType() == SubscriptionType.HOMOGENEOUS) {
|
} else if (groupSpec.subscriptionType() == SubscriptionType.HOMOGENEOUS) {
|
||||||
return assignHomogeneousGroup(groupSpec, subscribedTopicDescriber);
|
return assignHomogeneousGroup(groupSpec, subscribedTopicDescriber);
|
||||||
} else {
|
} else {
|
||||||
|
|
|
@ -29,7 +29,6 @@ import org.apache.kafka.server.common.TopicIdPartition;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
||||||
|
@ -227,7 +226,7 @@ public class SimpleAssignor implements ShareGroupPartitionAssignor {
|
||||||
// When combining current assignment, we need to only consider the member topic subscription in current assignment
|
// When combining current assignment, we need to only consider the member topic subscription in current assignment
|
||||||
// which is being subscribed in the new assignment as well.
|
// which is being subscribed in the new assignment as well.
|
||||||
currentAssignment.forEach((topicIdPartition, members) -> members.forEach(member -> {
|
currentAssignment.forEach((topicIdPartition, members) -> members.forEach(member -> {
|
||||||
if (topicToMemberSubscription.getOrDefault(topicIdPartition.topicId(), Collections.emptySet()).contains(member)
|
if (topicToMemberSubscription.getOrDefault(topicIdPartition.topicId(), Set.of()).contains(member)
|
||||||
&& !newAssignment.containsKey(topicIdPartition))
|
&& !newAssignment.containsKey(topicIdPartition))
|
||||||
finalAssignment.computeIfAbsent(member, k -> new HashSet<>()).add(topicIdPartition);
|
finalAssignment.computeIfAbsent(member, k -> new HashSet<>()).add(topicIdPartition);
|
||||||
}));
|
}));
|
||||||
|
|
|
@ -25,7 +25,7 @@ import org.apache.kafka.coordinator.group.api.assignor.SubscribedTopicDescriber;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.util.Collections;
|
import java.util.Map;
|
||||||
|
|
||||||
import static org.apache.kafka.coordinator.group.api.assignor.SubscriptionType.HOMOGENEOUS;
|
import static org.apache.kafka.coordinator.group.api.assignor.SubscriptionType.HOMOGENEOUS;
|
||||||
|
|
||||||
|
@ -73,7 +73,7 @@ public class UniformAssignor implements ConsumerGroupPartitionAssignor {
|
||||||
SubscribedTopicDescriber subscribedTopicDescriber
|
SubscribedTopicDescriber subscribedTopicDescriber
|
||||||
) throws PartitionAssignorException {
|
) throws PartitionAssignorException {
|
||||||
if (groupSpec.memberIds().isEmpty())
|
if (groupSpec.memberIds().isEmpty())
|
||||||
return new GroupAssignment(Collections.emptyMap());
|
return new GroupAssignment(Map.of());
|
||||||
|
|
||||||
if (groupSpec.subscriptionType().equals(HOMOGENEOUS)) {
|
if (groupSpec.subscriptionType().equals(HOMOGENEOUS)) {
|
||||||
LOG.debug("Detected that all members are subscribed to the same set of topics, invoking the "
|
LOG.debug("Detected that all members are subscribed to the same set of topics, invoking the "
|
||||||
|
|
|
@ -27,7 +27,6 @@ import org.apache.kafka.coordinator.group.modern.MemberAssignmentImpl;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Comparator;
|
import java.util.Comparator;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
@ -238,7 +237,7 @@ public class UniformHeterogeneousAssignmentBuilder {
|
||||||
*/
|
*/
|
||||||
public GroupAssignment build() {
|
public GroupAssignment build() {
|
||||||
if (subscribedTopicIds.isEmpty()) {
|
if (subscribedTopicIds.isEmpty()) {
|
||||||
return new GroupAssignment(Collections.emptyMap());
|
return new GroupAssignment(Map.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
maybeRevokePartitions();
|
maybeRevokePartitions();
|
||||||
|
|
|
@ -26,7 +26,6 @@ import org.apache.kafka.coordinator.group.modern.MemberAssignmentImpl;
|
||||||
import org.apache.kafka.server.common.TopicIdPartition;
|
import org.apache.kafka.server.common.TopicIdPartition;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -109,7 +108,7 @@ public class UniformHomogeneousAssignmentBuilder {
|
||||||
*/
|
*/
|
||||||
public GroupAssignment build() throws PartitionAssignorException {
|
public GroupAssignment build() throws PartitionAssignorException {
|
||||||
if (subscribedTopicIds.isEmpty()) {
|
if (subscribedTopicIds.isEmpty()) {
|
||||||
return new GroupAssignment(Collections.emptyMap());
|
return new GroupAssignment(Map.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
// Compute the list of unassigned partitions.
|
// Compute the list of unassigned partitions.
|
||||||
|
|
|
@ -46,7 +46,6 @@ import org.slf4j.Logger;
|
||||||
|
|
||||||
import java.nio.ByteBuffer;
|
import java.nio.ByteBuffer;
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Comparator;
|
import java.util.Comparator;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
@ -551,7 +550,7 @@ public class ClassicGroup implements Group {
|
||||||
|
|
||||||
// Fence potential duplicate member immediately if someone awaits join/sync future.
|
// Fence potential duplicate member immediately if someone awaits join/sync future.
|
||||||
JoinGroupResponseData joinGroupResponse = new JoinGroupResponseData()
|
JoinGroupResponseData joinGroupResponse = new JoinGroupResponseData()
|
||||||
.setMembers(Collections.emptyList())
|
.setMembers(List.of())
|
||||||
.setMemberId(oldMemberId)
|
.setMemberId(oldMemberId)
|
||||||
.setProtocolName(null)
|
.setProtocolName(null)
|
||||||
.setProtocolType(null)
|
.setProtocolType(null)
|
||||||
|
@ -1150,7 +1149,7 @@ public class ClassicGroup implements Group {
|
||||||
return Optional.empty();
|
return Optional.empty();
|
||||||
}
|
}
|
||||||
if (members.isEmpty()) {
|
if (members.isEmpty()) {
|
||||||
return Optional.of(Collections.emptySet());
|
return Optional.of(Set.of());
|
||||||
}
|
}
|
||||||
|
|
||||||
if (protocolName.isPresent()) {
|
if (protocolName.isPresent()) {
|
||||||
|
@ -1310,7 +1309,7 @@ public class ClassicGroup implements Group {
|
||||||
.setMemberId(member.memberId())
|
.setMemberId(member.memberId())
|
||||||
.setGroupInstanceId(member.groupInstanceId().orElse(null))
|
.setGroupInstanceId(member.groupInstanceId().orElse(null))
|
||||||
.setMetadata(member.metadata(protocolName.orElse(null))))
|
.setMetadata(member.metadata(protocolName.orElse(null))))
|
||||||
.collect(Collectors.toList());
|
.toList();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
|
|
@ -28,7 +28,6 @@ import java.util.HashSet;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.concurrent.CompletableFuture;
|
import java.util.concurrent.CompletableFuture;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This class encapsulates a classic group member's metadata.
|
* This class encapsulates a classic group member's metadata.
|
||||||
|
@ -430,7 +429,7 @@ public class ClassicGroupMember {
|
||||||
", protocolType='" + protocolType + '\'' +
|
", protocolType='" + protocolType + '\'' +
|
||||||
", supportedProtocols=" + supportedProtocols.stream()
|
", supportedProtocols=" + supportedProtocols.stream()
|
||||||
.map(JoinGroupRequestProtocol::name)
|
.map(JoinGroupRequestProtocol::name)
|
||||||
.collect(Collectors.toList()) +
|
.toList() +
|
||||||
')';
|
')';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -126,49 +126,49 @@ public class GroupCoordinatorMetrics extends CoordinatorMetrics implements AutoC
|
||||||
GROUP_COUNT_METRIC_NAME,
|
GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The total number of groups using the classic rebalance protocol.",
|
"The total number of groups using the classic rebalance protocol.",
|
||||||
Collections.singletonMap(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.CLASSIC.toString())
|
Map.of(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.CLASSIC.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountMetricName = metrics.metricName(
|
consumerGroupCountMetricName = metrics.metricName(
|
||||||
GROUP_COUNT_METRIC_NAME,
|
GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The total number of groups using the consumer rebalance protocol.",
|
"The total number of groups using the consumer rebalance protocol.",
|
||||||
Collections.singletonMap(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.CONSUMER.toString())
|
Map.of(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.CONSUMER.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountEmptyMetricName = metrics.metricName(
|
consumerGroupCountEmptyMetricName = metrics.metricName(
|
||||||
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of consumer groups in empty state.",
|
"The number of consumer groups in empty state.",
|
||||||
Collections.singletonMap(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.EMPTY.toString())
|
Map.of(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.EMPTY.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountAssigningMetricName = metrics.metricName(
|
consumerGroupCountAssigningMetricName = metrics.metricName(
|
||||||
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of consumer groups in assigning state.",
|
"The number of consumer groups in assigning state.",
|
||||||
Collections.singletonMap(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.ASSIGNING.toString())
|
Map.of(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.ASSIGNING.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountReconcilingMetricName = metrics.metricName(
|
consumerGroupCountReconcilingMetricName = metrics.metricName(
|
||||||
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of consumer groups in reconciling state.",
|
"The number of consumer groups in reconciling state.",
|
||||||
Collections.singletonMap(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.RECONCILING.toString())
|
Map.of(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.RECONCILING.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountStableMetricName = metrics.metricName(
|
consumerGroupCountStableMetricName = metrics.metricName(
|
||||||
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of consumer groups in stable state.",
|
"The number of consumer groups in stable state.",
|
||||||
Collections.singletonMap(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.STABLE.toString())
|
Map.of(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.STABLE.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
consumerGroupCountDeadMetricName = metrics.metricName(
|
consumerGroupCountDeadMetricName = metrics.metricName(
|
||||||
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
CONSUMER_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of consumer groups in dead state.",
|
"The number of consumer groups in dead state.",
|
||||||
Collections.singletonMap(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.DEAD.toString())
|
Map.of(CONSUMER_GROUP_COUNT_STATE_TAG, ConsumerGroupState.DEAD.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
shareGroupCountMetricName = metrics.metricName(
|
shareGroupCountMetricName = metrics.metricName(
|
||||||
|
@ -203,49 +203,49 @@ public class GroupCoordinatorMetrics extends CoordinatorMetrics implements AutoC
|
||||||
GROUP_COUNT_METRIC_NAME,
|
GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The total number of groups using the streams rebalance protocol.",
|
"The total number of groups using the streams rebalance protocol.",
|
||||||
Collections.singletonMap(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.STREAMS.toString())
|
Map.of(GROUP_COUNT_PROTOCOL_TAG, Group.GroupType.STREAMS.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountEmptyMetricName = metrics.metricName(
|
streamsGroupCountEmptyMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in empty state.",
|
"The number of streams groups in empty state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.EMPTY.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.EMPTY.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountAssigningMetricName = metrics.metricName(
|
streamsGroupCountAssigningMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in assigning state.",
|
"The number of streams groups in assigning state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.ASSIGNING.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.ASSIGNING.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountReconcilingMetricName = metrics.metricName(
|
streamsGroupCountReconcilingMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in reconciling state.",
|
"The number of streams groups in reconciling state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.RECONCILING.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.RECONCILING.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountStableMetricName = metrics.metricName(
|
streamsGroupCountStableMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in stable state.",
|
"The number of streams groups in stable state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.STABLE.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.STABLE.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountDeadMetricName = metrics.metricName(
|
streamsGroupCountDeadMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in dead state.",
|
"The number of streams groups in dead state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.DEAD.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.DEAD.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
streamsGroupCountNotReadyMetricName = metrics.metricName(
|
streamsGroupCountNotReadyMetricName = metrics.metricName(
|
||||||
STREAMS_GROUP_COUNT_METRIC_NAME,
|
STREAMS_GROUP_COUNT_METRIC_NAME,
|
||||||
METRICS_GROUP,
|
METRICS_GROUP,
|
||||||
"The number of streams groups in not ready state.",
|
"The number of streams groups in not ready state.",
|
||||||
Collections.singletonMap(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.NOT_READY.toString())
|
Map.of(STREAMS_GROUP_COUNT_STATE_TAG, StreamsGroupState.NOT_READY.toString())
|
||||||
);
|
);
|
||||||
|
|
||||||
registerGauges();
|
registerGauges();
|
||||||
|
|
|
@ -29,7 +29,6 @@ import org.apache.kafka.timeline.TimelineLong;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
import java.util.concurrent.atomic.AtomicLong;
|
import java.util.concurrent.atomic.AtomicLong;
|
||||||
|
@ -112,10 +111,10 @@ public class GroupCoordinatorMetricsShard implements CoordinatorMetricsShard {
|
||||||
numOffsetsTimelineGaugeCounter = new TimelineGaugeCounter(new TimelineLong(snapshotRegistry), new AtomicLong(0));
|
numOffsetsTimelineGaugeCounter = new TimelineGaugeCounter(new TimelineLong(snapshotRegistry), new AtomicLong(0));
|
||||||
numClassicGroupsTimelineCounter = new TimelineGaugeCounter(new TimelineLong(snapshotRegistry), new AtomicLong(0));
|
numClassicGroupsTimelineCounter = new TimelineGaugeCounter(new TimelineLong(snapshotRegistry), new AtomicLong(0));
|
||||||
|
|
||||||
this.classicGroupGauges = Collections.emptyMap();
|
this.classicGroupGauges = Map.of();
|
||||||
this.consumerGroupGauges = Collections.emptyMap();
|
this.consumerGroupGauges = Map.of();
|
||||||
this.streamsGroupGauges = Collections.emptyMap();
|
this.streamsGroupGauges = Map.of();
|
||||||
this.shareGroupGauges = Collections.emptyMap();
|
this.shareGroupGauges = Map.of();
|
||||||
|
|
||||||
this.globalSensors = Objects.requireNonNull(globalSensors);
|
this.globalSensors = Objects.requireNonNull(globalSensors);
|
||||||
this.topicPartition = Objects.requireNonNull(topicPartition);
|
this.topicPartition = Objects.requireNonNull(topicPartition);
|
||||||
|
|
|
@ -32,7 +32,7 @@ import java.util.stream.Collectors;
|
||||||
* An immutable assignment for a member.
|
* An immutable assignment for a member.
|
||||||
*/
|
*/
|
||||||
public class Assignment implements MemberAssignment {
|
public class Assignment implements MemberAssignment {
|
||||||
public static final Assignment EMPTY = new Assignment(Collections.emptyMap());
|
public static final Assignment EMPTY = new Assignment(Map.of());
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The partitions assigned to the member.
|
* The partitions assigned to the member.
|
||||||
|
|
|
@ -23,7 +23,6 @@ import org.apache.kafka.coordinator.group.api.assignor.MemberSubscription;
|
||||||
import org.apache.kafka.coordinator.group.api.assignor.SubscriptionType;
|
import org.apache.kafka.coordinator.group.api.assignor.SubscriptionType;
|
||||||
|
|
||||||
import java.util.Collection;
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
|
|
||||||
|
@ -104,7 +103,7 @@ public class GroupSpecImpl implements GroupSpec {
|
||||||
public MemberAssignment memberAssignment(String memberId) {
|
public MemberAssignment memberAssignment(String memberId) {
|
||||||
MemberSubscriptionAndAssignmentImpl member = members.get(memberId);
|
MemberSubscriptionAndAssignmentImpl member = members.get(memberId);
|
||||||
if (member == null) {
|
if (member == null) {
|
||||||
return new MemberAssignmentImpl(Collections.emptyMap());
|
return new MemberAssignmentImpl(Map.of());
|
||||||
}
|
}
|
||||||
return member;
|
return member;
|
||||||
}
|
}
|
||||||
|
|
|
@ -270,7 +270,7 @@ public abstract class ModernGroup<T extends ModernGroupMember> implements Group
|
||||||
public void updateTargetAssignment(String memberId, Assignment newTargetAssignment) {
|
public void updateTargetAssignment(String memberId, Assignment newTargetAssignment) {
|
||||||
updateInvertedTargetAssignment(
|
updateInvertedTargetAssignment(
|
||||||
memberId,
|
memberId,
|
||||||
targetAssignment.getOrDefault(memberId, new Assignment(Collections.emptyMap())),
|
targetAssignment.getOrDefault(memberId, new Assignment(Map.of())),
|
||||||
newTargetAssignment
|
newTargetAssignment
|
||||||
);
|
);
|
||||||
targetAssignment.put(memberId, newTargetAssignment);
|
targetAssignment.put(memberId, newTargetAssignment);
|
||||||
|
@ -294,8 +294,8 @@ public abstract class ModernGroup<T extends ModernGroupMember> implements Group
|
||||||
allTopicIds.addAll(newTargetAssignment.partitions().keySet());
|
allTopicIds.addAll(newTargetAssignment.partitions().keySet());
|
||||||
|
|
||||||
for (Uuid topicId : allTopicIds) {
|
for (Uuid topicId : allTopicIds) {
|
||||||
Set<Integer> oldPartitions = oldTargetAssignment.partitions().getOrDefault(topicId, Collections.emptySet());
|
Set<Integer> oldPartitions = oldTargetAssignment.partitions().getOrDefault(topicId, Set.of());
|
||||||
Set<Integer> newPartitions = newTargetAssignment.partitions().getOrDefault(topicId, Collections.emptySet());
|
Set<Integer> newPartitions = newTargetAssignment.partitions().getOrDefault(topicId, Set.of());
|
||||||
|
|
||||||
TimelineHashMap<Integer, String> topicPartitionAssignment = invertedTargetAssignment.computeIfAbsent(
|
TimelineHashMap<Integer, String> topicPartitionAssignment = invertedTargetAssignment.computeIfAbsent(
|
||||||
topicId, k -> new TimelineHashMap<>(snapshotRegistry, Math.max(oldPartitions.size(), newPartitions.size()))
|
topicId, k -> new TimelineHashMap<>(snapshotRegistry, Math.max(oldPartitions.size(), newPartitions.size()))
|
||||||
|
|
|
@ -20,7 +20,6 @@ import org.apache.kafka.common.Uuid;
|
||||||
import org.apache.kafka.coordinator.group.api.assignor.PartitionAssignor;
|
import org.apache.kafka.coordinator.group.api.assignor.PartitionAssignor;
|
||||||
import org.apache.kafka.coordinator.group.api.assignor.SubscribedTopicDescriber;
|
import org.apache.kafka.coordinator.group.api.assignor.SubscribedTopicDescriber;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Objects;
|
import java.util.Objects;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
@ -72,7 +71,7 @@ public class SubscribedTopicDescriberImpl implements SubscribedTopicDescriber {
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public Set<String> racksForPartition(Uuid topicId, int partition) {
|
public Set<String> racksForPartition(Uuid topicId, int partition) {
|
||||||
return Collections.emptySet();
|
return Set.of();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@@ -97,7 +97,7 @@ public abstract class TargetAssignmentBuilder<T extends ModernGroupMember, U ext
         /**
          * The resolved regular expressions.
          */
-        private Map<String, ResolvedRegularExpression> resolvedRegularExpressions = Collections.emptyMap();
+        private Map<String, ResolvedRegularExpression> resolvedRegularExpressions = Map.of();

         public ConsumerTargetAssignmentBuilder(
             String groupId,
@@ -248,12 +248,12 @@ public abstract class TargetAssignmentBuilder<T extends ModernGroupMember, U ext
     /**
      * The members in the group.
      */
-    private Map<String, T> members = Collections.emptyMap();
+    private Map<String, T> members = Map.of();

     /**
      * The subscription metadata.
      */
-    private Map<String, TopicMetadata> subscriptionMetadata = Collections.emptyMap();
+    private Map<String, TopicMetadata> subscriptionMetadata = Map.of();

     /**
      * The subscription type of the consumer group.
@@ -263,13 +263,13 @@ public abstract class TargetAssignmentBuilder<T extends ModernGroupMember, U ext
     /**
      * The existing target assignment.
      */
-    private Map<String, Assignment> targetAssignment = Collections.emptyMap();
+    private Map<String, Assignment> targetAssignment = Map.of();

     /**
      * Reverse lookup map representing topic partitions with
      * their current member assignments.
      */
-    private Map<Uuid, Map<Integer, String>> invertedTargetAssignment = Collections.emptyMap();
+    private Map<Uuid, Map<Integer, String>> invertedTargetAssignment = Map.of();

     /**
      * The topics image.
@@ -1151,7 +1151,7 @@ public class ConsumerGroup extends ModernGroup<ConsumerGroupMember> {
         // We should accept the empty assignment.
         Map<Uuid, Set<Integer>> assignedPartitions;
         if (Arrays.equals(classicGroupMember.assignment(), EMPTY_ASSIGNMENT)) {
-            assignedPartitions = Collections.emptyMap();
+            assignedPartitions = Map.of();
         } else {
             ConsumerProtocolAssignment assignment = ConsumerProtocol.deserializeConsumerProtocolAssignment(
                 ByteBuffer.wrap(classicGroupMember.assignment())
@@ -1287,7 +1287,7 @@ public class ConsumerGroup extends ModernGroup<ConsumerGroupMember> {
         if (member.state() == MemberState.UNRELEASED_PARTITIONS) {
             for (Map.Entry<Uuid, Set<Integer>> entry : targetAssignment().get(member.memberId()).partitions().entrySet()) {
                 Uuid topicId = entry.getKey();
-                Set<Integer> assignedPartitions = member.assignedPartitions().getOrDefault(topicId, Collections.emptySet());
+                Set<Integer> assignedPartitions = member.assignedPartitions().getOrDefault(topicId, Set.of());

                 for (int partition : entry.getValue()) {
                     if (!assignedPartitions.contains(partition) && currentPartitionEpoch(topicId, partition) != -1) {
@@ -29,7 +29,6 @@ import org.apache.kafka.image.TopicImage;
 import org.apache.kafka.image.TopicsImage;

 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -62,11 +61,11 @@ public class ConsumerGroupMember extends ModernGroupMember {
         private int rebalanceTimeoutMs = -1;
         private String clientId = "";
         private String clientHost = "";
-        private Set<String> subscribedTopicNames = Collections.emptySet();
+        private Set<String> subscribedTopicNames = Set.of();
         private String subscribedTopicRegex = "";
         private String serverAssignorName = null;
-        private Map<Uuid, Set<Integer>> assignedPartitions = Collections.emptyMap();
-        private Map<Uuid, Set<Integer>> partitionsPendingRevocation = Collections.emptyMap();
+        private Map<Uuid, Set<Integer>> assignedPartitions = Map.of();
+        private Map<Uuid, Set<Integer>> partitionsPendingRevocation = Map.of();
         private ConsumerGroupMemberMetadataValue.ClassicMemberMetadata classicMemberMetadata = null;

         public Builder(String memberId) {
@@ -400,7 +399,7 @@ public class ConsumerGroupMember extends ModernGroupMember {
                 .setTopicPartitions(topicPartitionsFromMap(assignedPartitions, topicsImage)))
             .setTargetAssignment(new ConsumerGroupDescribeResponseData.Assignment()
                 .setTopicPartitions(topicPartitionsFromMap(
-                    targetAssignment != null ? targetAssignment.partitions() : Collections.emptyMap(),
+                    targetAssignment != null ? targetAssignment.partitions() : Map.of(),
                     topicsImage
                 )))
             .setClientHost(clientHost)
@@ -22,7 +22,6 @@ import org.apache.kafka.common.message.ConsumerGroupHeartbeatRequestData;
 import org.apache.kafka.coordinator.group.modern.Assignment;
 import org.apache.kafka.coordinator.group.modern.MemberState;

-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -204,7 +203,7 @@ public class CurrentAssignmentBuilder {

         for (ConsumerGroupHeartbeatRequestData.TopicPartitions topicPartitions : ownedTopicPartitions) {
             Set<Integer> partitionsPendingRevocation =
-                assignment.getOrDefault(topicPartitions.topicId(), Collections.emptySet());
+                assignment.getOrDefault(topicPartitions.topicId(), Set.of());

             for (Integer partitionId : topicPartitions.partitions()) {
                 if (partitionsPendingRevocation.contains(partitionId)) {
@@ -238,9 +237,9 @@ public class CurrentAssignmentBuilder {

         for (Uuid topicId : allTopicIds) {
             Set<Integer> target = targetAssignment.partitions()
-                .getOrDefault(topicId, Collections.emptySet());
+                .getOrDefault(topicId, Set.of());
             Set<Integer> currentAssignedPartitions = memberAssignedPartitions
-                .getOrDefault(topicId, Collections.emptySet());
+                .getOrDefault(topicId, Set.of());

             // New Assigned Partitions = Previous Assigned Partitions ∩ Target
             Set<Integer> assignedPartitions = new HashSet<>(currentAssignedPartitions);
@@ -295,7 +294,7 @@ public class CurrentAssignmentBuilder {
                 .setState(newState)
                 .updateMemberEpoch(targetAssignmentEpoch)
                 .setAssignedPartitions(newAssignedPartitions)
-                .setPartitionsPendingRevocation(Collections.emptyMap())
+                .setPartitionsPendingRevocation(Map.of())
                 .build();
         } else if (hasUnreleasedPartitions) {
             // If there are no partitions to be revoked nor to be assigned but some
@@ -305,7 +304,7 @@ public class CurrentAssignmentBuilder {
                 .setState(MemberState.UNRELEASED_PARTITIONS)
                 .updateMemberEpoch(targetAssignmentEpoch)
                 .setAssignedPartitions(newAssignedPartitions)
-                .setPartitionsPendingRevocation(Collections.emptyMap())
+                .setPartitionsPendingRevocation(Map.of())
                 .build();
         } else {
             // Otherwise, the member transitions to the target epoch and to the
@@ -314,7 +313,7 @@ public class CurrentAssignmentBuilder {
                 .setState(MemberState.STABLE)
                 .updateMemberEpoch(targetAssignmentEpoch)
                 .setAssignedPartitions(newAssignedPartitions)
-                .setPartitionsPendingRevocation(Collections.emptyMap())
+                .setPartitionsPendingRevocation(Map.of())
                 .build();
         }
     }
@@ -24,7 +24,7 @@ import java.util.Set;
  * The metadata associated with a regular expression in a Consumer Group.
  */
 public class ResolvedRegularExpression {
-    public static final ResolvedRegularExpression EMPTY = new ResolvedRegularExpression(Collections.emptySet(), -1L, -1L);
+    public static final ResolvedRegularExpression EMPTY = new ResolvedRegularExpression(Set.of(), -1L, -1L);

     /**
      * The set of resolved topics.
@@ -27,7 +27,6 @@ import org.apache.kafka.image.TopicImage;
 import org.apache.kafka.image.TopicsImage;

 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -56,8 +55,8 @@ public class ShareGroupMember extends ModernGroupMember {
         private String rackId = null;
         private String clientId = "";
         private String clientHost = "";
-        private Set<String> subscribedTopicNames = Collections.emptySet();
-        private Map<Uuid, Set<Integer>> assignedPartitions = Collections.emptyMap();
+        private Set<String> subscribedTopicNames = Set.of();
+        private Map<Uuid, Set<Integer>> assignedPartitions = Map.of();

         public Builder(String memberId) {
             this.memberId = Objects.requireNonNull(memberId);
@@ -18,7 +18,6 @@ package org.apache.kafka.coordinator.group.streams;

 import org.apache.kafka.common.errors.FencedMemberEpochException;

-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -251,8 +250,8 @@ public class CurrentAssignmentBuilder {
         for (String subtopologyId : allSubtopologyIds) {
             hasUnreleasedTasks |= computeAssignmentDifferenceForOneSubtopology(
                 subtopologyId,
-                currentAssignment.getOrDefault(subtopologyId, Collections.emptySet()),
-                targetAssignment.getOrDefault(subtopologyId, Collections.emptySet()),
+                currentAssignment.getOrDefault(subtopologyId, Set.of()),
+                targetAssignment.getOrDefault(subtopologyId, Set.of()),
                 resultAssignedTasks,
                 resultTasksPendingRevocation,
                 resultTasksPendingAssignment,
@@ -40,7 +40,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;

 /**
  * This class contains helper methods to create records stored in the __consumer_offsets topic.
@@ -72,7 +71,7 @@ public class StreamsCoordinatorRecordHelpers {
                         new StreamsGroupMemberMetadataValue.KeyValue()
                             .setKey(e.getKey())
                             .setValue(e.getValue())
-                    ).sorted(Comparator.comparing(StreamsGroupMemberMetadataValue.KeyValue::key)).collect(Collectors.toList())),
+                    ).sorted(Comparator.comparing(StreamsGroupMemberMetadataValue.KeyValue::key)).toList()),
                 (short) 0
             )
         );
@@ -401,12 +400,12 @@ public class StreamsCoordinatorRecordHelpers {
         List<StreamsGroupTopologyValue.TopicInfo> repartitionSourceTopics =
             subtopology.repartitionSourceTopics().stream()
                 .map(StreamsCoordinatorRecordHelpers::convertToTopicInfo)
-                .collect(Collectors.toList());
+                .toList();

         List<StreamsGroupTopologyValue.TopicInfo> stateChangelogTopics =
             subtopology.stateChangelogTopics().stream()
                 .map(StreamsCoordinatorRecordHelpers::convertToTopicInfo)
-                .collect(Collectors.toList());
+                .toList();

         List<StreamsGroupTopologyValue.CopartitionGroup> copartitionGroups =
             subtopology.copartitionGroups().stream()
@@ -415,7 +414,7 @@ public class StreamsCoordinatorRecordHelpers {
                     .setSourceTopicRegex(copartitionGroup.sourceTopicRegex())
                     .setRepartitionSourceTopics(copartitionGroup.repartitionSourceTopics())
                 )
-                .collect(Collectors.toList());
+                .toList();

         value.subtopologies().add(
             new StreamsGroupTopologyValue.Subtopology()
@@ -434,7 +433,7 @@ public class StreamsCoordinatorRecordHelpers {
     private static StreamsGroupTopologyValue.TopicInfo convertToTopicInfo(StreamsGroupHeartbeatRequestData.TopicInfo topicInfo) {
         List<StreamsGroupTopologyValue.TopicConfig> topicConfigs = topicInfo.topicConfigs() != null ? topicInfo.topicConfigs().stream()
             .map(config -> new StreamsGroupTopologyValue.TopicConfig().setKey(config.key()).setValue(config.value()))
-            .collect(Collectors.toList()) : null;
+            .toList() : null;
         return new StreamsGroupTopologyValue.TopicInfo()
             .setName(topicInfo.name())
             .setTopicConfigs(topicConfigs)
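Editor's note on the `.collect(Collectors.toList())` to `.toList()` changes above: the two are interchangeable for these read-only values, but they are not strictly identical — `Stream.toList()` (Java 16+) always returns an unmodifiable list, while `Collectors.toList()` makes no such guarantee. A minimal sketch of the difference (illustration only, not part of the patch; assumes nothing beyond the JDK, and the class name is hypothetical):

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ToListDemo {
        public static void main(String[] args) {
            // Collectors.toList() gives no mutability guarantee (an ArrayList in practice).
            List<String> collected = Stream.of("a", "b").collect(Collectors.toList());
            collected.add("c"); // happens to work, but is not part of the contract

            // Stream.toList() always returns an unmodifiable list.
            List<String> unmodifiable = Stream.of("a", "b").toList();
            try {
                unmodifiable.add("c");
            } catch (UnsupportedOperationException e) {
                System.out.println("Stream.toList() result is unmodifiable");
            }
        }
    }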
@@ -511,9 +511,9 @@ public class StreamsGroup implements Group {
     ) {
         Map<Integer, Set<String>> tasks = currentStandbyTaskToProcessIds.get(subtopologyId);
         if (tasks == null) {
-            return Collections.emptySet();
+            return Set.of();
         } else {
-            return tasks.getOrDefault(taskId, Collections.emptySet());
+            return tasks.getOrDefault(taskId, Set.of());
         }
     }

@@ -529,9 +529,9 @@ public class StreamsGroup implements Group {
     ) {
         Map<Integer, Set<String>> tasks = currentWarmupTaskToProcessIds.get(subtopologyId);
         if (tasks == null) {
-            return Collections.emptySet();
+            return Set.of();
         } else {
-            return tasks.getOrDefault(taskId, Collections.emptySet());
+            return tasks.getOrDefault(taskId, Set.of());
         }
     }

@@ -341,7 +341,7 @@ public record StreamsGroupMember(String memberId,
                 entry -> new StreamsGroupDescribeResponseData.KeyValue()
                     .setKey(entry.getKey())
                     .setValue(entry.getValue())
-            ).collect(Collectors.toList()))
+            ).toList())
             .setProcessId(processId)
             .setTopologyEpoch(topologyEpoch)
             .setUserEndpoint(
@@ -50,9 +50,9 @@ public record TasksTuple(Map<String, Set<Integer>> activeTasks,
      * An empty task tuple.
      */
     public static final TasksTuple EMPTY = new TasksTuple(
-        Collections.emptyMap(),
-        Collections.emptyMap(),
-        Collections.emptyMap()
+        Map.of(),
+        Map.of(),
+        Map.of()
     );

     /**
@@ -16,7 +16,6 @@
  */
 package org.apache.kafka.coordinator.group.streams.assignor;

-import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
@@ -37,6 +36,6 @@ public record MemberAssignment(Map<String, Set<Integer>> activeTasks,
     }

     public static MemberAssignment empty() {
-        return new MemberAssignment(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap());
+        return new MemberAssignment(Map.of(), Map.of(), Map.of());
     }
 }
@@ -23,7 +23,6 @@ import java.util.Collections;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.stream.Collectors;

 /**
  * Captures the properties required for configuring the internal topics we create for changelogs and repartitioning etc.
@@ -57,12 +56,11 @@ public record ConfiguredInternalTopic(String name,
             .setPartitions(numberOfPartitions)
             .setReplicationFactor(replicationFactor.orElse((short) 0))
             .setTopicConfigs(
-                topicConfigs != null ?
-                    topicConfigs.entrySet().stream().map(
-                        y -> new StreamsGroupDescribeResponseData.KeyValue()
-                            .setKey(y.getKey())
-                            .setValue(y.getValue())
-                    ).collect(Collectors.toList()) : null
+                topicConfigs.entrySet().stream().map(
+                    y -> new StreamsGroupDescribeResponseData.KeyValue()
+                        .setKey(y.getKey())
+                        .setValue(y.getValue())
+                ).toList()
             );
     }

@@ -21,7 +21,6 @@ import org.apache.kafka.common.message.StreamsGroupDescribeResponseData;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
-import java.util.stream.Collectors;

 /**
  * Internal representation of a subtopology.
@@ -51,12 +50,12 @@ public record ConfiguredSubtopology(Set<String> sourceTopics,
     public StreamsGroupDescribeResponseData.Subtopology asStreamsGroupDescribeSubtopology(String subtopologyId) {
         return new StreamsGroupDescribeResponseData.Subtopology()
             .setSubtopologyId(subtopologyId)
-            .setSourceTopics(sourceTopics.stream().sorted().collect(Collectors.toList()))
-            .setRepartitionSinkTopics(repartitionSinkTopics.stream().sorted().collect(Collectors.toList()))
+            .setSourceTopics(sourceTopics.stream().sorted().toList())
+            .setRepartitionSinkTopics(repartitionSinkTopics.stream().sorted().toList())
             .setRepartitionSourceTopics(repartitionSourceTopics.values().stream()
-                .map(ConfiguredInternalTopic::asStreamsGroupDescribeTopicInfo).sorted().collect(Collectors.toList()))
+                .map(ConfiguredInternalTopic::asStreamsGroupDescribeTopicInfo).sorted().toList())
             .setStateChangelogTopics(stateChangelogTopics.values().stream()
-                .map(ConfiguredInternalTopic::asStreamsGroupDescribeTopicInfo).sorted().collect(Collectors.toList()));
+                .map(ConfiguredInternalTopic::asStreamsGroupDescribeTopicInfo).sorted().toList());
     }

 }
@@ -19,12 +19,11 @@ package org.apache.kafka.coordinator.group.streams.topics;
 import org.apache.kafka.common.message.CreateTopicsRequestData.CreatableTopic;
 import org.apache.kafka.common.message.StreamsGroupDescribeResponseData;

-import java.util.Collections;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
 import java.util.SortedMap;
-import java.util.stream.Collectors;

 /**
  * This class captures the result of taking a topology definition sent by the client and using the current state of the topics inside the
@@ -68,7 +67,7 @@ public record ConfiguredTopology(int topologyEpoch,
             .setSubtopologies(
                 subtopologies.map(stringConfiguredSubtopologyMap -> stringConfiguredSubtopologyMap.entrySet().stream().map(
                     entry -> entry.getValue().asStreamsGroupDescribeSubtopology(entry.getKey())
-                ).collect(Collectors.toList())).orElse(Collections.emptyList())
+                ).toList()).orElse(List.of())
             );
     }

@@ -22,7 +22,6 @@ import org.apache.kafka.common.utils.LogContext;
 import org.slf4j.Logger;

 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -74,7 +73,7 @@ public class CopartitionedTopicsEnforcer {
                                              final Set<String> flexibleRepartitionTopics) throws StreamsInvalidTopologyException {
         if (copartitionedTopics.isEmpty()) {
             log.debug("Ignoring unexpected empty copartitioned topics set.");
-            return Collections.emptyMap();
+            return Map.of();
         }
         final Map<String, Integer> returnedPartitionCounts = new HashMap<>();

@@ -27,7 +27,6 @@ import org.apache.kafka.coordinator.group.streams.TopicMetadata;
 import org.slf4j.Logger;

 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -290,7 +289,7 @@ public class InternalTopicManager {
             topicInfo.topicConfigs() != null ? topicInfo.topicConfigs().stream()
                 .collect(Collectors.toMap(StreamsGroupTopologyValue.TopicConfig::key,
                     StreamsGroupTopologyValue.TopicConfig::value))
-                : Collections.emptyMap()
+                : Map.of()
         );
     }

@@ -304,6 +303,6 @@ public class InternalTopicManager {
                 copartitionGroup.repartitionSourceTopics().stream()
                     .map(i -> subtopology.repartitionSourceTopics().get(i).name())
             ).collect(Collectors.toSet())
-        ).collect(Collectors.toList());
+        ).toList();
     }
 }
@@ -41,7 +41,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
-import java.util.stream.Collectors;

 import static org.junit.jupiter.api.AssertionFailureBuilder.assertionFailure;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -115,12 +114,12 @@ public class Assertions {
                 slice
                     .stream()
                     .sorted(Comparator.comparing(Object::toString))
-                    .collect(Collectors.toList()),
+                    .toList(),
                 actualRecords
                     .subList(j, j + slice.size())
                     .stream()
                     .sorted(Comparator.comparing(Object::toString))
-                    .collect(Collectors.toList())
+                    .toList()
             );

             j += slice.size();
@@ -34,7 +34,6 @@ import org.apache.kafka.coordinator.group.assignor.UniformAssignor;
 import org.junit.jupiter.api.Test;

 import java.time.Duration;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -144,7 +143,7 @@ public class GroupCoordinatorConfigTest {
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_SESSION_TIMEOUT_MS_CONFIG, 555);
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 200);
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MAX_SIZE_CONFIG, 55);
-        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, Collections.singletonList(RangeAssignor.class));
+        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, List.of(RangeAssignor.class));
         configs.put(GroupCoordinatorConfig.OFFSETS_TOPIC_SEGMENT_BYTES_CONFIG, 2222);
         configs.put(GroupCoordinatorConfig.OFFSET_METADATA_MAX_SIZE_CONFIG, 3333);
         configs.put(GroupCoordinatorConfig.GROUP_MAX_SIZE_CONFIG, 60);
@@ -243,7 +242,7 @@ public class GroupCoordinatorConfigTest {
             assertThrows(ConfigException.class, () -> createConfig(configs)).getMessage());

         configs.clear();
-        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, Collections.singletonList(Object.class));
+        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, List.of(Object.class));
         assertEquals("class java.lang.Object is not an instance of org.apache.kafka.coordinator.group.api.assignor.ConsumerGroupPartitionAssignor",
             assertThrows(KafkaException.class, () -> createConfig(configs)).getMessage());

@@ -297,7 +296,7 @@ public class GroupCoordinatorConfigTest {
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MIN_HEARTBEAT_INTERVAL_MS_CONFIG, 5);
         configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_MAX_SIZE_CONFIG, Integer.MAX_VALUE);
-        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, Collections.singletonList(RangeAssignor.class));
+        configs.put(GroupCoordinatorConfig.CONSUMER_GROUP_ASSIGNORS_CONFIG, List.of(RangeAssignor.class));
         configs.put(GroupCoordinatorConfig.OFFSETS_TOPIC_SEGMENT_BYTES_CONFIG, 1000);
         configs.put(GroupCoordinatorConfig.OFFSET_METADATA_MAX_SIZE_CONFIG, offsetMetadataMaxSize);
         configs.put(GroupCoordinatorConfig.GROUP_MAX_SIZE_CONFIG, Integer.MAX_VALUE);
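Editor's note on the `Collections.singletonList(...)` to `List.of(...)` changes above: both produce an immutable one-element list, with one corner-case difference — `List.of` rejects null elements with a `NullPointerException`, which the legacy factory accepted. None of the arguments in these tests are null, so the substitution is behaviour-preserving. A minimal sketch of the difference (illustration only, not part of the patch; assumes nothing beyond the JDK, and the class name is hypothetical):

    import java.util.Collections;
    import java.util.List;

    public class SingletonVsListOfDemo {
        public static void main(String[] args) {
            // Equal, immutable one-element lists for non-null values.
            System.out.println(Collections.singletonList("x").equals(List.of("x"))); // true

            // The legacy factory tolerates a null element...
            System.out.println(Collections.singletonList(null).size()); // 1

            // ...while List.of rejects it.
            try {
                List.of((String) null);
            } catch (NullPointerException e) {
                System.out.println("List.of(null) throws NullPointerException");
            }
        }
    }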
@@ -55,7 +55,6 @@ import org.junit.jupiter.api.Test;

 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.LinkedHashMap;
@@ -570,7 +569,7 @@ public class GroupCoordinatorRecordHelpersTest {
         assertThrows(IllegalStateException.class, () ->
             GroupCoordinatorRecordHelpers.newGroupMetadataRecord(
                 group,
-                Collections.emptyMap()
+                Map.of()
             ));
     }

@@ -620,7 +619,7 @@ public class GroupCoordinatorRecordHelpersTest {
         assertThrows(IllegalStateException.class, () ->
             GroupCoordinatorRecordHelpers.newGroupMetadataRecord(
                 group,
-                Collections.emptyMap()
+                Map.of()
             ));
     }

@@ -628,7 +627,7 @@ public class GroupCoordinatorRecordHelpersTest {
     public void testEmptyGroupMetadataRecord() {
         Time time = new MockTime();

-        List<GroupMetadataValue.MemberMetadata> expectedMembers = Collections.emptyList();
+        List<GroupMetadataValue.MemberMetadata> expectedMembers = List.of();

         CoordinatorRecord expectedRecord = CoordinatorRecord.record(
             new GroupMetadataKey()
@@ -113,7 +113,6 @@ import org.mockito.ArgumentMatchers;
 import java.net.InetAddress;
 import java.time.Duration;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.OptionalInt;
@@ -289,7 +288,7 @@ public class GroupCoordinatorServiceTest {
         assertEquals(
             new StreamsGroupHeartbeatResult(
                 new StreamsGroupHeartbeatResponseData().setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()),
-                Collections.emptyMap()
+                Map.of()
             ),
             future.get()
         );
@@ -314,7 +313,7 @@ public class GroupCoordinatorServiceTest {
         )).thenReturn(CompletableFuture.completedFuture(
             new StreamsGroupHeartbeatResult(
                 new StreamsGroupHeartbeatResponseData(),
-                Collections.emptyMap()
+                Map.of()
             )
         ));

@@ -323,7 +322,7 @@ public class GroupCoordinatorServiceTest {
             request
         );

-        assertEquals(new StreamsGroupHeartbeatResult(new StreamsGroupHeartbeatResponseData(), Collections.emptyMap()), future.get(5, TimeUnit.SECONDS));
+        assertEquals(new StreamsGroupHeartbeatResult(new StreamsGroupHeartbeatResponseData(), Map.of()), future.get(5, TimeUnit.SECONDS));
     }

     private static Stream<Arguments> testStreamsGroupHeartbeatWithExceptionSource() {
@@ -376,7 +375,7 @@ public class GroupCoordinatorServiceTest {
                 new StreamsGroupHeartbeatResponseData()
                     .setErrorCode(expectedErrorCode)
                     .setErrorMessage(expectedErrorMessage),
-                Collections.emptyMap()
+                Map.of()
             ),
             future.get(5, TimeUnit.SECONDS)
         );
@@ -880,9 +879,9 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("list-groups"),
             ArgumentMatchers.any()
         )).thenReturn(Arrays.asList(
-            CompletableFuture.completedFuture(Collections.singletonList(expectedResults.get(0))),
-            CompletableFuture.completedFuture(Collections.singletonList(expectedResults.get(1))),
-            CompletableFuture.completedFuture(Collections.singletonList(expectedResults.get(2)))
+            CompletableFuture.completedFuture(List.of(expectedResults.get(0))),
+            CompletableFuture.completedFuture(List.of(expectedResults.get(1))),
+            CompletableFuture.completedFuture(List.of(expectedResults.get(2)))
         ));

         CompletableFuture<ListGroupsResponseData> responseFuture = service.listGroups(
@@ -920,8 +919,8 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("list-groups"),
             ArgumentMatchers.any()
         )).thenReturn(Arrays.asList(
-            CompletableFuture.completedFuture(Collections.singletonList(expectedResults.get(0))),
-            CompletableFuture.completedFuture(Collections.singletonList(expectedResults.get(1))),
+            CompletableFuture.completedFuture(List.of(expectedResults.get(0))),
+            CompletableFuture.completedFuture(List.of(expectedResults.get(1))),
             FutureUtils.failedFuture(new NotCoordinatorException(""))
         ));

@@ -947,8 +946,8 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("list-groups"),
             ArgumentMatchers.any()
         )).thenReturn(Arrays.asList(
-            CompletableFuture.completedFuture(Collections.emptyList()),
-            CompletableFuture.completedFuture(Collections.emptyList()),
+            CompletableFuture.completedFuture(List.of()),
+            CompletableFuture.completedFuture(List.of()),
             FutureUtils.failedFuture(new CoordinatorLoadInProgressException(""))
         ));

@@ -1033,7 +1032,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("describe-groups"),
             ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup1)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup1)));

         CompletableFuture<Object> describedGroupFuture = new CompletableFuture<>();
         when(runtime.scheduleReadOperation(
@@ -1046,7 +1045,7 @@ public class GroupCoordinatorServiceTest {
             service.describeGroups(requestContext(ApiKeys.DESCRIBE_GROUPS), Arrays.asList("group-id-1", "group-id-2"));

         assertFalse(future.isDone());
-        describedGroupFuture.complete(Collections.singletonList(describedGroup2));
+        describedGroupFuture.complete(List.of(describedGroup2));
         assertEquals(expectedDescribedGroups, future.get());
     }

@@ -1074,7 +1073,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("describe-groups"),
             ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup)));

         CompletableFuture<List<DescribeGroupsResponseData.DescribedGroup>> future =
             service.describeGroups(requestContext(ApiKeys.DESCRIBE_GROUPS), Arrays.asList("", null));
@@ -1102,10 +1101,10 @@ public class GroupCoordinatorServiceTest {
         ));

         CompletableFuture<List<DescribeGroupsResponseData.DescribedGroup>> future =
-            service.describeGroups(requestContext(ApiKeys.DESCRIBE_GROUPS), Collections.singletonList("group-id"));
+            service.describeGroups(requestContext(ApiKeys.DESCRIBE_GROUPS), List.of("group-id"));

         assertEquals(
-            Collections.singletonList(new DescribeGroupsResponseData.DescribedGroup()
+            List.of(new DescribeGroupsResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
             ),
@@ -1123,11 +1122,11 @@ public class GroupCoordinatorServiceTest {

         CompletableFuture<List<DescribeGroupsResponseData.DescribedGroup>> future = service.describeGroups(
             requestContext(ApiKeys.DESCRIBE_GROUPS),
-            Collections.singletonList("group-id")
+            List.of("group-id")
         );

         assertEquals(
-            Collections.singletonList(new DescribeGroupsResponseData.DescribedGroup()
+            List.of(new DescribeGroupsResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
             ),
@@ -1157,17 +1156,17 @@ public class GroupCoordinatorServiceTest {
             .setGroupId("group");
         if (!fetchAllOffsets) {
             request
-                .setTopics(Collections.singletonList(new OffsetFetchRequestData.OffsetFetchRequestTopics()
+                .setTopics(List.of(new OffsetFetchRequestData.OffsetFetchRequestTopics()
                     .setName("foo")
-                    .setPartitionIndexes(Collections.singletonList(0))));
+                    .setPartitionIndexes(List.of(0))));
         }

         OffsetFetchResponseData.OffsetFetchResponseGroup response =
             new OffsetFetchResponseData.OffsetFetchResponseGroup()
                 .setGroupId("group")
-                .setTopics(Collections.singletonList(new OffsetFetchResponseData.OffsetFetchResponseTopics()
+                .setTopics(List.of(new OffsetFetchResponseData.OffsetFetchResponseTopics()
                     .setName("foo")
-                    .setPartitions(Collections.singletonList(new OffsetFetchResponseData.OffsetFetchResponsePartitions()
+                    .setPartitions(List.of(new OffsetFetchResponseData.OffsetFetchResponsePartitions()
                         .setPartitionIndex(0)
                         .setCommittedOffset(100L)))));

@@ -1219,9 +1218,9 @@ public class GroupCoordinatorServiceTest {
             .setGroupId("group");
         if (!fetchAllOffsets) {
             request
-                .setTopics(Collections.singletonList(new OffsetFetchRequestData.OffsetFetchRequestTopics()
+                .setTopics(List.of(new OffsetFetchRequestData.OffsetFetchRequestTopics()
                     .setName("foo")
-                    .setPartitionIndexes(Collections.singletonList(0))));
+                    .setPartitionIndexes(List.of(0))));
         }

         TriFunction<RequestContext, OffsetFetchRequestData.OffsetFetchRequestGroup, Boolean, CompletableFuture<OffsetFetchResponseData.OffsetFetchResponseGroup>> fetchOffsets =
@@ -1269,9 +1268,9 @@ public class GroupCoordinatorServiceTest {
             .setGroupId("group");
         if (!fetchAllOffsets) {
             request
-                .setTopics(Collections.singletonList(new OffsetFetchRequestData.OffsetFetchRequestTopics()
+                .setTopics(List.of(new OffsetFetchRequestData.OffsetFetchRequestTopics()
                     .setName("foo")
-                    .setPartitionIndexes(Collections.singletonList(0))));
+                    .setPartitionIndexes(List.of(0))));
         }

         when(runtime.scheduleWriteOperation(
@@ -1422,7 +1421,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("consumer-group-describe"),
             ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup1)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup1)));

         CompletableFuture<Object> describedGroupFuture = new CompletableFuture<>();
         when(runtime.scheduleReadOperation(
@@ -1435,7 +1434,7 @@ public class GroupCoordinatorServiceTest {
             service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), Arrays.asList("group-id-1", "group-id-2"));

         assertFalse(future.isDone());
-        describedGroupFuture.complete(Collections.singletonList(describedGroup2));
+        describedGroupFuture.complete(List.of(describedGroup2));
         assertEquals(expectedDescribedGroups, future.get());
     }

@@ -1463,7 +1462,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("consumer-group-describe"),
             ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup)));

         CompletableFuture<List<ConsumerGroupDescribeResponseData.DescribedGroup>> future =
             service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), Arrays.asList("", null));
@@ -1490,10 +1489,10 @@ public class GroupCoordinatorServiceTest {
         ));

         CompletableFuture<List<ConsumerGroupDescribeResponseData.DescribedGroup>> future =
-            service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), Collections.singletonList("group-id"));
+            service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), List.of("group-id"));

         assertEquals(
-            Collections.singletonList(new ConsumerGroupDescribeResponseData.DescribedGroup()
+            List.of(new ConsumerGroupDescribeResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
             ),
@@ -1517,10 +1516,10 @@ public class GroupCoordinatorServiceTest {
         ));

         CompletableFuture<List<ConsumerGroupDescribeResponseData.DescribedGroup>> future =
-            service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), Collections.singletonList("group-id"));
+            service.consumerGroupDescribe(requestContext(ApiKeys.CONSUMER_GROUP_DESCRIBE), List.of("group-id"));

         assertEquals(
-            Collections.singletonList(new ConsumerGroupDescribeResponseData.DescribedGroup()
+            List.of(new ConsumerGroupDescribeResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
             ),
@@ -1551,7 +1550,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("streams-group-describe"),
             ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup1)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup1)));

         CompletableFuture<Object> describedGroupFuture = new CompletableFuture<>();
         when(runtime.scheduleReadOperation(
@@ -1564,7 +1563,7 @@ public class GroupCoordinatorServiceTest {
             service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), Arrays.asList("group-id-1", "group-id-2"));

         assertFalse(future.isDone());
-        describedGroupFuture.complete(Collections.singletonList(describedGroup2));
+        describedGroupFuture.complete(List.of(describedGroup2));
         assertEquals(expectedDescribedGroups, future.get());
     }

@@ -1592,7 +1591,7 @@ public class GroupCoordinatorServiceTest {
             ArgumentMatchers.eq("streams-group-describe"),
             ArgumentMatchers.eq(new TopicPartition("__consumer_offsets", 0)),
             ArgumentMatchers.any()
-        )).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup)));
+        )).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup)));

         CompletableFuture<List<StreamsGroupDescribeResponseData.DescribedGroup>> future =
             service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), Arrays.asList("", null));
@@ -1619,10 +1618,10 @@ public class GroupCoordinatorServiceTest {
         ));

         CompletableFuture<List<StreamsGroupDescribeResponseData.DescribedGroup>> future =
-            service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), Collections.singletonList("group-id"));
+            service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), List.of("group-id"));

         assertEquals(
-            Collections.singletonList(new StreamsGroupDescribeResponseData.DescribedGroup()
+            List.of(new StreamsGroupDescribeResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
             ),
@@ -1646,10 +1645,10 @@ public class GroupCoordinatorServiceTest {
         ));

         CompletableFuture<List<StreamsGroupDescribeResponseData.DescribedGroup>> future =
-            service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), Collections.singletonList("group-id"));
+            service.streamsGroupDescribe(requestContext(ApiKeys.STREAMS_GROUP_DESCRIBE), List.of("group-id"));

         assertEquals(
-            Collections.singletonList(new StreamsGroupDescribeResponseData.DescribedGroup()
+            List.of(new StreamsGroupDescribeResponseData.DescribedGroup()
                 .setGroupId("group-id")
                 .setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
             ),
@@ -1667,10 +1666,10 @@ public class GroupCoordinatorServiceTest {
             .build(true);

         OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
-            new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(Collections.singletonList(
+            new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(List.of(
                 new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
                     .setName(TOPIC_NAME)
-                    .setPartitions(Collections.singletonList(
+                    .setPartitions(List.of(
                         new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(0)
                     ))
             ).iterator());
@@ -1679,11 +1678,11 @@ public class GroupCoordinatorServiceTest {
             .setTopics(requestTopicCollection);

         OffsetDeleteResponseData.OffsetDeleteResponsePartitionCollection responsePartitionCollection =
-            new OffsetDeleteResponseData.OffsetDeleteResponsePartitionCollection(Collections.singletonList(
+            new OffsetDeleteResponseData.OffsetDeleteResponsePartitionCollection(List.of(
                 new OffsetDeleteResponseData.OffsetDeleteResponsePartition().setPartitionIndex(0)
             ).iterator());
         OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection responseTopicCollection =
-            new OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection(Collections.singletonList(
+            new OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection(List.of(
                 new OffsetDeleteResponseData.OffsetDeleteResponseTopic().setPartitions(responsePartitionCollection)
             ).iterator());
         OffsetDeleteResponseData response = new OffsetDeleteResponseData()
@ -1716,10 +1715,10 @@ public class GroupCoordinatorServiceTest {
|
||||||
.build(true);
|
.build(true);
|
||||||
|
|
||||||
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(Collections.singletonList(
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(List.of(
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(0)
|
new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(0)
|
||||||
))
|
))
|
||||||
).iterator());
|
).iterator());
|
||||||
|
@ -1760,10 +1759,10 @@ public class GroupCoordinatorServiceTest {
|
||||||
.build(true);
|
.build(true);
|
||||||
|
|
||||||
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(Collections.singletonList(
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(List.of(
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(0)
|
new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(0)
|
||||||
))
|
))
|
||||||
).iterator());
|
).iterator());
|
||||||
|
@ -2272,12 +2271,12 @@ public class GroupCoordinatorServiceTest {
|
||||||
CompletableFuture<DeleteGroupsResponseData.DeletableGroupResultCollection> future =
|
CompletableFuture<DeleteGroupsResponseData.DeletableGroupResultCollection> future =
|
||||||
service.deleteGroups(
|
service.deleteGroups(
|
||||||
requestContext(ApiKeys.DELETE_GROUPS),
|
requestContext(ApiKeys.DELETE_GROUPS),
|
||||||
Collections.singletonList("group-id"),
|
List.of("group-id"),
|
||||||
BufferSupplier.NO_CACHING
|
BufferSupplier.NO_CACHING
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new DeleteGroupsResponseData.DeletableGroupResultCollection(Collections.singletonList(
|
new DeleteGroupsResponseData.DeletableGroupResultCollection(List.of(
|
||||||
new DeleteGroupsResponseData.DeletableGroupResult()
|
new DeleteGroupsResponseData.DeletableGroupResult()
|
||||||
.setGroupId("group-id")
|
.setGroupId("group-id")
|
||||||
.setErrorCode(expectedErrorCode)
|
.setErrorCode(expectedErrorCode)
|
||||||
|
@ -2297,13 +2296,13 @@ public class GroupCoordinatorServiceTest {
|
||||||
|
|
||||||
CompletableFuture<DeleteGroupsResponseData.DeletableGroupResultCollection> future = service.deleteGroups(
|
CompletableFuture<DeleteGroupsResponseData.DeletableGroupResultCollection> future = service.deleteGroups(
|
||||||
requestContext(ApiKeys.DELETE_GROUPS),
|
requestContext(ApiKeys.DELETE_GROUPS),
|
||||||
Collections.singletonList("foo"),
|
List.of("foo"),
|
||||||
BufferSupplier.NO_CACHING
|
BufferSupplier.NO_CACHING
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new DeleteGroupsResponseData.DeletableGroupResultCollection(
|
new DeleteGroupsResponseData.DeletableGroupResultCollection(
|
||||||
Collections.singletonList(new DeleteGroupsResponseData.DeletableGroupResult()
|
List.of(new DeleteGroupsResponseData.DeletableGroupResult()
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
|
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
|
||||||
).iterator()
|
).iterator()
|
||||||
|
@ -2325,9 +2324,9 @@ public class GroupCoordinatorServiceTest {
|
||||||
.setTransactionalId("transactional-id")
|
.setTransactionalId("transactional-id")
|
||||||
.setMemberId("member-id")
|
.setMemberId("member-id")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
.setTopics(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
.setPartitions(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100)))));
|
.setCommittedOffset(100)))));
|
||||||
|
|
||||||
|
@ -2339,9 +2338,9 @@ public class GroupCoordinatorServiceTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new TxnOffsetCommitResponseData()
|
new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
.setTopics(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
.setPartitions(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()))))),
|
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code()))))),
|
||||||
future.get()
|
future.get()
|
||||||
|
@ -2363,9 +2362,9 @@ public class GroupCoordinatorServiceTest {
|
||||||
.setTransactionalId("transactional-id")
|
.setTransactionalId("transactional-id")
|
||||||
.setMemberId("member-id")
|
.setMemberId("member-id")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
.setTopics(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
.setPartitions(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100)))));
|
.setCommittedOffset(100)))));
|
||||||
|
|
||||||
|
@ -2377,9 +2376,9 @@ public class GroupCoordinatorServiceTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new TxnOffsetCommitResponseData()
|
new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
.setTopics(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
.setPartitions(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.INVALID_GROUP_ID.code()))))),
|
.setErrorCode(Errors.INVALID_GROUP_ID.code()))))),
|
||||||
future.get()
|
future.get()
|
||||||
|
@ -2402,16 +2401,16 @@ public class GroupCoordinatorServiceTest {
|
||||||
.setProducerEpoch((short) 5)
|
.setProducerEpoch((short) 5)
|
||||||
.setMemberId("member-id")
|
.setMemberId("member-id")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
.setTopics(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
.setPartitions(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100)))));
|
.setCommittedOffset(100)))));
|
||||||
|
|
||||||
TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData()
|
TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
.setTopics(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
.setPartitions(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())))));
|
.setErrorCode(Errors.NONE.code())))));
|
||||||
|
|
||||||
|
@ -2457,16 +2456,16 @@ public class GroupCoordinatorServiceTest {
|
||||||
.setProducerEpoch((short) 5)
|
.setProducerEpoch((short) 5)
|
||||||
.setMemberId("member-id")
|
.setMemberId("member-id")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
.setTopics(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
.setPartitions(List.of(new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100)))));
|
.setCommittedOffset(100)))));
|
||||||
|
|
||||||
TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData()
|
TxnOffsetCommitResponseData response = new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
.setTopics(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName(TOPIC_NAME)
|
.setName(TOPIC_NAME)
|
||||||
.setPartitions(Collections.singletonList(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
.setPartitions(List.of(new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(expectedError.code())))));
|
.setErrorCode(expectedError.code())))));
|
||||||
|
|
||||||
|
@ -2582,7 +2581,7 @@ public class GroupCoordinatorServiceTest {
|
||||||
// The exception is logged and swallowed.
|
// The exception is logged and swallowed.
|
||||||
assertDoesNotThrow(() ->
|
assertDoesNotThrow(() ->
|
||||||
service.onPartitionsDeleted(
|
service.onPartitionsDeleted(
|
||||||
Collections.singletonList(new TopicPartition("foo", 0)),
|
List.of(new TopicPartition("foo", 0)),
|
||||||
BufferSupplier.NO_CACHING
|
BufferSupplier.NO_CACHING
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
@ -2597,7 +2596,7 @@ public class GroupCoordinatorServiceTest {
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertThrows(CoordinatorNotAvailableException.class, () -> service.onPartitionsDeleted(
|
assertThrows(CoordinatorNotAvailableException.class, () -> service.onPartitionsDeleted(
|
||||||
Collections.singletonList(new TopicPartition("foo", 0)),
|
List.of(new TopicPartition("foo", 0)),
|
||||||
BufferSupplier.NO_CACHING
|
BufferSupplier.NO_CACHING
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
@ -2708,7 +2707,7 @@ public class GroupCoordinatorServiceTest {
|
||||||
ArgumentMatchers.eq("share-group-describe"),
|
ArgumentMatchers.eq("share-group-describe"),
|
||||||
ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
|
ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
|
||||||
ArgumentMatchers.any()
|
ArgumentMatchers.any()
|
||||||
)).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup1)));
|
)).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup1)));
|
||||||
|
|
||||||
CompletableFuture<Object> describedGroupFuture = new CompletableFuture<>();
|
CompletableFuture<Object> describedGroupFuture = new CompletableFuture<>();
|
||||||
when(runtime.scheduleReadOperation(
|
when(runtime.scheduleReadOperation(
|
||||||
|
@ -2721,7 +2720,7 @@ public class GroupCoordinatorServiceTest {
|
||||||
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Arrays.asList("share-group-id-1", "share-group-id-2"));
|
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Arrays.asList("share-group-id-1", "share-group-id-2"));
|
||||||
|
|
||||||
assertFalse(future.isDone());
|
assertFalse(future.isDone());
|
||||||
describedGroupFuture.complete(Collections.singletonList(describedGroup2));
|
describedGroupFuture.complete(List.of(describedGroup2));
|
||||||
assertEquals(expectedDescribedGroups, future.get());
|
assertEquals(expectedDescribedGroups, future.get());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2749,7 +2748,7 @@ public class GroupCoordinatorServiceTest {
|
||||||
ArgumentMatchers.eq("share-group-describe"),
|
ArgumentMatchers.eq("share-group-describe"),
|
||||||
ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
|
ArgumentMatchers.eq(new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)),
|
||||||
ArgumentMatchers.any()
|
ArgumentMatchers.any()
|
||||||
)).thenReturn(CompletableFuture.completedFuture(Collections.singletonList(describedGroup)));
|
)).thenReturn(CompletableFuture.completedFuture(List.of(describedGroup)));
|
||||||
|
|
||||||
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
||||||
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Arrays.asList("", null));
|
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Arrays.asList("", null));
|
||||||
|
@ -2776,10 +2775,10 @@ public class GroupCoordinatorServiceTest {
|
||||||
));
|
));
|
||||||
|
|
||||||
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
||||||
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Collections.singletonList("share-group-id"));
|
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), List.of("share-group-id"));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(new ShareGroupDescribeResponseData.DescribedGroup()
|
List.of(new ShareGroupDescribeResponseData.DescribedGroup()
|
||||||
.setGroupId("share-group-id")
|
.setGroupId("share-group-id")
|
||||||
.setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
|
.setErrorCode(Errors.COORDINATOR_LOAD_IN_PROGRESS.code())
|
||||||
),
|
),
|
||||||
|
@ -2803,10 +2802,10 @@ public class GroupCoordinatorServiceTest {
|
||||||
));
|
));
|
||||||
|
|
||||||
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
CompletableFuture<List<ShareGroupDescribeResponseData.DescribedGroup>> future =
|
||||||
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), Collections.singletonList("share-group-id"));
|
service.shareGroupDescribe(requestContext(ApiKeys.SHARE_GROUP_DESCRIBE), List.of("share-group-id"));
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(new ShareGroupDescribeResponseData.DescribedGroup()
|
List.of(new ShareGroupDescribeResponseData.DescribedGroup()
|
||||||
.setGroupId("share-group-id")
|
.setGroupId("share-group-id")
|
||||||
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
|
.setErrorCode(Errors.COORDINATOR_NOT_AVAILABLE.code())
|
||||||
),
|
),
|
||||||
|
|
|
@ -102,7 +102,6 @@ import org.mockito.Mockito;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@ -151,7 +150,7 @@ public class GroupCoordinatorShardTest {
RequestContext context = requestContext(ApiKeys.CONSUMER_GROUP_HEARTBEAT);
ConsumerGroupHeartbeatRequestData request = new ConsumerGroupHeartbeatRequestData();
CoordinatorResult<ConsumerGroupHeartbeatResponseData, CoordinatorRecord> result = new CoordinatorResult<>(
Collections.emptyList(),
List.of(),
new ConsumerGroupHeartbeatResponseData()
);

@ -183,8 +182,8 @@ public class GroupCoordinatorShardTest {
RequestContext context = requestContext(ApiKeys.STREAMS_GROUP_HEARTBEAT);
StreamsGroupHeartbeatRequestData request = new StreamsGroupHeartbeatRequestData();
CoordinatorResult<StreamsGroupHeartbeatResult, CoordinatorRecord> result = new CoordinatorResult<>(
Collections.emptyList(),
List.of(),
new StreamsGroupHeartbeatResult(new StreamsGroupHeartbeatResponseData(), Collections.emptyMap())
new StreamsGroupHeartbeatResult(new StreamsGroupHeartbeatResponseData(), Map.of())
);

when(groupMetadataManager.streamsGroupHeartbeat(
@ -215,7 +214,7 @@ public class GroupCoordinatorShardTest {
RequestContext context = requestContext(ApiKeys.OFFSET_COMMIT);
OffsetCommitRequestData request = new OffsetCommitRequestData();
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = new CoordinatorResult<>(
Collections.emptyList(),
List.of(),
new OffsetCommitResponseData()
);

@ -247,7 +246,7 @@ public class GroupCoordinatorShardTest {
RequestContext context = requestContext(ApiKeys.TXN_OFFSET_COMMIT);
TxnOffsetCommitRequestData request = new TxnOffsetCommitRequestData();
CoordinatorResult<TxnOffsetCommitResponseData, CoordinatorRecord> result = new CoordinatorResult<>(
Collections.emptyList(),
List.of(),
new TxnOffsetCommitResponseData()
);

@ -1440,7 +1439,7 @@ public class GroupCoordinatorShardTest {
records.add(offsetCommitTombstone);
return true;
});
when(offsetMetadataManager.cleanupExpiredOffsets("other-group-id", Collections.emptyList())).thenReturn(false);
when(offsetMetadataManager.cleanupExpiredOffsets("other-group-id", List.of())).thenReturn(false);
doAnswer(invocation -> {
List<CoordinatorRecord> records = recordsCapture.getValue();
records.add(groupMetadataTombstone);
@ -1551,18 +1550,18 @@ public class GroupCoordinatorShardTest {
metricsShard
);

List<CoordinatorRecord> records = Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord(
List<CoordinatorRecord> records = List.of(GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord(
"group",
"foo",
0
));

when(offsetMetadataManager.onPartitionsDeleted(
Collections.singletonList(new TopicPartition("foo", 0))
List.of(new TopicPartition("foo", 0))
)).thenReturn(records);

CoordinatorResult<Void, CoordinatorRecord> result = coordinator.onPartitionsDeleted(
Collections.singletonList(new TopicPartition("foo", 0))
List.of(new TopicPartition("foo", 0))
);

assertEquals(records, result.records());
@ -1611,7 +1610,7 @@ public class GroupCoordinatorShardTest {
RequestContext context = requestContext(ApiKeys.SHARE_GROUP_HEARTBEAT);
ShareGroupHeartbeatRequestData request = new ShareGroupHeartbeatRequestData();
CoordinatorResult<ShareGroupHeartbeatResponseData, CoordinatorRecord> result = new CoordinatorResult<>(
Collections.emptyList(),
List.of(),
new ShareGroupHeartbeatResponseData()
);

File diff suppressed because it is too large
@ -406,7 +406,7 @@ public class GroupMetadataManagerTestContext {
String protocolType = "consumer";
String protocolName = "range";
int generationId = 0;
List<SyncGroupRequestData.SyncGroupRequestAssignment> assignment = Collections.emptyList();
List<SyncGroupRequestData.SyncGroupRequestAssignment> assignment = List.of();

SyncGroupRequestBuilder withGroupId(String groupId) {
this.groupId = groupId;
@ -1131,7 +1131,7 @@ public class GroupMetadataManagerTestContext {
SyncResult followerSyncResult = sendClassicGroupSync(
syncRequest.setGroupInstanceId(followerInstanceId)
.setMemberId(followerId)
.setAssignments(Collections.emptyList())
.setAssignments(List.of())
);

assertTrue(followerSyncResult.records.isEmpty());
@ -1273,7 +1273,7 @@ public class GroupMetadataManagerTestContext {
List<CoordinatorRecord> expectedRecords = List.of(newGroupMetadataRecord(
group.groupId(),
new GroupMetadataValue()
.setMembers(Collections.emptyList())
.setMembers(List.of())
.setGeneration(group.generationId())
.setLeader(null)
.setProtocolType("consumer")
@ -1498,10 +1498,10 @@ public class GroupMetadataManagerTestContext {

public void verifyDescribeGroupsBeforeV6ReturnsDeadGroup(String groupId) {
List<DescribeGroupsResponseData.DescribedGroup> describedGroups =
describeGroups(Collections.singletonList(groupId), (short) 5);
describeGroups(List.of(groupId), (short) 5);

assertEquals(
Collections.singletonList(new DescribeGroupsResponseData.DescribedGroup()
List.of(new DescribeGroupsResponseData.DescribedGroup()
.setGroupId(groupId)
.setGroupState(DEAD.toString())
),
@ -1527,7 +1527,7 @@ public class GroupMetadataManagerTestContext {
.withProtocolType(protocolType)
.build()
);
assertEquals(Collections.emptyList(), syncResult.records);
assertEquals(List.of(), syncResult.records);
assertFalse(syncResult.syncFuture.isDone());

// Simulate a successful write to log.
@ -73,12 +73,13 @@ import java.net.InetAddress;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.Set;

import static org.apache.kafka.common.requests.OffsetFetchResponse.INVALID_OFFSET;
import static org.apache.kafka.coordinator.group.metrics.GroupCoordinatorMetrics.OFFSET_COMMITS_SENSOR_NAME;
@ -140,7 +141,7 @@ public class OffsetMetadataManagerTest {
.withMetadataImage(metadataImage)
.withGroupCoordinatorMetricsShard(metrics)
.withGroupConfigManager(configManager)
.withConfig(GroupCoordinatorConfig.fromProps(Collections.emptyMap()))
.withConfig(GroupCoordinatorConfig.fromProps(Map.of()))
.build();
}

@ -529,10 +530,10 @@ public class OffsetMetadataManagerTest {
Errors expectedError
) {
final OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(Collections.singletonList(
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(List.of(
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
.setName(topic)
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetDeleteRequestData.OffsetDeleteRequestPartition().setPartitionIndex(partition)
))
).iterator());
@ -546,15 +547,15 @@ public class OffsetMetadataManagerTest {
);

final OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection expectedResponseTopicCollection =
new OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection(Collections.singletonList(
new OffsetDeleteResponseData.OffsetDeleteResponseTopicCollection(List.of(
new OffsetDeleteResponseData.OffsetDeleteResponseTopic()
.setName(topic)
.setPartitions(expectedResponsePartitionCollection)
).iterator());

List<CoordinatorRecord> expectedRecords = Collections.emptyList();
List<CoordinatorRecord> expectedRecords = List.of();
if (hasOffset(groupId, topic, partition) && expectedError == Errors.NONE) {
expectedRecords = Collections.singletonList(
expectedRecords = List.of(
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord(groupId, topic, partition)
);
}
@ -598,10 +599,10 @@ public class OffsetMetadataManagerTest {
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -628,10 +629,10 @@ public class OffsetMetadataManagerTest {
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -657,10 +658,10 @@ public class OffsetMetadataManagerTest {
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -694,10 +695,10 @@ public class OffsetMetadataManagerTest {
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -727,10 +728,10 @@ public class OffsetMetadataManagerTest {
.setMemberId("member")
.setGroupInstanceId("instanceid")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -760,10 +761,10 @@ public class OffsetMetadataManagerTest {
.setMemberId("member")
.setGroupInstanceId("old-instance-id")
.setGenerationIdOrMemberEpoch(10)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -797,10 +798,10 @@ public class OffsetMetadataManagerTest {
.setGroupId("foo")
.setMemberId("member")
.setGenerationIdOrMemberEpoch(1)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -832,10 +833,10 @@ public class OffsetMetadataManagerTest {
assertThrows(UnknownMemberIdException.class, () -> context.commitOffset(
new OffsetCommitRequestData()
.setGroupId("foo")
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -870,10 +871,10 @@ public class OffsetMetadataManagerTest {
.setMemberId("member")
.setGenerationIdOrMemberEpoch(1)
.setRetentionTimeMs(1234L)
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitRequestData.OffsetCommitRequestTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitRequestData.OffsetCommitRequestPartition()
.setPartitionIndex(0)
.setCommittedOffset(100L)
@ -883,10 +884,10 @@ public class OffsetMetadataManagerTest {

assertEquals(
new OffsetCommitResponseData()
.setTopics(Collections.singletonList(
.setTopics(List.of(
new OffsetCommitResponseData.OffsetCommitResponseTopic()
.setName("bar")
.setPartitions(Collections.singletonList(
.setPartitions(List.of(
new OffsetCommitResponseData.OffsetCommitResponsePartition()
.setPartitionIndex(0)
.setErrorCode(Errors.NONE.code())
@ -896,7 +897,7 @@ public class OffsetMetadataManagerTest {
);

assertEquals(
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
"foo",
"bar",
0,
|
@ -938,7 +939,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
// Advance time by half of the session timeout. No timeouts are
|
// Advance time by half of the session timeout. No timeouts are
|
||||||
// expired.
|
// expired.
|
||||||
assertEquals(Collections.emptyList(), context.sleep(5000 / 2));
|
assertEquals(List.of(), context.sleep(5000 / 2));
|
||||||
|
|
||||||
// Commit.
|
// Commit.
|
||||||
context.commitOffset(
|
context.commitOffset(
|
||||||
|
@ -947,10 +948,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(1)
|
.setGenerationIdOrMemberEpoch(1)
|
||||||
.setRetentionTimeMs(1234L)
|
.setRetentionTimeMs(1234L)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -960,7 +961,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
// Advance time by half of the session timeout. No timeouts are
|
// Advance time by half of the session timeout. No timeouts are
|
||||||
// expired.
|
// expired.
|
||||||
assertEquals(Collections.emptyList(), context.sleep(5000 / 2));
|
assertEquals(List.of(), context.sleep(5000 / 2));
|
||||||
|
|
||||||
// Advance time by half of the session timeout again. The timeout should
|
// Advance time by half of the session timeout again. The timeout should
|
||||||
// expire and the member is removed from the group.
|
// expire and the member is removed from the group.
|
||||||
|
@ -977,10 +978,10 @@ public class OffsetMetadataManagerTest {
|
||||||
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = context.commitOffset(
|
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = context.commitOffset(
|
||||||
new OffsetCommitRequestData()
|
new OffsetCommitRequestData()
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -990,10 +991,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new OffsetCommitResponseData()
|
new OffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1003,7 +1004,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1036,10 +1037,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
// Instance id should be ignored.
|
// Instance id should be ignored.
|
||||||
.setGroupInstanceId("instance-id")
|
.setGroupInstanceId("instance-id")
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1049,10 +1050,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new OffsetCommitResponseData()
|
new OffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1062,7 +1063,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1094,10 +1095,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(10)
|
.setGenerationIdOrMemberEpoch(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1128,10 +1129,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(9)
|
.setGenerationIdOrMemberEpoch(9)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1168,10 +1169,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(9)
|
.setGenerationIdOrMemberEpoch(9)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1199,10 +1200,10 @@ public class OffsetMetadataManagerTest {
|
||||||
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = context.commitOffset(
|
CoordinatorResult<OffsetCommitResponseData, CoordinatorRecord> result = context.commitOffset(
|
||||||
new OffsetCommitRequestData()
|
new OffsetCommitRequestData()
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1212,10 +1213,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new OffsetCommitResponseData()
|
new OffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1225,7 +1226,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1263,10 +1264,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(10)
|
.setGenerationIdOrMemberEpoch(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
new OffsetCommitRequestData.OffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1278,10 +1279,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new OffsetCommitResponseData()
|
new OffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
new OffsetCommitResponseData.OffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1291,7 +1292,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1331,7 +1332,7 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(10)
|
.setGenerationIdOrMemberEpoch(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Arrays.asList(
|
.setPartitions(Arrays.asList(
|
||||||
|
@ -1351,7 +1352,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new OffsetCommitResponseData()
|
new OffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
new OffsetCommitResponseData.OffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Arrays.asList(
|
.setPartitions(Arrays.asList(
|
||||||
|
@ -1367,7 +1368,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
1,
|
1,
|
||||||
|
@ -1405,10 +1406,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1420,10 +1421,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new TxnOffsetCommitResponseData()
|
new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1433,7 +1434,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1458,10 +1459,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1487,10 +1488,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1523,10 +1524,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(100)
|
.setGenerationId(100)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1562,10 +1563,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(1)
|
.setGenerationId(1)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1577,10 +1578,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
new TxnOffsetCommitResponseData()
|
new TxnOffsetCommitResponseData()
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
new TxnOffsetCommitResponseData.TxnOffsetCommitResponseTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
new TxnOffsetCommitResponseData.TxnOffsetCommitResponsePartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setErrorCode(Errors.NONE.code())
|
.setErrorCode(Errors.NONE.code())
|
||||||
|
@ -1590,7 +1591,7 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.singletonList(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
List.of(GroupCoordinatorRecordHelpers.newOffsetCommitRecord(
|
||||||
"foo",
|
"foo",
|
||||||
"bar",
|
"bar",
|
||||||
0,
|
0,
|
||||||
|
@ -1615,10 +1616,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1644,10 +1645,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(10)
|
.setGenerationId(10)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1683,10 +1684,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.setGroupId("foo")
|
.setGroupId("foo")
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationId(100)
|
.setGenerationId(100)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
new TxnOffsetCommitRequestData.TxnOffsetCommitRequestPartition()
|
||||||
.setPartitionIndex(0)
|
.setPartitionIndex(0)
|
||||||
.setCommittedOffset(100L)
|
.setCommittedOffset(100L)
|
||||||
|
@ -1714,7 +1715,7 @@ public class OffsetMetadataManagerTest {
|
||||||
.setPartitionIndexes(Arrays.asList(0, 1)),
|
.setPartitionIndexes(Arrays.asList(0, 1)),
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
List<OffsetFetchResponseData.OffsetFetchResponseTopics> expectedResponse = Arrays.asList(
|
List<OffsetFetchResponseData.OffsetFetchResponseTopics> expectedResponse = Arrays.asList(
|
||||||
|
@ -1726,7 +1727,7 @@ public class OffsetMetadataManagerTest {
|
||||||
)),
|
)),
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkInvalidOffsetPartitionResponse(0)
|
mkInvalidOffsetPartitionResponse(0)
|
||||||
))
|
))
|
||||||
);
|
);
|
||||||
|
@ -1744,7 +1745,7 @@ public class OffsetMetadataManagerTest {
|
||||||
.setPartitionIndexes(Arrays.asList(0, 1)),
|
.setPartitionIndexes(Arrays.asList(0, 1)),
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
List<OffsetFetchResponseData.OffsetFetchResponseTopics> expectedResponse = Arrays.asList(
|
List<OffsetFetchResponseData.OffsetFetchResponseTopics> expectedResponse = Arrays.asList(
|
||||||
|
@ -1756,7 +1757,7 @@ public class OffsetMetadataManagerTest {
|
||||||
)),
|
)),
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkInvalidOffsetPartitionResponse(0)
|
mkInvalidOffsetPartitionResponse(0)
|
||||||
))
|
))
|
||||||
);
|
);
|
||||||
|
@ -2001,13 +2002,13 @@ public class OffsetMetadataManagerTest {
|
||||||
);
|
);
|
||||||
group.transitionTo(ClassicGroupState.DEAD);
|
group.transitionTo(ClassicGroupState.DEAD);
|
||||||
|
|
||||||
assertEquals(Collections.emptyList(), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
assertEquals(List.of(), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testFetchAllOffsetsWithUnknownGroup() {
|
public void testFetchAllOffsetsWithUnknownGroup() {
|
||||||
OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build();
|
OffsetMetadataManagerTestContext context = new OffsetMetadataManagerTestContext.Builder().build();
|
||||||
assertEquals(Collections.emptyList(), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
assertEquals(List.of(), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -2029,7 +2030,7 @@ public class OffsetMetadataManagerTest {
|
||||||
assertEquals(5, context.lastWrittenOffset);
|
assertEquals(5, context.lastWrittenOffset);
|
||||||
|
|
||||||
// Fetching with 0 should no offsets.
|
// Fetching with 0 should no offsets.
|
||||||
assertEquals(Collections.emptyList(), context.fetchAllOffsets("group", 0L));
|
assertEquals(List.of(), context.fetchAllOffsets("group", 0L));
|
||||||
|
|
||||||
// Fetching with 1 should return data up to offset 1.
|
// Fetching with 1 should return data up to offset 1.
|
||||||
assertEquals(Arrays.asList(
|
assertEquals(Arrays.asList(
|
||||||
|
@ -2181,25 +2182,25 @@ public class OffsetMetadataManagerTest {
|
||||||
context.commitOffset("group", "foo", 0, 100L, 1);
|
context.commitOffset("group", "foo", 0, 100L, 1);
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = Collections.singletonList(
|
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = List.of(
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(Collections.singletonList(
|
assertEquals(List.of(
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
||||||
))
|
))
|
||||||
), context.fetchOffsets("group", "member", 0, topics, Long.MAX_VALUE));
|
), context.fetchOffsets("group", "member", 0, topics, Long.MAX_VALUE));
|
||||||
|
|
||||||
// Fetch all offsets case.
|
// Fetch all offsets case.
|
||||||
assertEquals(Collections.singletonList(
|
assertEquals(List.of(
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
||||||
))
|
))
|
||||||
), context.fetchAllOffsets("group", "member", 0, Long.MAX_VALUE));
|
), context.fetchAllOffsets("group", "member", 0, Long.MAX_VALUE));
|
||||||
|
@ -2216,25 +2217,25 @@ public class OffsetMetadataManagerTest {
|
||||||
context.commitOffset("group", "foo", 0, 100L, 1);
|
context.commitOffset("group", "foo", 0, 100L, 1);
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = Collections.singletonList(
|
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = List.of(
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(Collections.singletonList(
|
assertEquals(List.of(
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
||||||
))
|
))
|
||||||
), context.fetchOffsets("group", topics, Long.MAX_VALUE));
|
), context.fetchOffsets("group", topics, Long.MAX_VALUE));
|
||||||
|
|
||||||
// Fetch all offsets case.
|
// Fetch all offsets case.
|
||||||
assertEquals(Collections.singletonList(
|
assertEquals(List.of(
|
||||||
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
new OffsetFetchResponseData.OffsetFetchResponseTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitions(Collections.singletonList(
|
.setPartitions(List.of(
|
||||||
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
mkOffsetPartitionResponse(0, 100L, 1, "metadata")
|
||||||
))
|
))
|
||||||
), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
), context.fetchAllOffsets("group", Long.MAX_VALUE));
|
||||||
|
@ -2246,10 +2247,10 @@ public class OffsetMetadataManagerTest {
|
||||||
context.groupMetadataManager.getOrMaybeCreatePersistedConsumerGroup("group", true);
|
context.groupMetadataManager.getOrMaybeCreatePersistedConsumerGroup("group", true);
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = Collections.singletonList(
|
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = List.of(
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
// Fetch offsets cases.
|
// Fetch offsets cases.
|
||||||
|
@ -2272,10 +2273,10 @@ public class OffsetMetadataManagerTest {
|
||||||
group.updateMember(new ConsumerGroupMember.Builder("member").build());
|
group.updateMember(new ConsumerGroupMember.Builder("member").build());
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = Collections.singletonList(
|
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = List.of(
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
|
@ -2296,10 +2297,10 @@ public class OffsetMetadataManagerTest {
|
||||||
.build()
|
.build()
|
||||||
);
|
);
|
||||||
|
|
||||||
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = Collections.singletonList(
|
List<OffsetFetchRequestData.OffsetFetchRequestTopics> topics = List.of(
|
||||||
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
new OffsetFetchRequestData.OffsetFetchRequestTopics()
|
||||||
.setName("foo")
|
.setName("foo")
|
||||||
.setPartitionIndexes(Collections.singletonList(0))
|
.setPartitionIndexes(List.of(0))
|
||||||
);
|
);
|
||||||
|
|
||||||
// Fetch offsets case.
|
// Fetch offsets case.
|
||||||
|
@ -2319,7 +2320,7 @@ public class OffsetMetadataManagerTest {
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
context.commitOffset("foo", "bar", 0, 100L, 0);
|
context.commitOffset("foo", "bar", 0, 100L, 0);
|
||||||
group.setSubscribedTopics(Optional.of(Collections.emptySet()));
|
group.setSubscribedTopics(Optional.of(Set.of()));
|
||||||
context.testOffsetDeleteWith("foo", "bar", 0, Errors.NONE);
|
context.testOffsetDeleteWith("foo", "bar", 0, Errors.NONE);
|
||||||
assertFalse(context.hasOffset("foo", "bar", 0));
|
assertFalse(context.hasOffset("foo", "bar", 0));
|
||||||
}
|
}
|
||||||
|
@ -2331,7 +2332,7 @@ public class OffsetMetadataManagerTest {
|
||||||
"foo",
|
"foo",
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
group.setSubscribedTopics(Optional.of(Collections.singleton("bar")));
|
group.setSubscribedTopics(Optional.of(Set.of("bar")));
|
||||||
context.commitOffset("foo", "bar", 0, 100L, 0);
|
context.commitOffset("foo", "bar", 0, 100L, 0);
|
||||||
|
|
||||||
// Delete the offset whose topic partition doesn't exist.
|
// Delete the offset whose topic partition doesn't exist.
|
||||||
|
@ -2348,7 +2349,7 @@ public class OffsetMetadataManagerTest {
|
||||||
true
|
true
|
||||||
);
|
);
|
||||||
context.commitOffset(10L, "foo", "bar", 0, 100L, 0, context.time.milliseconds());
|
context.commitOffset(10L, "foo", "bar", 0, 100L, 0, context.time.milliseconds());
|
||||||
group.setSubscribedTopics(Optional.of(Collections.emptySet()));
|
group.setSubscribedTopics(Optional.of(Set.of()));
|
||||||
context.testOffsetDeleteWith("foo", "bar", 0, Errors.NONE);
|
context.testOffsetDeleteWith("foo", "bar", 0, Errors.NONE);
|
||||||
assertFalse(context.hasOffset("foo", "bar", 0));
|
assertFalse(context.hasOffset("foo", "bar", 0));
|
||||||
}
|
}
|
||||||
|
@ -2377,7 +2378,7 @@ public class OffsetMetadataManagerTest {
|
||||||
.addRacks()
|
.addRacks()
|
||||||
.build();
|
.build();
|
||||||
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("bar"))
|
.setSubscribedTopicNames(List.of("bar"))
|
||||||
.build();
|
.build();
|
||||||
group.computeSubscriptionMetadata(
|
group.computeSubscriptionMetadata(
|
||||||
group.computeSubscribedTopicNames(null, member1),
|
group.computeSubscribedTopicNames(null, member1),
|
||||||
|
@ -2469,7 +2470,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
List<CoordinatorRecord> records = new ArrayList<>();
|
List<CoordinatorRecord> records = new ArrayList<>();
|
||||||
assertTrue(context.cleanupExpiredOffsets("unknown-group-id", records));
|
assertTrue(context.cleanupExpiredOffsets("unknown-group-id", records));
|
||||||
assertEquals(Collections.emptyList(), records);
|
assertEquals(List.of(), records);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -2500,7 +2501,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
List<CoordinatorRecord> records = new ArrayList<>();
|
List<CoordinatorRecord> records = new ArrayList<>();
|
||||||
assertFalse(context.cleanupExpiredOffsets("group-id", records));
|
assertFalse(context.cleanupExpiredOffsets("group-id", records));
|
||||||
assertEquals(Collections.emptyList(), records);
|
assertEquals(List.of(), records);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -2524,7 +2525,7 @@ public class OffsetMetadataManagerTest {
|
||||||
// firstTopic-0: group is still subscribed to firstTopic. Do not expire.
|
// firstTopic-0: group is still subscribed to firstTopic. Do not expire.
|
||||||
// secondTopic-0: should expire as offset retention has passed.
|
// secondTopic-0: should expire as offset retention has passed.
|
||||||
// secondTopic-1: has not passed offset retention. Do not expire.
|
// secondTopic-1: has not passed offset retention. Do not expire.
|
||||||
List<CoordinatorRecord> expectedRecords = Collections.singletonList(
|
List<CoordinatorRecord> expectedRecords = List.of(
|
||||||
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 0)
|
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 0)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -2540,7 +2541,7 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
// Expire secondTopic-1.
|
// Expire secondTopic-1.
|
||||||
context.time.sleep(500);
|
context.time.sleep(500);
|
||||||
expectedRecords = Collections.singletonList(
|
expectedRecords = List.of(
|
||||||
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 1)
|
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 1)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -2589,7 +2590,7 @@ public class OffsetMetadataManagerTest {
|
||||||
// foo-0 should not be expired because it has a pending transactional offset commit.
|
// foo-0 should not be expired because it has a pending transactional offset commit.
|
||||||
List<CoordinatorRecord> records = new ArrayList<>();
|
List<CoordinatorRecord> records = new ArrayList<>();
|
||||||
assertFalse(context.cleanupExpiredOffsets("group-id", records));
|
assertFalse(context.cleanupExpiredOffsets("group-id", records));
|
||||||
assertEquals(Collections.emptyList(), records);
|
assertEquals(List.of(), records);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static OffsetFetchResponseData.OffsetFetchResponsePartitions mkOffsetPartitionResponse(
|
private static OffsetFetchResponseData.OffsetFetchResponsePartitions mkOffsetPartitionResponse(
|
||||||
|
@ -2972,7 +2973,7 @@ public class OffsetMetadataManagerTest {
|
||||||
.setMemberId("member")
|
.setMemberId("member")
|
||||||
.setGenerationIdOrMemberEpoch(1)
|
.setGenerationIdOrMemberEpoch(1)
|
||||||
.setRetentionTimeMs(1234L)
|
.setRetentionTimeMs(1234L)
|
||||||
.setTopics(Collections.singletonList(
|
.setTopics(List.of(
|
||||||
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
new OffsetCommitRequestData.OffsetCommitRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Arrays.asList(
|
.setPartitions(Arrays.asList(
|
||||||
|
@ -3010,7 +3011,7 @@ public class OffsetMetadataManagerTest {
|
||||||
// firstTopic-0: group is still subscribed to firstTopic. Do not expire.
|
// firstTopic-0: group is still subscribed to firstTopic. Do not expire.
|
||||||
// secondTopic-0: should expire as offset retention has passed.
|
// secondTopic-0: should expire as offset retention has passed.
|
||||||
// secondTopic-1: has not passed offset retention. Do not expire.
|
// secondTopic-1: has not passed offset retention. Do not expire.
|
||||||
List<CoordinatorRecord> expectedRecords = Collections.singletonList(
|
List<CoordinatorRecord> expectedRecords = List.of(
|
||||||
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 0)
|
GroupCoordinatorRecordHelpers.newOffsetCommitTombstoneRecord("group-id", "secondTopic", 0)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -3048,10 +3049,10 @@ public class OffsetMetadataManagerTest {
|
||||||
|
|
||||||
context.commitOffset("foo", "bar", 0, 100L, 0);
|
context.commitOffset("foo", "bar", 0, 100L, 0);
|
||||||
context.commitOffset("foo", "bar", 1, 150L, 0);
|
context.commitOffset("foo", "bar", 1, 150L, 0);
|
||||||
group.setSubscribedTopics(Optional.of(Collections.emptySet()));
|
group.setSubscribedTopics(Optional.of(Set.of()));
|
||||||
|
|
||||||
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection requestTopicCollection =
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(Collections.singletonList(
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopicCollection(List.of(
|
||||||
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
new OffsetDeleteRequestData.OffsetDeleteRequestTopic()
|
||||||
.setName("bar")
|
.setName("bar")
|
||||||
.setPartitions(Arrays.asList(
|
.setPartitions(Arrays.asList(
|
||||||
|
@ -3173,7 +3174,7 @@ public class OffsetMetadataManagerTest {
|
||||||
5000,
|
5000,
|
||||||
"consumer",
|
"consumer",
|
||||||
new JoinGroupRequestData.JoinGroupRequestProtocolCollection(
|
new JoinGroupRequestData.JoinGroupRequestProtocolCollection(
|
||||||
Collections.singletonList(new JoinGroupRequestData.JoinGroupRequestProtocol()
|
List.of(new JoinGroupRequestData.JoinGroupRequestProtocol()
|
||||||
.setName("range")
|
.setName("range")
|
||||||
.setMetadata(new byte[0])
|
.setMetadata(new byte[0])
|
||||||
).iterator()
|
).iterator()
|
||||||
|
|
|
@@ -25,7 +25,6 @@ import org.apache.kafka.coordinator.group.modern.MemberSubscriptionAndAssignment
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

-import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -108,6 +107,6 @@ public class GroupSpecImplTest {
));

assertEquals(topicPartitions, groupSpec.memberAssignment(TEST_MEMBER).partitions());
-assertEquals(Collections.emptyMap(), groupSpec.memberAssignment("unknown-member").partitions());
+assertEquals(Map.of(), groupSpec.memberAssignment("unknown-member").partitions());
}
}

@@ -29,7 +29,6 @@ import org.apache.kafka.coordinator.group.modern.TopicMetadata;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
-import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -63,7 +62,7 @@ public class OptimizedUniformAssignmentBuilderTest {
@Test
public void testOneMemberNoTopicSubscription() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -73,12 +72,12 @@ public class OptimizedUniformAssignmentBuilderTest {
)
);

-Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
+Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
memberA,
new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.emptySet(),
+Set.of(),
Assignment.EMPTY
)
);
@@ -86,7 +85,7 @@ public class OptimizedUniformAssignmentBuilderTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);

GroupAssignment groupAssignment = assignor.assign(
@@ -94,13 +93,13 @@ public class OptimizedUniformAssignmentBuilderTest {
subscribedTopicMetadata
);

-assertEquals(Collections.emptyMap(), groupAssignment.members());
+assertEquals(Map.of(), groupAssignment.members());
}

@Test
public void testOneMemberSubscribedToNonexistentTopic() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -110,12 +109,12 @@ public class OptimizedUniformAssignmentBuilderTest {
)
);

-Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
+Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
memberA,
new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic2Uuid),
+Set.of(topic2Uuid),
Assignment.EMPTY
)
);
@@ -123,7 +122,7 @@ public class OptimizedUniformAssignmentBuilderTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);

assertThrows(PartitionAssignorException.class,
@@ -172,7 +171,7 @@ public class OptimizedUniformAssignmentBuilderTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -199,21 +198,21 @@ public class OptimizedUniformAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

members.put(memberC, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

@@ -226,13 +225,13 @@ public class OptimizedUniformAssignmentBuilderTest {
mkTopicAssignment(topic3Uuid, 1)
));
expectedAssignment.put(memberC,
-Collections.emptyMap()
+Map.of()
);

GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -270,7 +269,7 @@ public class OptimizedUniformAssignmentBuilderTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -563,7 +562,7 @@ public class OptimizedUniformAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic2Uuid),
+Set.of(topic2Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic1Uuid, 0),
mkTopicAssignment(topic2Uuid, 0)
@@ -573,7 +572,7 @@ public class OptimizedUniformAssignmentBuilderTest {
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic2Uuid),
+Set.of(topic2Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic1Uuid, 1),
mkTopicAssignment(topic2Uuid, 1)

@@ -32,7 +32,6 @@ import org.apache.kafka.coordinator.group.modern.TopicMetadata;

import org.junit.jupiter.api.Test;

-import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
@@ -62,7 +61,7 @@ public class RangeAssignorTest {
@Test
public void testOneMemberNoTopic() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -72,12 +71,12 @@ public class RangeAssignorTest {
)
);

-Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
+Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
memberA,
new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.emptySet(),
+Set.of(),
Assignment.EMPTY
)
);
@@ -85,7 +84,7 @@ public class RangeAssignorTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);

GroupAssignment groupAssignment = assignor.assign(
@@ -93,9 +92,9 @@ public class RangeAssignorTest {
subscribedTopicMetadata
);

-Map<String, MemberAssignment> expectedAssignment = Collections.singletonMap(
+Map<String, MemberAssignment> expectedAssignment = Map.of(
memberA,
-new MemberAssignmentImpl(Collections.emptyMap())
+new MemberAssignmentImpl(Map.of())
);

assertEquals(expectedAssignment, groupAssignment.members());
@@ -104,7 +103,7 @@ public class RangeAssignorTest {
@Test
public void testOneMemberSubscribedToNonExistentTopic() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -114,7 +113,7 @@ public class RangeAssignorTest {
)
);

-Map<String, MemberSubscriptionAndAssignmentImpl> members = Collections.singletonMap(
+Map<String, MemberSubscriptionAndAssignmentImpl> members = Map.of(
memberA,
new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
@@ -127,7 +126,7 @@ public class RangeAssignorTest {
GroupSpec groupSpec = new GroupSpecImpl(
members,
HOMOGENEOUS,
-Collections.emptyMap()
+Map.of()
);

assertThrows(PartitionAssignorException.class,
@@ -328,7 +327,7 @@ public class RangeAssignorTest {
@Test
public void testStaticMembership() throws PartitionAssignorException {
SubscribedTopicDescriber subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -342,13 +341,13 @@ public class RangeAssignorTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceA"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceB"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

@@ -368,7 +367,7 @@ public class RangeAssignorTest {
members.put("memberA1", new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceA"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

@@ -398,7 +397,7 @@ public class RangeAssignorTest {
@Test
public void testMixedStaticMembership() throws PartitionAssignorException {
SubscribedTopicDescriber subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -413,13 +412,13 @@ public class RangeAssignorTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceA"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));
members.put(memberC, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceC"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

@@ -427,7 +426,7 @@ public class RangeAssignorTest {
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

@@ -447,7 +446,7 @@ public class RangeAssignorTest {
members.put("memberA1", new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.of("instanceA"),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

@@ -545,7 +544,7 @@ public class RangeAssignorTest {
mkTopicAssignment(topic2Uuid, 1)
));
// Member C shouldn't get any assignment.
-expectedAssignment.put(memberC, Collections.emptyMap());
+expectedAssignment.put(memberC, Map.of());

assertAssignment(expectedAssignment, computedAssignment);
}

@@ -31,7 +31,6 @@ import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -91,7 +90,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
@Test
public void testTwoMembersNoTopicSubscription() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -105,20 +104,20 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.emptySet(),
+Set.of(),
Assignment.EMPTY
));
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.emptySet(),
+Set.of(),
Assignment.EMPTY
));

GroupSpec groupSpec = new TestGroupSpecImpl(
members,
HETEROGENEOUS,
-Collections.emptyMap()
+Map.of()
);

GroupAssignment groupAssignment = assignor.assign(
@@ -126,13 +125,13 @@ public class UniformHeterogeneousAssignmentBuilderTest {
subscribedTopicMetadata
);

-assertEquals(Collections.emptyMap(), groupAssignment.members());
+assertEquals(Map.of(), groupAssignment.members());
}

@Test
public void testTwoMembersSubscribedToNonexistentTopics() {
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(
-Collections.singletonMap(
+Map.of(
topic1Uuid,
new TopicMetadata(
topic1Uuid,
@@ -146,20 +145,20 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic2Uuid),
+Set.of(topic2Uuid),
Assignment.EMPTY
));

GroupSpec groupSpec = new TestGroupSpecImpl(
members,
HETEROGENEOUS,
-Collections.emptyMap()
+Map.of()
);

assertThrows(
@@ -193,14 +192,14 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

GroupSpec groupSpec = new TestGroupSpecImpl(
members,
HETEROGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -240,28 +239,28 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic3Uuid),
+Set.of(topic3Uuid),
Assignment.EMPTY
));

members.put(memberC, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
Assignment.EMPTY
));

GroupSpec groupSpec = new TestGroupSpecImpl(
members,
HETEROGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -276,7 +275,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
mkTopicAssignment(topic3Uuid, 0)
));
expectedAssignment.put(memberB,
-Collections.emptyMap()
+Map.of()
);
expectedAssignment.put(memberC, mkAssignment(
mkTopicAssignment(topic1Uuid, 0, 1)
@@ -309,7 +308,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic1Uuid, 0, 1, 2)
))
@@ -456,7 +455,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic1Uuid, 0, 2),
mkTopicAssignment(topic2Uuid, 0)
@@ -542,7 +541,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic2Uuid),
+Set.of(topic2Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic2Uuid, 3, 4, 5, 6)
))
@@ -594,7 +593,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberA, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.singleton(topic1Uuid),
+Set.of(topic1Uuid),
new Assignment(mkAssignment(
mkTopicAssignment(topic1Uuid, 0, 2),
mkTopicAssignment(topic2Uuid, 1, 3)
@@ -725,14 +724,14 @@ public class UniformHeterogeneousAssignmentBuilderTest {
members.put(memberB, new MemberSubscriptionAndAssignmentImpl(
Optional.empty(),
Optional.empty(),
-Collections.emptySet(),
+Set.of(),
Assignment.EMPTY
));

GroupSpec groupSpec = new TestGroupSpecImpl(
members,
HETEROGENEOUS,
-Collections.emptyMap()
+Map.of()
);
SubscribedTopicDescriberImpl subscribedTopicMetadata = new SubscribedTopicDescriberImpl(topicMetadata);

@@ -745,7 +744,7 @@ public class UniformHeterogeneousAssignmentBuilderTest {
expectedAssignment.put(memberA, mkAssignment(
mkTopicAssignment(topic1Uuid, 0, 1, 2)
));
-expectedAssignment.put(memberB, Collections.emptyMap());
+expectedAssignment.put(memberB, Map.of());

assertAssignment(expectedAssignment, computedAssignment);
}

@@ -57,7 +57,6 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;

import java.util.ArrayList;
-import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
@@ -429,7 +428,7 @@ public class ClassicGroupTest {

group.initNextGeneration();

-assertEquals(Optional.of(Collections.emptySet()), group.subscribedTopics());
+assertEquals(Optional.of(Set.of()), group.subscribedTopics());

protocols = new JoinGroupRequestProtocolCollection();
protocols.add(new JoinGroupRequestProtocol()
@@ -852,7 +851,7 @@ public class ClassicGroupTest {
assertTrue(group.addPendingSyncMember(memberId));
assertEquals(Set.of(memberId), group.allPendingSyncMembers());
group.removePendingSyncMember(memberId);
-assertEquals(Collections.emptySet(), group.allPendingSyncMembers());
+assertEquals(Set.of(), group.allPendingSyncMembers());
}

@Test
@@ -877,7 +876,7 @@ public class ClassicGroupTest {
assertTrue(group.addPendingSyncMember(memberId));
assertEquals(Set.of(memberId), group.allPendingSyncMembers());
group.remove(memberId);
-assertEquals(Collections.emptySet(), group.allPendingSyncMembers());
+assertEquals(Set.of(), group.allPendingSyncMembers());
}

@Test
@@ -903,7 +902,7 @@ public class ClassicGroupTest {
assertTrue(group.addPendingSyncMember(memberId));
assertEquals(Set.of(memberId), group.allPendingSyncMembers());
group.initNextGeneration();
-assertEquals(Collections.emptySet(), group.allPendingSyncMembers());
+assertEquals(Set.of(), group.allPendingSyncMembers());
}

@Test
@@ -1251,7 +1250,7 @@ public class ClassicGroupTest {
group.transitionTo(PREPARING_REBALANCE);
group.initNextGeneration();
assertTrue(group.isInState(EMPTY));
-assertEquals(Optional.of(Collections.emptySet()), group.computeSubscribedTopics());
+assertEquals(Optional.of(Set.of()), group.computeSubscribedTopics());
assertTrue(group.usesConsumerGroupProtocol());
assertFalse(group.isSubscribedToTopic("topic"));

@@ -1458,7 +1457,7 @@ public class ClassicGroupTest {

ClassicGroup classicGroup = ClassicGroup.fromConsumerGroup(
consumerGroup,
-Collections.emptySet(),
+Set.of(),
newMember2,
logContext,
time,

@@ -37,9 +37,9 @@ import com.yammer.metrics.core.MetricsRegistry;
import org.junit.jupiter.api.Test;

import java.util.Arrays;
-import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
+import java.util.Set;
import java.util.stream.IntStream;

import static org.apache.kafka.coordinator.group.metrics.GroupCoordinatorMetrics.CLASSIC_GROUP_COMPLETED_REBALANCES_SENSOR_NAME;
@@ -77,43 +77,43 @@ public class GroupCoordinatorMetricsTest {
metrics.metricName(
"group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("protocol", "classic")),
+Map.of("protocol", "classic")),
metrics.metricName(
"group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("protocol", "consumer")),
+Map.of("protocol", "consumer")),
metrics.metricName(
"consumer-group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("state", ConsumerGroupState.EMPTY.toString())),
+Map.of("state", ConsumerGroupState.EMPTY.toString())),
metrics.metricName(
"consumer-group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("state", ConsumerGroupState.ASSIGNING.toString())),
+Map.of("state", ConsumerGroupState.ASSIGNING.toString())),
metrics.metricName(
"consumer-group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("state", ConsumerGroupState.RECONCILING.toString())),
+Map.of("state", ConsumerGroupState.RECONCILING.toString())),
metrics.metricName(
"consumer-group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("state", ConsumerGroupState.STABLE.toString())),
+Map.of("state", ConsumerGroupState.STABLE.toString())),
metrics.metricName(
"consumer-group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("state", ConsumerGroupState.DEAD.toString())),
+Map.of("state", ConsumerGroupState.DEAD.toString())),
metrics.metricName(
"group-count",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("protocol", Group.GroupType.SHARE.toString())),
+Map.of("protocol", Group.GroupType.SHARE.toString())),
metrics.metricName(
"rebalance-rate",
GroupCoordinatorMetrics.METRICS_GROUP,
-Collections.singletonMap("protocol", Group.GroupType.SHARE.toString())),
+Map.of("protocol", Group.GroupType.SHARE.toString())),
metrics.metricName(
"rebalance-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("protocol", Group.GroupType.SHARE.toString())),
|
Map.of("protocol", Group.GroupType.SHARE.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"share-group-count",
|
"share-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
|
@ -132,33 +132,33 @@ public class GroupCoordinatorMetricsTest {
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"group-count",
|
"group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("protocol", Group.GroupType.STREAMS.toString())),
|
Map.of("protocol", Group.GroupType.STREAMS.toString())),
|
||||||
metrics.metricName("streams-group-rebalance-rate", GroupCoordinatorMetrics.METRICS_GROUP),
|
metrics.metricName("streams-group-rebalance-rate", GroupCoordinatorMetrics.METRICS_GROUP),
|
||||||
metrics.metricName("streams-group-rebalance-count", GroupCoordinatorMetrics.METRICS_GROUP),
|
metrics.metricName("streams-group-rebalance-count", GroupCoordinatorMetrics.METRICS_GROUP),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.EMPTY.toString())),
|
Map.of("state", StreamsGroupState.EMPTY.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.ASSIGNING.toString())),
|
Map.of("state", StreamsGroupState.ASSIGNING.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.RECONCILING.toString())),
|
Map.of("state", StreamsGroupState.RECONCILING.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.STABLE.toString())),
|
Map.of("state", StreamsGroupState.STABLE.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.DEAD.toString())),
|
Map.of("state", StreamsGroupState.DEAD.toString())),
|
||||||
metrics.metricName(
|
metrics.metricName(
|
||||||
"streams-group-count",
|
"streams-group-count",
|
||||||
GroupCoordinatorMetrics.METRICS_GROUP,
|
GroupCoordinatorMetrics.METRICS_GROUP,
|
||||||
Collections.singletonMap("state", StreamsGroupState.NOT_READY.toString()))
|
Map.of("state", StreamsGroupState.NOT_READY.toString()))
|
||||||
));
|
));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
@ -176,7 +176,7 @@ public class GroupCoordinatorMetricsTest {
|
||||||
assertMetricsForTypeEqual(registry, "kafka.coordinator.group", expectedRegistry);
|
assertMetricsForTypeEqual(registry, "kafka.coordinator.group", expectedRegistry);
|
||||||
expectedMetrics.forEach(metricName -> assertTrue(metrics.metrics().containsKey(metricName), metricName + " is missing"));
|
expectedMetrics.forEach(metricName -> assertTrue(metrics.metrics().containsKey(metricName), metricName + " is missing"));
|
||||||
}
|
}
|
||||||
assertMetricsForTypeEqual(registry, "kafka.coordinator.group", Collections.emptySet());
|
assertMetricsForTypeEqual(registry, "kafka.coordinator.group", Set.of());
|
||||||
expectedMetrics.forEach(metricName -> assertFalse(metrics.metrics().containsKey(metricName)));
|
expectedMetrics.forEach(metricName -> assertFalse(metrics.metrics().containsKey(metricName)));
|
||||||
} finally {
|
} finally {
|
||||||
registry.shutdown();
|
registry.shutdown();
|
||||||
|
@ -211,13 +211,13 @@ public class GroupCoordinatorMetricsTest {
|
||||||
Utils.mkEntry(ClassicGroupState.DEAD, 1L)
|
Utils.mkEntry(ClassicGroupState.DEAD, 1L)
|
||||||
));
|
));
|
||||||
|
|
||||||
shard0.setConsumerGroupGauges(Collections.singletonMap(ConsumerGroupState.ASSIGNING, 5L));
|
shard0.setConsumerGroupGauges(Map.of(ConsumerGroupState.ASSIGNING, 5L));
|
||||||
shard1.setConsumerGroupGauges(Map.of(
|
shard1.setConsumerGroupGauges(Map.of(
|
||||||
ConsumerGroupState.RECONCILING, 1L,
|
ConsumerGroupState.RECONCILING, 1L,
|
||||||
ConsumerGroupState.DEAD, 1L
|
ConsumerGroupState.DEAD, 1L
|
||||||
));
|
));
|
||||||
|
|
||||||
shard0.setStreamsGroupGauges(Collections.singletonMap(StreamsGroupState.ASSIGNING, 2L));
|
shard0.setStreamsGroupGauges(Map.of(StreamsGroupState.ASSIGNING, 2L));
|
||||||
shard1.setStreamsGroupGauges(Map.of(
|
shard1.setStreamsGroupGauges(Map.of(
|
||||||
StreamsGroupState.RECONCILING, 1L,
|
StreamsGroupState.RECONCILING, 1L,
|
||||||
StreamsGroupState.DEAD, 1L,
|
StreamsGroupState.DEAD, 1L,
|
||||||
|
@ -240,7 +240,7 @@ public class GroupCoordinatorMetricsTest {
|
||||||
assertGaugeValue(registry, metricName("GroupMetadataManager", "NumGroups"), 9);
|
assertGaugeValue(registry, metricName("GroupMetadataManager", "NumGroups"), 9);
|
||||||
assertGaugeValue(
|
assertGaugeValue(
|
||||||
metrics,
|
metrics,
|
||||||
metrics.metricName("group-count", METRICS_GROUP, Collections.singletonMap("protocol", "classic")),
|
metrics.metricName("group-count", METRICS_GROUP, Map.of("protocol", "classic")),
|
||||||
9
|
9
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -256,7 +256,7 @@ public class GroupCoordinatorMetricsTest {
|
||||||
assertEquals(1, shard1.numOffsets());
|
assertEquals(1, shard1.numOffsets());
|
||||||
assertGaugeValue(
|
assertGaugeValue(
|
||||||
metrics,
|
metrics,
|
||||||
metrics.metricName("group-count", METRICS_GROUP, Collections.singletonMap("protocol", "consumer")),
|
metrics.metricName("group-count", METRICS_GROUP, Map.of("protocol", "consumer")),
|
||||||
7
|
7
|
||||||
);
|
);
|
||||||
assertGaugeValue(registry, metricName("GroupMetadataManager", "NumOffsets"), 7);
|
assertGaugeValue(registry, metricName("GroupMetadataManager", "NumOffsets"), 7);
|
||||||
|
@ -265,7 +265,7 @@ public class GroupCoordinatorMetricsTest {
|
||||||
assertEquals(6, shard1.numShareGroups());
|
assertEquals(6, shard1.numShareGroups());
|
||||||
assertGaugeValue(
|
assertGaugeValue(
|
||||||
metrics,
|
metrics,
|
||||||
metrics.metricName("group-count", METRICS_GROUP, Collections.singletonMap("protocol", "share")),
|
metrics.metricName("group-count", METRICS_GROUP, Map.of("protocol", "share")),
|
||||||
8
|
8
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -273,7 +273,7 @@ public class GroupCoordinatorMetricsTest {
|
||||||
assertEquals(3, shard1.numStreamsGroups());
|
assertEquals(3, shard1.numStreamsGroups());
|
||||||
assertGaugeValue(
|
assertGaugeValue(
|
||||||
metrics,
|
metrics,
|
||||||
metrics.metricName("group-count", METRICS_GROUP, Collections.singletonMap("protocol", "streams")),
|
metrics.metricName("group-count", METRICS_GROUP, Map.of("protocol", "streams")),
|
||||||
5
|
5
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
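For reference, not part of the patch: the single-entry tag maps passed to metrics.metricName in the GroupCoordinatorMetricsTest hunks above only need to match by content, which `Map.of` preserves relative to `Collections.singletonMap`; the visible difference is that `Map.of` rejects null keys and values. A small JDK-only sketch (names are illustrative only):

// Reference sketch, not part of the patch.
import java.util.Collections;
import java.util.Map;

public class TagMapDemo {
    public static void main(String[] args) {
        Map<String, String> tags = Map.of("protocol", "classic");

        // Content-based equality, so assertions and lookups that compare maps by equals are unaffected.
        System.out.println(tags.equals(Collections.singletonMap("protocol", "classic"))); // true

        // Map.of is null-hostile, while singletonMap tolerates null keys and values.
        try {
            Map.of("protocol", (String) null);
        } catch (NullPointerException e) {
            System.out.println("null value rejected: " + e);
        }
    }
}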
@ -30,7 +30,6 @@ import org.apache.kafka.image.TopicsImage;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
@ -323,11 +322,11 @@ public class TargetAssignmentBuilderTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
|
TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
|
||||||
assertEquals(Collections.singletonList(newConsumerGroupTargetAssignmentEpochRecord(
|
assertEquals(List.of(newConsumerGroupTargetAssignmentEpochRecord(
|
||||||
"my-group",
|
"my-group",
|
||||||
20
|
20
|
||||||
)), result.records());
|
)), result.records());
|
||||||
assertEquals(Collections.emptyMap(), result.targetAssignment());
|
assertEquals(Map.of(), result.targetAssignment());
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -362,7 +361,7 @@ public class TargetAssignmentBuilderTest {
|
||||||
|
|
||||||
TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
|
TargetAssignmentBuilder.TargetAssignmentResult result = context.build();
|
||||||
|
|
||||||
assertEquals(Collections.singletonList(newConsumerGroupTargetAssignmentEpochRecord(
|
assertEquals(List.of(newConsumerGroupTargetAssignmentEpochRecord(
|
||||||
"my-group",
|
"my-group",
|
||||||
20
|
20
|
||||||
)), result.records());
|
)), result.records());
|
||||||
|
@ -870,10 +869,10 @@ public class TargetAssignmentBuilderTest {
|
||||||
|
|
||||||
context.addGroupMember("member-2", Arrays.asList("foo", "bar", "zar"), mkAssignment());
|
context.addGroupMember("member-2", Arrays.asList("foo", "bar", "zar"), mkAssignment());
|
||||||
|
|
||||||
context.addGroupMember("member-3", Collections.emptyList(), "foo*", mkAssignment());
|
context.addGroupMember("member-3", List.of(), "foo*", mkAssignment());
|
||||||
|
|
||||||
context.addResolvedRegularExpression("foo*", new ResolvedRegularExpression(
|
context.addResolvedRegularExpression("foo*", new ResolvedRegularExpression(
|
||||||
Collections.singleton("foo"),
|
Set.of("foo"),
|
||||||
10L,
|
10L,
|
||||||
12345L
|
12345L
|
||||||
));
|
));
|
||||||
|
|
|
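For reference, not part of the patch: the `assertEquals(List.of(...), result.records())` pattern in the TargetAssignmentBuilderTest hunks above works because `List` equality compares elements in order regardless of the concrete implementation, so `List.of` is a drop-in for `Collections.singletonList`/`Collections.emptyList` in assertions. A JDK-only sketch (names are illustrative only):

// Reference sketch, not part of the patch.
import java.util.Collections;
import java.util.List;

public class ListFactoryDemo {
    public static void main(String[] args) {
        // Element-wise, order-sensitive equality, independent of the list implementation.
        System.out.println(List.of("record").equals(Collections.singletonList("record"))); // true
        System.out.println(List.of().equals(Collections.emptyList()));                     // true

        // Both spellings are unmodifiable; List.of additionally rejects null elements.
        try {
            List.of((String) null);
        } catch (NullPointerException e) {
            System.out.println("null element rejected: " + e);
        }
    }
}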
@ -22,7 +22,6 @@ import org.apache.kafka.image.TopicsImage;
|
||||||
|
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
|
@ -40,12 +39,12 @@ public class TopicIdsTest {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testTopicsImageCannotBeNull() {
|
public void testTopicsImageCannotBeNull() {
|
||||||
assertThrows(NullPointerException.class, () -> new TopicIds(Collections.emptySet(), (TopicsImage) null));
|
assertThrows(NullPointerException.class, () -> new TopicIds(Set.of(), (TopicsImage) null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testTopicResolverCannotBeNull() {
|
public void testTopicResolverCannotBeNull() {
|
||||||
assertThrows(NullPointerException.class, () -> new TopicIds(Collections.emptySet(), (TopicIds.TopicResolver) null));
|
assertThrows(NullPointerException.class, () -> new TopicIds(Set.of(), (TopicIds.TopicResolver) null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -57,7 +56,7 @@ public class TopicIdsTest {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testIsEmpty() {
|
public void testIsEmpty() {
|
||||||
Set<String> topicNames = Collections.emptySet();
|
Set<String> topicNames = Set.of();
|
||||||
Set<Uuid> topicIds = new TopicIds(topicNames, TopicsImage.EMPTY);
|
Set<Uuid> topicIds = new TopicIds(topicNames, TopicsImage.EMPTY);
|
||||||
assertEquals(topicNames.size(), topicIds.size());
|
assertEquals(topicNames.size(), topicIds.size());
|
||||||
}
|
}
|
||||||
|
@ -176,14 +175,14 @@ public class TopicIdsTest {
|
||||||
@Test
|
@Test
|
||||||
public void testEquals() {
|
public void testEquals() {
|
||||||
Uuid topicId = Uuid.randomUuid();
|
Uuid topicId = Uuid.randomUuid();
|
||||||
TopicIds topicIds1 = new TopicIds(Collections.singleton("topic"),
|
TopicIds topicIds1 = new TopicIds(Set.of("topic"),
|
||||||
new MetadataImageBuilder()
|
new MetadataImageBuilder()
|
||||||
.addTopic(topicId, "topicId", 3)
|
.addTopic(topicId, "topicId", 3)
|
||||||
.build()
|
.build()
|
||||||
.topics()
|
.topics()
|
||||||
);
|
);
|
||||||
|
|
||||||
TopicIds topicIds2 = new TopicIds(Collections.singleton("topic"),
|
TopicIds topicIds2 = new TopicIds(Set.of("topic"),
|
||||||
new MetadataImageBuilder()
|
new MetadataImageBuilder()
|
||||||
.addTopic(topicId, "topicId", 3)
|
.addTopic(topicId, "topicId", 3)
|
||||||
.build()
|
.build()
|
||||||
|
|
|
@ -20,7 +20,6 @@ import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.IntStream;
|
import java.util.stream.IntStream;
|
||||||
|
@ -34,8 +33,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||||
public class UnionSetTest {
|
public class UnionSetTest {
|
||||||
@Test
|
@Test
|
||||||
public void testSetsCannotBeNull() {
|
public void testSetsCannotBeNull() {
|
||||||
assertThrows(NullPointerException.class, () -> new UnionSet<String>(Collections.emptySet(), null));
|
assertThrows(NullPointerException.class, () -> new UnionSet<String>(Set.of(), null));
|
||||||
assertThrows(NullPointerException.class, () -> new UnionSet<String>(null, Collections.emptySet()));
|
assertThrows(NullPointerException.class, () -> new UnionSet<String>(null, Set.of()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -72,21 +71,21 @@ public class UnionSetTest {
|
||||||
|
|
||||||
union = new UnionSet<>(
|
union = new UnionSet<>(
|
||||||
Set.of(1, 2, 3),
|
Set.of(1, 2, 3),
|
||||||
Collections.emptySet()
|
Set.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
assertFalse(union.isEmpty());
|
assertFalse(union.isEmpty());
|
||||||
|
|
||||||
union = new UnionSet<>(
|
union = new UnionSet<>(
|
||||||
Collections.emptySet(),
|
Set.of(),
|
||||||
Set.of(2, 3, 4, 5)
|
Set.of(2, 3, 4, 5)
|
||||||
);
|
);
|
||||||
|
|
||||||
assertFalse(union.isEmpty());
|
assertFalse(union.isEmpty());
|
||||||
|
|
||||||
union = new UnionSet<>(
|
union = new UnionSet<>(
|
||||||
Collections.emptySet(),
|
Set.of(),
|
||||||
Collections.emptySet()
|
Set.of()
|
||||||
);
|
);
|
||||||
assertTrue(union.isEmpty());
|
assertTrue(union.isEmpty());
|
||||||
}
|
}
|
||||||
|
|
|
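For reference, not part of the patch: `Set.of` makes no iteration-order promise (the order can even change between JVM runs), so set-valued expectations such as those in UnionSetTest above should keep comparing with equals/containsAll rather than element order, which is what these tests already do. A JDK-only sketch (names are illustrative only):

// Reference sketch, not part of the patch.
import java.util.List;
import java.util.Set;
import java.util.TreeSet;

public class SetOrderDemo {
    public static void main(String[] args) {
        Set<Integer> s = Set.of(1, 2, 3, 4, 5);

        // Iteration order is unspecified and may differ between runs.
        System.out.println(s);

        // Equality and membership are order-independent.
        System.out.println(s.equals(new TreeSet<>(List.of(5, 4, 3, 2, 1)))); // true
        System.out.println(s.containsAll(List.of(2, 4)));                    // true
    }
}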
@ -31,7 +31,6 @@ import org.junit.jupiter.params.provider.ValueSource;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -39,7 +38,6 @@ import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.OptionalInt;
|
import java.util.OptionalInt;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkAssignment;
|
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkAssignment;
|
||||||
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkTopicAssignment;
|
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkTopicAssignment;
|
||||||
|
@ -178,7 +176,7 @@ public class ConsumerGroupMemberTest {
|
||||||
.maybeUpdateRackId(Optional.of("new-rack-id"))
|
.maybeUpdateRackId(Optional.of("new-rack-id"))
|
||||||
.maybeUpdateInstanceId(Optional.of("new-instance-id"))
|
.maybeUpdateInstanceId(Optional.of("new-instance-id"))
|
||||||
.maybeUpdateServerAssignorName(Optional.of("new-assignor"))
|
.maybeUpdateServerAssignorName(Optional.of("new-assignor"))
|
||||||
.maybeUpdateSubscribedTopicNames(Optional.of(Collections.singletonList("zar")))
|
.maybeUpdateSubscribedTopicNames(Optional.of(List.of("zar")))
|
||||||
.maybeUpdateSubscribedTopicRegex(Optional.of("new-regex"))
|
.maybeUpdateSubscribedTopicRegex(Optional.of("new-regex"))
|
||||||
.maybeUpdateRebalanceTimeoutMs(OptionalInt.of(6000))
|
.maybeUpdateRebalanceTimeoutMs(OptionalInt.of(6000))
|
||||||
.build();
|
.build();
|
||||||
|
@ -231,10 +229,10 @@ public class ConsumerGroupMemberTest {
|
||||||
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
||||||
.setMemberEpoch(10)
|
.setMemberEpoch(10)
|
||||||
.setPreviousMemberEpoch(9)
|
.setPreviousMemberEpoch(9)
|
||||||
.setAssignedPartitions(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
.setAssignedPartitions(List.of(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
||||||
.setTopicId(topicId1)
|
.setTopicId(topicId1)
|
||||||
.setPartitions(Arrays.asList(0, 1, 2))))
|
.setPartitions(Arrays.asList(0, 1, 2))))
|
||||||
.setPartitionsPendingRevocation(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
.setPartitionsPendingRevocation(List.of(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
||||||
.setTopicId(topicId2)
|
.setTopicId(topicId2)
|
||||||
.setPartitions(Arrays.asList(3, 4, 5))));
|
.setPartitions(Arrays.asList(3, 4, 5))));
|
||||||
|
|
||||||
|
@ -266,10 +264,10 @@ public class ConsumerGroupMemberTest {
|
||||||
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
||||||
.setMemberEpoch(epoch)
|
.setMemberEpoch(epoch)
|
||||||
.setPreviousMemberEpoch(epoch - 1)
|
.setPreviousMemberEpoch(epoch - 1)
|
||||||
.setAssignedPartitions(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
.setAssignedPartitions(List.of(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
||||||
.setTopicId(topicId1)
|
.setTopicId(topicId1)
|
||||||
.setPartitions(assignedPartitions)))
|
.setPartitions(assignedPartitions)))
|
||||||
.setPartitionsPendingRevocation(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
.setPartitionsPendingRevocation(List.of(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
||||||
.setTopicId(topicId2)
|
.setTopicId(topicId2)
|
||||||
.setPartitions(Arrays.asList(3, 4, 5))));
|
.setPartitions(Arrays.asList(3, 4, 5))));
|
||||||
String memberId = Uuid.randomUuid().toString();
|
String memberId = Uuid.randomUuid().toString();
|
||||||
|
@ -306,7 +304,7 @@ public class ConsumerGroupMemberTest {
|
||||||
.setSubscribedTopicRegex(subscribedTopicRegex)
|
.setSubscribedTopicRegex(subscribedTopicRegex)
|
||||||
.setAssignment(
|
.setAssignment(
|
||||||
new ConsumerGroupDescribeResponseData.Assignment()
|
new ConsumerGroupDescribeResponseData.Assignment()
|
||||||
.setTopicPartitions(Collections.singletonList(new ConsumerGroupDescribeResponseData.TopicPartitions()
|
.setTopicPartitions(List.of(new ConsumerGroupDescribeResponseData.TopicPartitions()
|
||||||
.setTopicId(topicId1)
|
.setTopicId(topicId1)
|
||||||
.setTopicName("topic1")
|
.setTopicName("topic1")
|
||||||
.setPartitions(assignedPartitions)
|
.setPartitions(assignedPartitions)
|
||||||
|
@ -319,7 +317,7 @@ public class ConsumerGroupMemberTest {
|
||||||
.setTopicId(item.getKey())
|
.setTopicId(item.getKey())
|
||||||
.setTopicName("topic4")
|
.setTopicName("topic4")
|
||||||
.setPartitions(new ArrayList<>(item.getValue()))
|
.setPartitions(new ArrayList<>(item.getValue()))
|
||||||
).collect(Collectors.toList()))
|
).toList())
|
||||||
)
|
)
|
||||||
.setMemberType(withClassicMemberMetadata ? (byte) 0 : (byte) 1);
|
.setMemberType(withClassicMemberMetadata ? (byte) 0 : (byte) 1);
|
||||||
|
|
||||||
|
@ -341,7 +339,7 @@ public class ConsumerGroupMemberTest {
|
||||||
public void testAsConsumerGroupDescribeWithTopicNameNotFound() {
|
public void testAsConsumerGroupDescribeWithTopicNameNotFound() {
|
||||||
Uuid memberId = Uuid.randomUuid();
|
Uuid memberId = Uuid.randomUuid();
|
||||||
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
ConsumerGroupCurrentMemberAssignmentValue record = new ConsumerGroupCurrentMemberAssignmentValue()
|
||||||
.setAssignedPartitions(Collections.singletonList(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
.setAssignedPartitions(List.of(new ConsumerGroupCurrentMemberAssignmentValue.TopicPartitions()
|
||||||
.setTopicId(Uuid.randomUuid())
|
.setTopicId(Uuid.randomUuid())
|
||||||
.setPartitions(Arrays.asList(0, 1, 2))));
|
.setPartitions(Arrays.asList(0, 1, 2))));
|
||||||
ConsumerGroupMember member = new ConsumerGroupMember.Builder(memberId.toString())
|
ConsumerGroupMember member = new ConsumerGroupMember.Builder(memberId.toString())
|
||||||
|
|
|
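For reference, not part of the patch: the `.collect(Collectors.toList())` to `.toList()` change in the ConsumerGroupMemberTest hunk above swaps a list with no specified mutability guarantee (a mutable ArrayList in current JDKs) for one that is guaranteed unmodifiable; call sites that never mutate the result, as here, are unaffected. A JDK 16+ sketch (names are illustrative only):

// Reference sketch, not part of the patch. Stream.toList() requires JDK 16+.
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class ToListDemo {
    public static void main(String[] args) {
        List<String> collected = Stream.of("a", "b").collect(Collectors.toList());
        List<String> unmodifiable = Stream.of("a", "b").toList();

        System.out.println(collected.equals(unmodifiable)); // true: same contents, same order

        collected.add("c"); // Collectors.toList() gives a mutable ArrayList in current JDKs
        try {
            unmodifiable.add("c");
        } catch (UnsupportedOperationException e) {
            System.out.println("Stream.toList() result is unmodifiable: " + e);
        }
    }
}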
@ -57,7 +57,6 @@ import org.junit.jupiter.params.provider.CsvSource;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
@ -247,7 +246,7 @@ public class ConsumerGroupTest {
|
||||||
|
|
||||||
member = new ConsumerGroupMember.Builder("member")
|
member = new ConsumerGroupMember.Builder("member")
|
||||||
.setMemberEpoch(10)
|
.setMemberEpoch(10)
|
||||||
.setAssignedPartitions(Collections.emptyMap())
|
.setAssignedPartitions(Map.of())
|
||||||
.setPartitionsPendingRevocation(mkAssignment(
|
.setPartitionsPendingRevocation(mkAssignment(
|
||||||
mkTopicAssignment(fooTopicId, 1)))
|
mkTopicAssignment(fooTopicId, 1)))
|
||||||
.build();
|
.build();
|
||||||
|
@ -260,7 +259,7 @@ public class ConsumerGroupTest {
|
||||||
.setMemberEpoch(11)
|
.setMemberEpoch(11)
|
||||||
.setAssignedPartitions(mkAssignment(
|
.setAssignedPartitions(mkAssignment(
|
||||||
mkTopicAssignment(fooTopicId, 1)))
|
mkTopicAssignment(fooTopicId, 1)))
|
||||||
.setPartitionsPendingRevocation(Collections.emptyMap())
|
.setPartitionsPendingRevocation(Map.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
consumerGroup.updateMember(member);
|
consumerGroup.updateMember(member);
|
||||||
|
@ -641,20 +640,20 @@ public class ConsumerGroupTest {
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("bar"))
|
.setSubscribedTopicNames(List.of("bar"))
|
||||||
.build();
|
.build();
|
||||||
ConsumerGroupMember member3 = new ConsumerGroupMember.Builder("member3")
|
ConsumerGroupMember member3 = new ConsumerGroupMember.Builder("member3")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("zar"))
|
.setSubscribedTopicNames(List.of("zar"))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ConsumerGroup consumerGroup = createConsumerGroup("group-foo");
|
ConsumerGroup consumerGroup = createConsumerGroup("group-foo");
|
||||||
|
|
||||||
// It should be empty by default.
|
// It should be empty by default.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
consumerGroup.computeSubscriptionMetadata(
|
consumerGroup.computeSubscriptionMetadata(
|
||||||
consumerGroup.computeSubscribedTopicNames(null, null),
|
consumerGroup.computeSubscribedTopicNames(null, null),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -691,7 +690,7 @@ public class ConsumerGroupTest {
|
||||||
|
|
||||||
// Compute while taking into account removal of member 1.
|
// Compute while taking into account removal of member 1.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
consumerGroup.computeSubscriptionMetadata(
|
consumerGroup.computeSubscriptionMetadata(
|
||||||
consumerGroup.computeSubscribedTopicNames(member1, null),
|
consumerGroup.computeSubscribedTopicNames(member1, null),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -785,7 +784,7 @@ public class ConsumerGroupTest {
|
||||||
|
|
||||||
// Compute while taking into account removal of member 1, member 2 and member 3
|
// Compute while taking into account removal of member 1, member 2 and member 3
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
consumerGroup.computeSubscriptionMetadata(
|
consumerGroup.computeSubscriptionMetadata(
|
||||||
consumerGroup.computeSubscribedTopicNames(new HashSet<>(Arrays.asList(member1, member2, member3))),
|
consumerGroup.computeSubscribedTopicNames(new HashSet<>(Arrays.asList(member1, member2, member3))),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -812,7 +811,7 @@ public class ConsumerGroupTest {
|
||||||
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
||||||
),
|
),
|
||||||
consumerGroup.computeSubscriptionMetadata(
|
consumerGroup.computeSubscriptionMetadata(
|
||||||
consumerGroup.computeSubscribedTopicNames(Collections.singleton(member1)),
|
consumerGroup.computeSubscribedTopicNames(Set.of(member1)),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
image.cluster()
|
image.cluster()
|
||||||
)
|
)
|
||||||
|
@ -826,7 +825,7 @@ public class ConsumerGroupTest {
|
||||||
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
||||||
),
|
),
|
||||||
consumerGroup.computeSubscriptionMetadata(
|
consumerGroup.computeSubscriptionMetadata(
|
||||||
consumerGroup.computeSubscribedTopicNames(Collections.emptySet()),
|
consumerGroup.computeSubscribedTopicNames(Set.of()),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
image.cluster()
|
image.cluster()
|
||||||
)
|
)
|
||||||
|
@ -836,7 +835,7 @@ public class ConsumerGroupTest {
|
||||||
@Test
|
@Test
|
||||||
public void testUpdateSubscribedTopicNamesAndSubscriptionType() {
|
public void testUpdateSubscribedTopicNamesAndSubscriptionType() {
|
||||||
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Arrays.asList("bar", "foo"))
|
.setSubscribedTopicNames(Arrays.asList("bar", "foo"))
|
||||||
|
@ -849,7 +848,7 @@ public class ConsumerGroupTest {
|
||||||
|
|
||||||
// It should be empty by default.
|
// It should be empty by default.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
consumerGroup.subscribedTopicNames()
|
consumerGroup.subscribedTopicNames()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -910,9 +909,9 @@ public class ConsumerGroupTest {
|
||||||
String memberId2 = "member2";
|
String memberId2 = "member2";
|
||||||
|
|
||||||
// Initial assignment for member1
|
// Initial assignment for member1
|
||||||
Assignment initialAssignment = new Assignment(Collections.singletonMap(
|
Assignment initialAssignment = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(0))
|
new HashSet<>(List.of(0))
|
||||||
));
|
));
|
||||||
consumerGroup.updateTargetAssignment(memberId1, initialAssignment);
|
consumerGroup.updateTargetAssignment(memberId1, initialAssignment);
|
||||||
|
|
||||||
|
@ -925,9 +924,9 @@ public class ConsumerGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member1
|
// New assignment for member1
|
||||||
Assignment newAssignment = new Assignment(Collections.singletonMap(
|
Assignment newAssignment = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(1))
|
new HashSet<>(List.of(1))
|
||||||
));
|
));
|
||||||
consumerGroup.updateTargetAssignment(memberId1, newAssignment);
|
consumerGroup.updateTargetAssignment(memberId1, newAssignment);
|
||||||
|
|
||||||
|
@ -940,9 +939,9 @@ public class ConsumerGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member2 to add partition 1
|
// New assignment for member2 to add partition 1
|
||||||
Assignment newAssignment2 = new Assignment(Collections.singletonMap(
|
Assignment newAssignment2 = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(1))
|
new HashSet<>(List.of(1))
|
||||||
));
|
));
|
||||||
consumerGroup.updateTargetAssignment(memberId2, newAssignment2);
|
consumerGroup.updateTargetAssignment(memberId2, newAssignment2);
|
||||||
|
|
||||||
|
@ -955,9 +954,9 @@ public class ConsumerGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member1 to revoke partition 1 and assign partition 0
|
// New assignment for member1 to revoke partition 1 and assign partition 0
|
||||||
Assignment newAssignment1 = new Assignment(Collections.singletonMap(
|
Assignment newAssignment1 = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(0))
|
new HashSet<>(List.of(0))
|
||||||
));
|
));
|
||||||
consumerGroup.updateTargetAssignment(memberId1, newAssignment1);
|
consumerGroup.updateTargetAssignment(memberId1, newAssignment1);
|
||||||
|
|
||||||
|
@ -1121,14 +1120,14 @@ public class ConsumerGroupTest {
|
||||||
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
||||||
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
||||||
snapshotRegistry,
|
snapshotRegistry,
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)
|
new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)
|
||||||
);
|
);
|
||||||
ConsumerGroup group = new ConsumerGroup(snapshotRegistry, "group-foo", metricsShard);
|
ConsumerGroup group = new ConsumerGroup(snapshotRegistry, "group-foo", metricsShard);
|
||||||
snapshotRegistry.idempotentCreateSnapshot(0);
|
snapshotRegistry.idempotentCreateSnapshot(0);
|
||||||
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
||||||
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
||||||
|
@ -1222,10 +1221,10 @@ public class ConsumerGroupTest {
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
ConsumerGroupMember member1 = new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
ConsumerGroupMember member2 = new ConsumerGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("bar"))
|
.setSubscribedTopicNames(List.of("bar"))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ConsumerGroup consumerGroup = createConsumerGroup("group-foo");
|
ConsumerGroup consumerGroup = createConsumerGroup("group-foo");
|
||||||
|
@ -1263,7 +1262,7 @@ public class ConsumerGroupTest {
|
||||||
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
assertEquals(ConsumerGroup.ConsumerGroupState.EMPTY.toString(), group.stateAsString(0));
|
||||||
|
|
||||||
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.setServerAssignorName("assignorName")
|
.setServerAssignorName("assignorName")
|
||||||
.build());
|
.build());
|
||||||
group.updateMember(new ConsumerGroupMember.Builder("member2")
|
group.updateMember(new ConsumerGroupMember.Builder("member2")
|
||||||
|
@ -1279,7 +1278,7 @@ public class ConsumerGroupTest {
|
||||||
.setMembers(Arrays.asList(
|
.setMembers(Arrays.asList(
|
||||||
new ConsumerGroupDescribeResponseData.Member()
|
new ConsumerGroupDescribeResponseData.Member()
|
||||||
.setMemberId("member1")
|
.setMemberId("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.setSubscribedTopicRegex("")
|
.setSubscribedTopicRegex("")
|
||||||
.setMemberType((byte) 1),
|
.setMemberType((byte) 1),
|
||||||
new ConsumerGroupDescribeResponseData.Member().setMemberId("member2")
|
new ConsumerGroupDescribeResponseData.Member().setMemberId("member2")
|
||||||
|
@ -1297,21 +1296,21 @@ public class ConsumerGroupTest {
|
||||||
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
||||||
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
||||||
snapshotRegistry,
|
snapshotRegistry,
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)
|
new TopicPartition(Topic.GROUP_METADATA_TOPIC_NAME, 0)
|
||||||
);
|
);
|
||||||
ConsumerGroup group = new ConsumerGroup(snapshotRegistry, "group-foo", metricsShard);
|
ConsumerGroup group = new ConsumerGroup(snapshotRegistry, "group-foo", metricsShard);
|
||||||
snapshotRegistry.idempotentCreateSnapshot(0);
|
snapshotRegistry.idempotentCreateSnapshot(0);
|
||||||
assertTrue(group.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(group.isInStates(Set.of("empty"), 0));
|
||||||
assertFalse(group.isInStates(Collections.singleton("Empty"), 0));
|
assertFalse(group.isInStates(Set.of("Empty"), 0));
|
||||||
|
|
||||||
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
group.updateMember(new ConsumerGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
assertTrue(group.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(group.isInStates(Set.of("empty"), 0));
|
||||||
assertTrue(group.isInStates(Collections.singleton("stable"), 1));
|
assertTrue(group.isInStates(Set.of("stable"), 1));
|
||||||
assertFalse(group.isInStates(Collections.singleton("empty"), 1));
|
assertFalse(group.isInStates(Set.of("empty"), 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -1512,7 +1511,7 @@ public class ConsumerGroupTest {
|
||||||
5000,
|
5000,
|
||||||
500,
|
500,
|
||||||
ConsumerProtocol.PROTOCOL_TYPE,
|
ConsumerProtocol.PROTOCOL_TYPE,
|
||||||
new JoinGroupRequestData.JoinGroupRequestProtocolCollection(Collections.singletonList(
|
new JoinGroupRequestData.JoinGroupRequestProtocolCollection(List.of(
|
||||||
new JoinGroupRequestData.JoinGroupRequestProtocol()
|
new JoinGroupRequestData.JoinGroupRequestProtocol()
|
||||||
.setName("range")
|
.setName("range")
|
||||||
.setMetadata(Utils.toArray(ConsumerProtocol.serializeSubscription(new ConsumerPartitionAssignor.Subscription(
|
.setMetadata(Utils.toArray(ConsumerProtocol.serializeSubscription(new ConsumerPartitionAssignor.Subscription(
|
||||||
|
@ -1562,7 +1561,7 @@ public class ConsumerGroupTest {
|
||||||
.setClassicMemberMetadata(
|
.setClassicMemberMetadata(
|
||||||
new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata()
|
new ConsumerGroupMemberMetadataValue.ClassicMemberMetadata()
|
||||||
.setSessionTimeoutMs(member.sessionTimeoutMs())
|
.setSessionTimeoutMs(member.sessionTimeoutMs())
|
||||||
.setSupportedProtocols(Collections.singletonList(
|
.setSupportedProtocols(List.of(
|
||||||
new ConsumerGroupMemberMetadataValue.ClassicProtocol()
|
new ConsumerGroupMemberMetadataValue.ClassicProtocol()
|
||||||
.setName("range")
|
.setName("range")
|
||||||
.setMetadata(Utils.toArray(ConsumerProtocol.serializeSubscription(new ConsumerPartitionAssignor.Subscription(
|
.setMetadata(Utils.toArray(ConsumerProtocol.serializeSubscription(new ConsumerPartitionAssignor.Subscription(
|
||||||
|
@ -1927,7 +1926,7 @@ public class ConsumerGroupTest {
|
||||||
consumerGroup.computeSubscribedTopicNames(
|
consumerGroup.computeSubscribedTopicNames(
|
||||||
member3,
|
member3,
|
||||||
new ConsumerGroupMember.Builder(member3)
|
new ConsumerGroupMember.Builder(member3)
|
||||||
.setSubscribedTopicNames(Collections.emptyList())
|
.setSubscribedTopicNames(List.of())
|
||||||
.build()
|
.build()
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
@ -1942,7 +1941,7 @@ public class ConsumerGroupTest {
|
||||||
consumerGroup.computeSubscribedTopicNames(
|
consumerGroup.computeSubscribedTopicNames(
|
||||||
member3,
|
member3,
|
||||||
new ConsumerGroupMember.Builder(member3)
|
new ConsumerGroupMember.Builder(member3)
|
||||||
.setSubscribedTopicNames(Collections.emptyList())
|
.setSubscribedTopicNames(List.of())
|
||||||
.setSubscribedTopicRegex("")
|
.setSubscribedTopicRegex("")
|
||||||
.build()
|
.build()
|
||||||
)
|
)
|
||||||
|
@ -2122,8 +2121,8 @@ public class ConsumerGroupTest {
|
||||||
assertEquals(
|
assertEquals(
|
||||||
HOMOGENEOUS,
|
HOMOGENEOUS,
|
||||||
ConsumerGroup.subscriptionType(
|
ConsumerGroup.subscriptionType(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
0
|
0
|
||||||
)
|
)
|
||||||
);
|
);
|
||||||
|
@ -2131,7 +2130,7 @@ public class ConsumerGroupTest {
|
||||||
assertEquals(
|
assertEquals(
|
||||||
HOMOGENEOUS,
|
HOMOGENEOUS,
|
||||||
ConsumerGroup.subscriptionType(
|
ConsumerGroup.subscriptionType(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Map.of("foo", new SubscriptionCount(5, 0)),
|
Map.of("foo", new SubscriptionCount(5, 0)),
|
||||||
5
|
5
|
||||||
)
|
)
|
||||||
|
@ -2140,7 +2139,7 @@ public class ConsumerGroupTest {
|
||||||
assertEquals(
|
assertEquals(
|
||||||
HETEROGENEOUS,
|
HETEROGENEOUS,
|
||||||
ConsumerGroup.subscriptionType(
|
ConsumerGroup.subscriptionType(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Map.of(
|
Map.of(
|
||||||
"foo", new SubscriptionCount(4, 0),
|
"foo", new SubscriptionCount(4, 0),
|
||||||
"bar", new SubscriptionCount(1, 0)
|
"bar", new SubscriptionCount(1, 0)
|
||||||
|
|
|
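For reference, not part of the patch: the multi-entry `Map.of(...)` literals used for the subscription-count expectations in the ConsumerGroupTest hunks above reject duplicate keys and top out at ten key/value pairs; `Map.ofEntries` covers anything larger. A JDK-only sketch (names are illustrative only):

// Reference sketch, not part of the patch.
import java.util.Map;
import static java.util.Map.entry;

public class MapOfLimitsDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = Map.of("foo", 4, "bar", 1);
        System.out.println(counts.size()); // 2

        try {
            Map.of("foo", 4, "foo", 1); // duplicate key
        } catch (IllegalArgumentException e) {
            System.out.println("duplicate key rejected: " + e);
        }

        // Beyond ten pairs, switch to Map.ofEntries(entry(k, v), ...).
        Map<String, Integer> larger = Map.ofEntries(entry("foo", 4), entry("bar", 1), entry("zar", 3));
        System.out.println(larger.size()); // 3
    }
}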
@ -25,7 +25,7 @@ import org.apache.kafka.coordinator.group.modern.MemberState;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.List;
|
||||||
|
|
||||||
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkAssignment;
|
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkAssignment;
|
||||||
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkTopicAssignment;
|
import static org.apache.kafka.coordinator.group.AssignmentTestUtil.mkTopicAssignment;
|
||||||
|
@ -194,7 +194,7 @@ public class CurrentAssignmentBuilderTest {
|
||||||
.withCurrentPartitionEpoch((topicId, __) ->
|
.withCurrentPartitionEpoch((topicId, __) ->
|
||||||
topicId2.equals(topicId) ? 10 : -1
|
topicId2.equals(topicId) ? 10 : -1
|
||||||
)
|
)
|
||||||
.withOwnedTopicPartitions(Collections.emptyList())
|
.withOwnedTopicPartitions(List.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -560,7 +560,7 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTopicAssignment(topicId1, 3),
|
mkTopicAssignment(topicId1, 3),
|
||||||
mkTopicAssignment(topicId2, 6))))
|
mkTopicAssignment(topicId2, 6))))
|
||||||
.withCurrentPartitionEpoch((topicId, partitionId) -> 11)
|
.withCurrentPartitionEpoch((topicId, partitionId) -> 11)
|
||||||
.withOwnedTopicPartitions(Collections.emptyList())
|
.withOwnedTopicPartitions(List.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
|
|
@ -25,7 +25,6 @@ import org.apache.kafka.image.MetadataImage;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
@ -117,7 +116,7 @@ public class ShareGroupMemberTest {
|
||||||
|
|
||||||
updatedMember = new ShareGroupMember.Builder(member)
|
updatedMember = new ShareGroupMember.Builder(member)
|
||||||
.maybeUpdateRackId(Optional.of("new-rack-id"))
|
.maybeUpdateRackId(Optional.of("new-rack-id"))
|
||||||
.maybeUpdateSubscribedTopicNames(Optional.of(Collections.singletonList("zar")))
|
.maybeUpdateSubscribedTopicNames(Optional.of(List.of("zar")))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertNull(member.instanceId());
|
assertNull(member.instanceId());
|
||||||
|
@ -180,7 +179,7 @@ public class ShareGroupMemberTest {
|
||||||
.setSubscribedTopicNames(subscribedTopicNames)
|
.setSubscribedTopicNames(subscribedTopicNames)
|
||||||
.setAssignment(
|
.setAssignment(
|
||||||
new ShareGroupDescribeResponseData.Assignment()
|
new ShareGroupDescribeResponseData.Assignment()
|
||||||
.setTopicPartitions(Collections.singletonList(new ShareGroupDescribeResponseData.TopicPartitions()
|
.setTopicPartitions(List.of(new ShareGroupDescribeResponseData.TopicPartitions()
|
||||||
.setTopicId(topicId1)
|
.setTopicId(topicId1)
|
||||||
.setTopicName("topic1")
|
.setTopicName("topic1")
|
||||||
.setPartitions(assignedPartitions)
|
.setPartitions(assignedPartitions)
|
||||||
|
|
|
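For reference, not part of the patch: `Arrays.asList(...)` call sites (for example the `.setPartitions(Arrays.asList(0, 1, 2))` lines) are left unchanged in the hunks above; unlike `List.of`, `Arrays.asList` is a fixed-size view over an array that still permits `set()` and null elements. A JDK-only sketch (names are illustrative only):

// Reference sketch, not part of the patch.
import java.util.Arrays;
import java.util.List;

public class AsListVsOfDemo {
    public static void main(String[] args) {
        List<Integer> view = Arrays.asList(0, 1, 2);
        view.set(0, 42);          // allowed: element replacement writes through to the backing array
        System.out.println(view); // [42, 1, 2]

        List<Integer> fixed = List.of(0, 1, 2);
        try {
            fixed.set(0, 42);     // not allowed: fully unmodifiable
        } catch (UnsupportedOperationException e) {
            System.out.println("List.of is fully unmodifiable: " + e);
        }

        System.out.println(Arrays.asList((Integer) null)); // [null]; List.of(null) would throw NPE
    }
}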
@ -38,8 +38,10 @@ import org.junit.jupiter.api.Test;
|
||||||
import org.junit.jupiter.params.ParameterizedTest;
|
import org.junit.jupiter.params.ParameterizedTest;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
import static org.apache.kafka.common.utils.Utils.mkEntry;
|
import static org.apache.kafka.common.utils.Utils.mkEntry;
|
||||||
import static org.apache.kafka.common.utils.Utils.mkMap;
|
import static org.apache.kafka.common.utils.Utils.mkMap;
|
||||||
|
@ -156,20 +158,20 @@ public class ShareGroupTest {
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("bar"))
|
.setSubscribedTopicNames(List.of("bar"))
|
||||||
.build();
|
.build();
|
||||||
ShareGroupMember member3 = new ShareGroupMember.Builder("member3")
|
ShareGroupMember member3 = new ShareGroupMember.Builder("member3")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("zar"))
|
.setSubscribedTopicNames(List.of("zar"))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ShareGroup shareGroup = createShareGroup("group-foo");
|
ShareGroup shareGroup = createShareGroup("group-foo");
|
||||||
|
|
||||||
// It should be empty by default.
|
// It should be empty by default.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
shareGroup.computeSubscriptionMetadata(
|
shareGroup.computeSubscriptionMetadata(
|
||||||
shareGroup.computeSubscribedTopicNames(null, null),
|
shareGroup.computeSubscribedTopicNames(null, null),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -206,7 +208,7 @@ public class ShareGroupTest {
|
||||||
|
|
||||||
// Compute while taking into account removal of member 1.
|
// Compute while taking into account removal of member 1.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
shareGroup.computeSubscriptionMetadata(
|
shareGroup.computeSubscriptionMetadata(
|
||||||
shareGroup.computeSubscribedTopicNames(member1, null),
|
shareGroup.computeSubscribedTopicNames(member1, null),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -300,7 +302,7 @@ public class ShareGroupTest {
|
||||||
|
|
||||||
// Compute while taking into account removal of member 1, member 2 and member 3
|
// Compute while taking into account removal of member 1, member 2 and member 3
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
shareGroup.computeSubscriptionMetadata(
|
shareGroup.computeSubscriptionMetadata(
|
||||||
shareGroup.computeSubscribedTopicNames(new HashSet<>(Arrays.asList(member1, member2, member3))),
|
shareGroup.computeSubscribedTopicNames(new HashSet<>(Arrays.asList(member1, member2, member3))),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
|
@ -327,7 +329,7 @@ public class ShareGroupTest {
|
||||||
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
||||||
),
|
),
|
||||||
shareGroup.computeSubscriptionMetadata(
|
shareGroup.computeSubscriptionMetadata(
|
||||||
shareGroup.computeSubscribedTopicNames(Collections.singleton(member1)),
|
shareGroup.computeSubscribedTopicNames(Set.of(member1)),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
image.cluster()
|
image.cluster()
|
||||||
)
|
)
|
||||||
|
@ -341,7 +343,7 @@ public class ShareGroupTest {
|
||||||
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
mkEntry("zar", new TopicMetadata(zarTopicId, "zar", 3))
|
||||||
),
|
),
|
||||||
shareGroup.computeSubscriptionMetadata(
|
shareGroup.computeSubscriptionMetadata(
|
||||||
shareGroup.computeSubscribedTopicNames(Collections.emptySet()),
|
shareGroup.computeSubscribedTopicNames(Set.of()),
|
||||||
image.topics(),
|
image.topics(),
|
||||||
image.cluster()
|
image.cluster()
|
||||||
)
|
)
|
||||||
|
@ -351,7 +353,7 @@ public class ShareGroupTest {
|
||||||
@Test
|
@Test
|
||||||
public void testUpdateSubscribedTopicNamesAndSubscriptionType() {
|
public void testUpdateSubscribedTopicNamesAndSubscriptionType() {
|
||||||
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Arrays.asList("bar", "foo"))
|
.setSubscribedTopicNames(Arrays.asList("bar", "foo"))
|
||||||
|
@ -364,7 +366,7 @@ public class ShareGroupTest {
|
||||||
|
|
||||||
// It should be empty by default.
|
// It should be empty by default.
|
||||||
assertEquals(
|
assertEquals(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
shareGroup.subscribedTopicNames()
|
shareGroup.subscribedTopicNames()
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -424,9 +426,9 @@ public class ShareGroupTest {
|
||||||
String memberId2 = "member2";
|
String memberId2 = "member2";
|
||||||
|
|
||||||
// Initial assignment for member1
|
// Initial assignment for member1
|
||||||
Assignment initialAssignment = new Assignment(Collections.singletonMap(
|
Assignment initialAssignment = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(0))
|
new HashSet<>(List.of(0))
|
||||||
));
|
));
|
||||||
shareGroup.updateTargetAssignment(memberId1, initialAssignment);
|
shareGroup.updateTargetAssignment(memberId1, initialAssignment);
|
||||||
|
|
||||||
|
@ -439,9 +441,9 @@ public class ShareGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member1
|
// New assignment for member1
|
||||||
Assignment newAssignment = new Assignment(Collections.singletonMap(
|
Assignment newAssignment = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(1))
|
new HashSet<>(List.of(1))
|
||||||
));
|
));
|
||||||
shareGroup.updateTargetAssignment(memberId1, newAssignment);
|
shareGroup.updateTargetAssignment(memberId1, newAssignment);
|
||||||
|
|
||||||
|
@ -454,9 +456,9 @@ public class ShareGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member2 to add partition 1
|
// New assignment for member2 to add partition 1
|
||||||
Assignment newAssignment2 = new Assignment(Collections.singletonMap(
|
Assignment newAssignment2 = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(1))
|
new HashSet<>(List.of(1))
|
||||||
));
|
));
|
||||||
shareGroup.updateTargetAssignment(memberId2, newAssignment2);
|
shareGroup.updateTargetAssignment(memberId2, newAssignment2);
|
||||||
|
|
||||||
|
@ -469,9 +471,9 @@ public class ShareGroupTest {
|
||||||
);
|
);
|
||||||
|
|
||||||
// New assignment for member1 to revoke partition 1 and assign partition 0
|
// New assignment for member1 to revoke partition 1 and assign partition 0
|
||||||
Assignment newAssignment1 = new Assignment(Collections.singletonMap(
|
Assignment newAssignment1 = new Assignment(Map.of(
|
||||||
topicId,
|
topicId,
|
||||||
new HashSet<>(Collections.singletonList(0))
|
new HashSet<>(List.of(0))
|
||||||
));
|
));
|
||||||
shareGroup.updateTargetAssignment(memberId1, newAssignment1);
|
shareGroup.updateTargetAssignment(memberId1, newAssignment1);
|
||||||
|
|
||||||
|
@ -563,7 +565,7 @@ public class ShareGroupTest {
|
||||||
assertEquals(ShareGroupState.EMPTY, shareGroup.state(0));
|
assertEquals(ShareGroupState.EMPTY, shareGroup.state(0));
|
||||||
assertEquals("Empty", shareGroup.stateAsString(0));
|
assertEquals("Empty", shareGroup.stateAsString(0));
|
||||||
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
assertEquals(ShareGroupState.EMPTY, shareGroup.state(0));
|
assertEquals(ShareGroupState.EMPTY, shareGroup.state(0));
|
||||||
|
@ -630,10 +632,10 @@ public class ShareGroupTest {
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
ShareGroupMember member1 = new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build();
|
.build();
|
||||||
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
ShareGroupMember member2 = new ShareGroupMember.Builder("member2")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("bar"))
|
.setSubscribedTopicNames(List.of("bar"))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
ShareGroup shareGroup = createShareGroup("group-foo");
|
ShareGroup shareGroup = createShareGroup("group-foo");
|
||||||
|
@ -671,7 +673,7 @@ public class ShareGroupTest {
|
||||||
assertEquals(ShareGroupState.EMPTY.toString(), shareGroup.stateAsString(0));
|
assertEquals(ShareGroupState.EMPTY.toString(), shareGroup.stateAsString(0));
|
||||||
|
|
||||||
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build());
|
.build());
|
||||||
shareGroup.updateMember(new ShareGroupMember.Builder("member2")
|
shareGroup.updateMember(new ShareGroupMember.Builder("member2")
|
||||||
.build());
|
.build());
|
||||||
|
@ -686,7 +688,7 @@ public class ShareGroupTest {
|
||||||
.setMembers(Arrays.asList(
|
.setMembers(Arrays.asList(
|
||||||
new ShareGroupDescribeResponseData.Member()
|
new ShareGroupDescribeResponseData.Member()
|
||||||
.setMemberId("member1")
|
.setMemberId("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo")),
|
.setSubscribedTopicNames(List.of("foo")),
|
||||||
new ShareGroupDescribeResponseData.Member().setMemberId("member2")
|
new ShareGroupDescribeResponseData.Member().setMemberId("member2")
|
||||||
));
|
));
|
||||||
ShareGroupDescribeResponseData.DescribedGroup actual = shareGroup.asDescribedGroup(1, "assignorName",
|
ShareGroupDescribeResponseData.DescribedGroup actual = shareGroup.asDescribedGroup(1, "assignorName",
|
||||||
|
@ -700,16 +702,16 @@ public class ShareGroupTest {
|
||||||
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(new LogContext());
|
||||||
ShareGroup shareGroup = new ShareGroup(snapshotRegistry, "group-foo");
|
ShareGroup shareGroup = new ShareGroup(snapshotRegistry, "group-foo");
|
||||||
snapshotRegistry.idempotentCreateSnapshot(0);
|
snapshotRegistry.idempotentCreateSnapshot(0);
|
||||||
assertTrue(shareGroup.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(shareGroup.isInStates(Set.of("empty"), 0));
|
||||||
assertFalse(shareGroup.isInStates(Collections.singleton("Empty"), 0));
|
assertFalse(shareGroup.isInStates(Set.of("Empty"), 0));
|
||||||
|
|
||||||
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
shareGroup.updateMember(new ShareGroupMember.Builder("member1")
|
||||||
.setSubscribedTopicNames(Collections.singletonList("foo"))
|
.setSubscribedTopicNames(List.of("foo"))
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
assertTrue(shareGroup.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(shareGroup.isInStates(Set.of("empty"), 0));
|
||||||
assertTrue(shareGroup.isInStates(Collections.singleton("stable"), 1));
|
assertTrue(shareGroup.isInStates(Set.of("stable"), 1));
|
||||||
assertFalse(shareGroup.isInStates(Collections.singleton("empty"), 1));
|
assertFalse(shareGroup.isInStates(Set.of("empty"), 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
private ShareGroup createShareGroup(String groupId) {
|
private ShareGroup createShareGroup(String groupId) {
|
||||||
|
|
|
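Both the legacy `Collections.singletonList`/`Collections.emptyList`-style factories replaced above and the `List.of`/`Set.of`/`Map.of` factories that replace them return unmodifiable collections, so the swap does not change what these tests exercise; the `new HashSet<>(...)` wrappers above still produce mutable copies, exactly as before. A minimal standalone sketch of that shared behaviour, with an illustrative class name that is not part of the Kafka tree:

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class UnmodifiableFactoriesSketch {
        public static void main(String[] args) {
            // Old and new factories are equally unmodifiable.
            for (List<Integer> list : List.of(Collections.singletonList(1), List.of(1))) {
                try {
                    list.add(2);
                } catch (UnsupportedOperationException e) {
                    System.out.println(list + " is unmodifiable");
                }
            }

            // Copying into a HashSet, as the tests above do, yields a mutable set.
            Set<Integer> mutable = new HashSet<>(List.of(1));
            mutable.add(2);
            System.out.println(mutable); // [1, 2]
        }
    }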
@ -24,7 +24,7 @@ import org.junit.jupiter.api.Test;
|
||||||
import org.junit.jupiter.params.ParameterizedTest;
|
import org.junit.jupiter.params.ParameterizedTest;
|
||||||
import org.junit.jupiter.params.provider.EnumSource;
|
import org.junit.jupiter.params.provider.EnumSource;
|
||||||
|
|
||||||
import java.util.Collections;
|
import java.util.Set;
|
||||||
|
|
||||||
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasks;
|
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasks;
|
||||||
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksTuple;
|
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksTuple;
|
||||||
|
@ -63,8 +63,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
|
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -108,8 +108,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
|
mkTasks(SUBTOPOLOGY_ID2, 3, 4)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -150,8 +150,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
|
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -191,8 +191,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 4, 5)))
|
mkTasks(SUBTOPOLOGY_ID2, 4, 5)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -235,8 +235,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
.withTargetAssignment(memberEpoch + 1, TasksTuple.EMPTY)
|
.withTargetAssignment(memberEpoch + 1, TasksTuple.EMPTY)
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -278,8 +278,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
|
mkTasks(SUBTOPOLOGY_ID2, 3, 4, 7)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -321,8 +321,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
SUBTOPOLOGY_ID2.equals(subtopologyId) ? PROCESS_ID : null
|
SUBTOPOLOGY_ID2.equals(subtopologyId) ? PROCESS_ID : null
|
||||||
)
|
)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.withOwnedAssignment(mkTasksTuple(taskRole))
|
.withOwnedAssignment(mkTasksTuple(taskRole))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
|
@ -365,8 +365,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.withOwnedAssignment(mkTasksTuple(taskRole,
|
.withOwnedAssignment(mkTasksTuple(taskRole,
|
||||||
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
|
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
||||||
|
@ -412,9 +412,9 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds(
|
.withCurrentWarmupTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet());
|
(subtopologyId, partitionId) -> Set.of());
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
member,
|
member,
|
||||||
|
@ -511,8 +511,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.withOwnedAssignment(mkTasksTuple(taskRole,
|
.withOwnedAssignment(mkTasksTuple(taskRole,
|
||||||
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
|
mkTasks(SUBTOPOLOGY_ID1, 2, 3),
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6))
|
||||||
|
@ -556,9 +556,9 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.singleton(PROCESS_ID))
|
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) ->
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) ->
|
||||||
Collections.singleton(PROCESS_ID))
|
Set.of(PROCESS_ID))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -598,8 +598,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -639,9 +639,9 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
mkTasks(SUBTOPOLOGY_ID2, 5, 6, 7)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.singleton(PROCESS_ID))
|
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
|
||||||
.withCurrentWarmupTaskProcessIds(
|
.withCurrentWarmupTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.singleton(PROCESS_ID))
|
(subtopologyId, partitionId) -> Set.of(PROCESS_ID))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(member, updatedMember);
|
assertEquals(member, updatedMember);
|
||||||
|
@ -671,10 +671,10 @@ public class CurrentAssignmentBuilderTest {
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
|
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
|
||||||
? Collections.emptySet() : Collections.singleton(PROCESS_ID))
|
? Set.of() : Set.of(PROCESS_ID))
|
||||||
.withCurrentWarmupTaskProcessIds(
|
.withCurrentWarmupTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
|
(subtopologyId, partitionId) -> (taskRole == TaskRole.STANDBY)
|
||||||
? Collections.singleton(PROCESS_ID) : Collections.emptySet())
|
? Set.of(PROCESS_ID) : Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(member, updatedMember);
|
assertEquals(member, updatedMember);
|
||||||
|
@ -716,9 +716,9 @@ public class CurrentAssignmentBuilderTest {
|
||||||
(subtopologyId.equals(SUBTOPOLOGY_ID1) && partitionId == 4) ? "anyOtherProcess"
|
(subtopologyId.equals(SUBTOPOLOGY_ID1) && partitionId == 4) ? "anyOtherProcess"
|
||||||
: null)
|
: null)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds(
|
.withCurrentWarmupTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(expectedMember, updatedMember);
|
assertEquals(expectedMember, updatedMember);
|
||||||
|
@ -748,8 +748,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
assertEquals(
|
assertEquals(
|
||||||
|
@ -803,8 +803,8 @@ public class CurrentAssignmentBuilderTest {
|
||||||
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
mkTasks(SUBTOPOLOGY_ID2, 6)))
|
||||||
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
.withCurrentActiveTaskProcessId((subtopologyId, partitionId) -> PROCESS_ID)
|
||||||
.withCurrentStandbyTaskProcessIds(
|
.withCurrentStandbyTaskProcessIds(
|
||||||
(subtopologyId, partitionId) -> Collections.emptySet())
|
(subtopologyId, partitionId) -> Set.of())
|
||||||
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Collections.emptySet())
|
.withCurrentWarmupTaskProcessIds((subtopologyId, partitionId) -> Set.of())
|
||||||
.withOwnedAssignment(mkTasksTuple(taskRole))
|
.withOwnedAssignment(mkTasksTuple(taskRole))
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
|
|
|
@ -155,7 +155,7 @@ public class StreamsGroupMemberTest {
|
||||||
.setUserEndpoint(USER_ENDPOINT)
|
.setUserEndpoint(USER_ENDPOINT)
|
||||||
.setClientTags(CLIENT_TAGS.entrySet().stream()
|
.setClientTags(CLIENT_TAGS.entrySet().stream()
|
||||||
.map(e -> new KeyValue().setKey(e.getKey()).setValue(e.getValue()))
|
.map(e -> new KeyValue().setKey(e.getKey()).setValue(e.getValue()))
|
||||||
.collect(Collectors.toList()));
|
.toList());
|
||||||
|
|
||||||
StreamsGroupMember member = new StreamsGroupMember.Builder("member-id")
|
StreamsGroupMember member = new StreamsGroupMember.Builder("member-id")
|
||||||
.updateWith(record)
|
.updateWith(record)
|
||||||
|
|
|
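The `Collectors.toList()` to `Stream.toList()` change in the hunk above is safe for this call site, where the result is only handed to `setClientTags`, but the two are not interchangeable in general: `Stream.toList()` (Java 16+) is specified to return an unmodifiable list, while `Collectors.toList()` makes no mutability guarantee and returns an `ArrayList` in practice. A small standalone sketch (illustrative class name, not from the Kafka tree):

    import java.util.List;
    import java.util.stream.Collectors;
    import java.util.stream.Stream;

    public class ToListSketch {
        public static void main(String[] args) {
            // Collectors.toList() gives no mutability guarantee; today it is an ArrayList.
            List<String> collected = Stream.of("a", "b").collect(Collectors.toList());
            collected.add("c");
            System.out.println(collected); // [a, b, c]

            // Stream.toList() is specified to return an unmodifiable list.
            List<String> listed = Stream.of("a", "b").toList();
            try {
                listed.add("c");
            } catch (UnsupportedOperationException e) {
                System.out.println("Stream.toList() result is unmodifiable");
            }
        }
    }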
@ -68,7 +68,6 @@ import java.util.OptionalLong;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
import static java.util.Collections.emptyMap;
|
|
||||||
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasks;
|
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasks;
|
||||||
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksPerSubtopology;
|
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksPerSubtopology;
|
||||||
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksTuple;
|
import static org.apache.kafka.coordinator.group.streams.TaskAssignmentTestUtil.mkTasksTuple;
|
||||||
|
@ -221,19 +220,19 @@ public class StreamsGroupTest {
|
||||||
streamsGroup.updateMember(member);
|
streamsGroup.updateMember(member);
|
||||||
|
|
||||||
assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
||||||
assertEquals(Collections.singleton("process"),
|
assertEquals(Set.of("process"),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
||||||
assertEquals(Collections.singleton("process"),
|
assertEquals(Set.of("process"),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
||||||
assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
||||||
assertEquals(Collections.singleton("process"),
|
assertEquals(Set.of("process"),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
||||||
assertEquals(Collections.singleton("process"),
|
assertEquals(Set.of("process"),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
||||||
assertEquals(Collections.emptySet(),
|
assertEquals(Set.of(),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
||||||
assertEquals(Collections.emptySet(),
|
assertEquals(Set.of(),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
||||||
|
|
||||||
member = new StreamsGroupMember.Builder(member)
|
member = new StreamsGroupMember.Builder(member)
|
||||||
|
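In the hunk above, the expected values in `assertEquals` move from `Collections.singleton(...)` and `Collections.emptySet()` to `Set.of(...)` and `Set.of()`. That cannot change any assertion outcome, because `Set.equals` is defined by contents rather than by implementation class. A quick standalone illustration (class name is illustrative only):

    import java.util.Collections;
    import java.util.HashSet;
    import java.util.Set;

    public class SetEqualitySketch {
        public static void main(String[] args) {
            Set<String> actual = new HashSet<>();
            actual.add("process");

            // All expected forms compare equal to the same actual set.
            System.out.println(Collections.singleton("process").equals(actual)); // true
            System.out.println(Set.of("process").equals(actual));                // true
            System.out.println(Collections.emptySet().equals(Set.of()));         // true
        }
    }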
@ -257,19 +256,19 @@ public class StreamsGroupTest {
|
||||||
streamsGroup.updateMember(member);
|
streamsGroup.updateMember(member);
|
||||||
|
|
||||||
assertEquals("process1", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
assertEquals("process1", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
||||||
assertEquals(Collections.singleton("process1"),
|
assertEquals(Set.of("process1"),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
||||||
assertEquals(Collections.singleton("process1"),
|
assertEquals(Set.of("process1"),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
||||||
assertEquals("process1", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
assertEquals("process1", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
||||||
assertEquals(Collections.singleton("process1"),
|
assertEquals(Set.of("process1"),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
||||||
assertEquals(Collections.singleton("process1"),
|
assertEquals(Set.of("process1"),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
||||||
assertEquals(Collections.emptySet(),
|
assertEquals(Set.of(),
|
||||||
streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
||||||
assertEquals(Collections.emptySet(),
|
assertEquals(Set.of(),
|
||||||
streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -284,9 +283,9 @@ public class StreamsGroupTest {
|
||||||
.setProcessId("process")
|
.setProcessId("process")
|
||||||
.setAssignedTasks(
|
.setAssignedTasks(
|
||||||
new TasksTuple(
|
new TasksTuple(
|
||||||
emptyMap(),
|
Map.of(),
|
||||||
emptyMap(),
|
Map.of(),
|
||||||
emptyMap()
|
Map.of()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.setTasksPendingRevocation(
|
.setTasksPendingRevocation(
|
||||||
|
@ -329,8 +328,8 @@ public class StreamsGroupTest {
|
||||||
.setAssignedTasks(
|
.setAssignedTasks(
|
||||||
new TasksTuple(
|
new TasksTuple(
|
||||||
mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 1)),
|
mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 1)),
|
||||||
emptyMap(),
|
Map.of(),
|
||||||
emptyMap()
|
Map.of()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
@ -342,8 +341,8 @@ public class StreamsGroupTest {
|
||||||
.setAssignedTasks(
|
.setAssignedTasks(
|
||||||
new TasksTuple(
|
new TasksTuple(
|
||||||
mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 1)),
|
mkTasksPerSubtopology(mkTasks(fooSubtopologyId, 1)),
|
||||||
emptyMap(),
|
Map.of(),
|
||||||
emptyMap()
|
Map.of()
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
.build();
|
.build();
|
||||||
|
@ -436,26 +435,26 @@ public class StreamsGroupTest {
|
||||||
streamsGroup.updateMember(member);
|
streamsGroup.updateMember(member);
|
||||||
|
|
||||||
assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
assertEquals("process", streamsGroup.currentActiveTaskProcessId(fooSubtopology, 1));
|
||||||
assertEquals(Collections.singleton("process"), streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
assertEquals(Set.of("process"), streamsGroup.currentStandbyTaskProcessIds(fooSubtopology, 2));
|
||||||
assertEquals(Collections.singleton("process"), streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
assertEquals(Set.of("process"), streamsGroup.currentWarmupTaskProcessIds(fooSubtopology, 3));
|
||||||
assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
assertEquals("process", streamsGroup.currentActiveTaskProcessId(barSubtopology, 4));
|
||||||
assertEquals(Collections.singleton("process"), streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
assertEquals(Set.of("process"), streamsGroup.currentStandbyTaskProcessIds(barSubtopology, 5));
|
||||||
assertEquals(Collections.singleton("process"), streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
assertEquals(Set.of("process"), streamsGroup.currentWarmupTaskProcessIds(barSubtopology, 6));
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
||||||
|
|
||||||
streamsGroup.removeMember(member.memberId());
|
streamsGroup.removeMember(member.memberId());
|
||||||
|
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 1));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 1));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 2));
|
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 2));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 3));
|
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 3));
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 3));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 3));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 4));
|
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 4));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 5));
|
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 5));
|
||||||
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
assertNull(streamsGroup.currentActiveTaskProcessId(zarSubtopology, 7));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
assertEquals(Set.of(), streamsGroup.currentStandbyTaskProcessIds(zarSubtopology, 8));
|
||||||
assertEquals(Collections.emptySet(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
assertEquals(Set.of(), streamsGroup.currentWarmupTaskProcessIds(zarSubtopology, 9));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -475,7 +474,7 @@ public class StreamsGroupTest {
|
||||||
assertEquals(MemberState.STABLE, member1.state());
|
assertEquals(MemberState.STABLE, member1.state());
|
||||||
assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
|
assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
|
||||||
|
|
||||||
streamsGroup.setTopology(new StreamsTopology(1, Collections.emptyMap()));
|
streamsGroup.setTopology(new StreamsTopology(1, Map.of()));
|
||||||
|
|
||||||
assertEquals(MemberState.STABLE, member1.state());
|
assertEquals(MemberState.STABLE, member1.state());
|
||||||
assertEquals(StreamsGroup.StreamsGroupState.ASSIGNING, streamsGroup.state());
|
assertEquals(StreamsGroup.StreamsGroupState.ASSIGNING, streamsGroup.state());
|
||||||
|
@ -672,7 +671,7 @@ public class StreamsGroupTest {
|
||||||
mock(GroupCoordinatorMetricsShard.class)
|
mock(GroupCoordinatorMetricsShard.class)
|
||||||
);
|
);
|
||||||
group.setGroupEpoch(1);
|
group.setGroupEpoch(1);
|
||||||
group.setTopology(new StreamsTopology(1, Collections.emptyMap()));
|
group.setTopology(new StreamsTopology(1, Map.of()));
|
||||||
group.setTargetAssignmentEpoch(1);
|
group.setTargetAssignmentEpoch(1);
|
||||||
group.updateMember(new StreamsGroupMember.Builder("member1")
|
group.updateMember(new StreamsGroupMember.Builder("member1")
|
||||||
.setMemberEpoch(1)
|
.setMemberEpoch(1)
|
||||||
|
@ -737,7 +736,7 @@ public class StreamsGroupTest {
|
||||||
assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
|
assertEquals(StreamsGroup.StreamsGroupState.NOT_READY, streamsGroup.state());
|
||||||
assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
|
assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
|
||||||
|
|
||||||
streamsGroup.setTopology(new StreamsTopology(1, Collections.emptyMap()));
|
streamsGroup.setTopology(new StreamsTopology(1, Map.of()));
|
||||||
|
|
||||||
assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
|
assertEquals(StreamsGroup.StreamsGroupState.RECONCILING, streamsGroup.state());
|
||||||
assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
|
assertThrows(GroupNotEmptyException.class, streamsGroup::validateDeleteGroup);
|
||||||
|
@ -781,7 +780,7 @@ public class StreamsGroupTest {
|
||||||
assertEquals(StreamsGroup.StreamsGroupState.EMPTY.toString(), group.stateAsString(0));
|
assertEquals(StreamsGroup.StreamsGroupState.EMPTY.toString(), group.stateAsString(0));
|
||||||
|
|
||||||
group.setGroupEpoch(1);
|
group.setGroupEpoch(1);
|
||||||
group.setTopology(new StreamsTopology(1, Collections.emptyMap()));
|
group.setTopology(new StreamsTopology(1, Map.of()));
|
||||||
group.setTargetAssignmentEpoch(1);
|
group.setTargetAssignmentEpoch(1);
|
||||||
group.updateMember(new StreamsGroupMember.Builder("member1")
|
group.updateMember(new StreamsGroupMember.Builder("member1")
|
||||||
.setMemberEpoch(1)
|
.setMemberEpoch(1)
|
||||||
|
@ -795,9 +794,9 @@ public class StreamsGroupTest {
|
||||||
.setTopologyEpoch(1)
|
.setTopologyEpoch(1)
|
||||||
.setProcessId("process1")
|
.setProcessId("process1")
|
||||||
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host1").setPort(9092))
|
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host1").setPort(9092))
|
||||||
.setClientTags(Collections.singletonMap("tag1", "value1"))
|
.setClientTags(Map.of("tag1", "value1"))
|
||||||
.setAssignedTasks(new TasksTuple(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))
|
.setAssignedTasks(new TasksTuple(Map.of(), Map.of(), Map.of()))
|
||||||
.setTasksPendingRevocation(new TasksTuple(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))
|
.setTasksPendingRevocation(new TasksTuple(Map.of(), Map.of(), Map.of()))
|
||||||
.build());
|
.build());
|
||||||
group.updateMember(new StreamsGroupMember.Builder("member2")
|
group.updateMember(new StreamsGroupMember.Builder("member2")
|
||||||
.setMemberEpoch(1)
|
.setMemberEpoch(1)
|
||||||
|
@ -811,9 +810,9 @@ public class StreamsGroupTest {
|
||||||
.setTopologyEpoch(1)
|
.setTopologyEpoch(1)
|
||||||
.setProcessId("process2")
|
.setProcessId("process2")
|
||||||
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host2").setPort(9092))
|
.setUserEndpoint(new StreamsGroupMemberMetadataValue.Endpoint().setHost("host2").setPort(9092))
|
||||||
.setClientTags(Collections.singletonMap("tag2", "value2"))
|
.setClientTags(Map.of("tag2", "value2"))
|
||||||
.setAssignedTasks(new TasksTuple(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))
|
.setAssignedTasks(new TasksTuple(Map.of(), Map.of(), Map.of()))
|
||||||
.setTasksPendingRevocation(new TasksTuple(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap()))
|
.setTasksPendingRevocation(new TasksTuple(Map.of(), Map.of(), Map.of()))
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
|
|
||||||
|
@ -821,7 +820,7 @@ public class StreamsGroupTest {
|
||||||
.setGroupId("group-id-1")
|
.setGroupId("group-id-1")
|
||||||
.setGroupState(StreamsGroup.StreamsGroupState.STABLE.toString())
|
.setGroupState(StreamsGroup.StreamsGroupState.STABLE.toString())
|
||||||
.setGroupEpoch(1)
|
.setGroupEpoch(1)
|
||||||
.setTopology(new StreamsGroupDescribeResponseData.Topology().setEpoch(1).setSubtopologies(Collections.emptyList()))
|
.setTopology(new StreamsGroupDescribeResponseData.Topology().setEpoch(1).setSubtopologies(List.of()))
|
||||||
.setAssignmentEpoch(1)
|
.setAssignmentEpoch(1)
|
||||||
.setMembers(Arrays.asList(
|
.setMembers(Arrays.asList(
|
||||||
new StreamsGroupDescribeResponseData.Member()
|
new StreamsGroupDescribeResponseData.Member()
|
||||||
|
@ -834,7 +833,7 @@ public class StreamsGroupTest {
|
||||||
.setTopologyEpoch(1)
|
.setTopologyEpoch(1)
|
||||||
.setProcessId("process1")
|
.setProcessId("process1")
|
||||||
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host1").setPort(9092))
|
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host1").setPort(9092))
|
||||||
.setClientTags(Collections.singletonList(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag1").setValue("value1")))
|
.setClientTags(List.of(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag1").setValue("value1")))
|
||||||
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
||||||
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment()),
|
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment()),
|
||||||
new StreamsGroupDescribeResponseData.Member()
|
new StreamsGroupDescribeResponseData.Member()
|
||||||
|
@ -847,7 +846,7 @@ public class StreamsGroupTest {
|
||||||
.setTopologyEpoch(1)
|
.setTopologyEpoch(1)
|
||||||
.setProcessId("process2")
|
.setProcessId("process2")
|
||||||
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host2").setPort(9092))
|
.setUserEndpoint(new StreamsGroupDescribeResponseData.Endpoint().setHost("host2").setPort(9092))
|
||||||
.setClientTags(Collections.singletonList(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag2").setValue("value2")))
|
.setClientTags(List.of(new StreamsGroupDescribeResponseData.KeyValue().setKey("tag2").setValue("value2")))
|
||||||
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
.setAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
||||||
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
.setTargetAssignment(new StreamsGroupDescribeResponseData.Assignment())
|
||||||
));
|
));
|
||||||
|
@ -861,20 +860,20 @@ public class StreamsGroupTest {
|
||||||
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
|
SnapshotRegistry snapshotRegistry = new SnapshotRegistry(LOG_CONTEXT);
|
||||||
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
GroupCoordinatorMetricsShard metricsShard = new GroupCoordinatorMetricsShard(
|
||||||
snapshotRegistry,
|
snapshotRegistry,
|
||||||
emptyMap(),
|
Map.of(),
|
||||||
new TopicPartition("__consumer_offsets", 0)
|
new TopicPartition("__consumer_offsets", 0)
|
||||||
);
|
);
|
||||||
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-foo", metricsShard);
|
StreamsGroup group = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "group-foo", metricsShard);
|
||||||
snapshotRegistry.idempotentCreateSnapshot(0);
|
snapshotRegistry.idempotentCreateSnapshot(0);
|
||||||
assertTrue(group.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(group.isInStates(Set.of("empty"), 0));
|
||||||
assertFalse(group.isInStates(Collections.singleton("Empty"), 0));
|
assertFalse(group.isInStates(Set.of("Empty"), 0));
|
||||||
|
|
||||||
group.updateMember(new StreamsGroupMember.Builder("member1")
|
group.updateMember(new StreamsGroupMember.Builder("member1")
|
||||||
.build());
|
.build());
|
||||||
snapshotRegistry.idempotentCreateSnapshot(1);
|
snapshotRegistry.idempotentCreateSnapshot(1);
|
||||||
assertTrue(group.isInStates(Collections.singleton("empty"), 0));
|
assertTrue(group.isInStates(Set.of("empty"), 0));
|
||||||
assertTrue(group.isInStates(Collections.singleton("not_ready"), 1));
|
assertTrue(group.isInStates(Set.of("not_ready"), 1));
|
||||||
assertFalse(group.isInStates(Collections.singleton("empty"), 1));
|
assertFalse(group.isInStates(Set.of("empty"), 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -883,7 +882,7 @@ public class StreamsGroupTest {
|
||||||
GroupCoordinatorMetricsShard metricsShard = mock(GroupCoordinatorMetricsShard.class);
|
GroupCoordinatorMetricsShard metricsShard = mock(GroupCoordinatorMetricsShard.class);
|
||||||
StreamsGroup streamsGroup = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "test-group", metricsShard);
|
StreamsGroup streamsGroup = new StreamsGroup(LOG_CONTEXT, snapshotRegistry, "test-group", metricsShard);
|
||||||
|
|
||||||
StreamsTopology topology = new StreamsTopology(1, Collections.emptyMap());
|
StreamsTopology topology = new StreamsTopology(1, Map.of());
|
||||||
|
|
||||||
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
||||||
when(topo.isReady()).thenReturn(true);
|
when(topo.isReady()).thenReturn(true);
|
||||||
|
@ -925,7 +924,7 @@ public class StreamsGroupTest {
|
||||||
assertTrue(streamsGroup.configuredTopology().isEmpty(), "Configured topology should not be present");
|
assertTrue(streamsGroup.configuredTopology().isEmpty(), "Configured topology should not be present");
|
||||||
assertEquals(partitionMetadata, streamsGroup.partitionMetadata());
|
assertEquals(partitionMetadata, streamsGroup.partitionMetadata());
|
||||||
|
|
||||||
StreamsTopology topology = new StreamsTopology(1, Collections.emptyMap());
|
StreamsTopology topology = new StreamsTopology(1, Map.of());
|
||||||
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
||||||
when(topo.isReady()).thenReturn(true);
|
when(topo.isReady()).thenReturn(true);
|
||||||
try (MockedStatic<InternalTopicManager> mocked = mockStatic(InternalTopicManager.class)) {
|
try (MockedStatic<InternalTopicManager> mocked = mockStatic(InternalTopicManager.class)) {
|
||||||
|
@ -955,7 +954,7 @@ public class StreamsGroupTest {
|
||||||
assertTrue(streamsGroup.configuredTopology().isEmpty(), "Configured topology should not be present");
|
assertTrue(streamsGroup.configuredTopology().isEmpty(), "Configured topology should not be present");
|
||||||
assertEquals(partitionMetadata, streamsGroup.partitionMetadata());
|
assertEquals(partitionMetadata, streamsGroup.partitionMetadata());
|
||||||
|
|
||||||
StreamsTopology topology = new StreamsTopology(1, Collections.emptyMap());
|
StreamsTopology topology = new StreamsTopology(1, Map.of());
|
||||||
streamsGroup.setTopology(topology);
|
streamsGroup.setTopology(topology);
|
||||||
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
ConfiguredTopology topo = mock(ConfiguredTopology.class);
|
||||||
when(topo.isReady()).thenReturn(true);
|
when(topo.isReady()).thenReturn(true);
|
||||||
|
@ -995,7 +994,7 @@ public class StreamsGroupTest {
|
||||||
when(topicImage.partitions()).thenReturn(Collections.singletonMap(0, null));
|
when(topicImage.partitions()).thenReturn(Collections.singletonMap(0, null));
|
||||||
when(topicsImage.getTopic("topic1")).thenReturn(topicImage);
|
when(topicsImage.getTopic("topic1")).thenReturn(topicImage);
|
||||||
StreamsTopology topology = mock(StreamsTopology.class);
|
StreamsTopology topology = mock(StreamsTopology.class);
|
||||||
when(topology.requiredTopics()).thenReturn(Collections.singleton("topic1"));
|
when(topology.requiredTopics()).thenReturn(Set.of("topic1"));
|
||||||
|
|
||||||
Map<String, TopicMetadata> partitionMetadata = streamsGroup.computePartitionMetadata(topicsImage, topology);
|
Map<String, TopicMetadata> partitionMetadata = streamsGroup.computePartitionMetadata(topicsImage, topology);
|
||||||
|
|
||||||
|
|
|
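One line in the hunk just above keeps the legacy factory, and it has to: `when(topicImage.partitions()).thenReturn(Collections.singletonMap(0, null))` cannot become `Map.of(0, null)`, because the `List.of`/`Set.of`/`Map.of` family rejects null keys, values, and elements with a `NullPointerException`, whereas `Collections.singletonMap` accepts them. A standalone sketch of the difference (illustrative class name, not from the Kafka tree):

    import java.util.Collections;
    import java.util.Map;

    public class NullValueSketch {
        public static void main(String[] args) {
            // Collections.singletonMap tolerates a null value.
            Map<Integer, String> legacy = Collections.singletonMap(0, null);
            System.out.println(legacy); // {0=null}

            // Map.of does not, so this stub cannot be migrated mechanically.
            try {
                Map<Integer, String> modern = Map.of(0, null);
                System.out.println(modern);
            } catch (NullPointerException e) {
                System.out.println("Map.of rejects null values");
            }
        }
    }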
@ -33,7 +33,6 @@ import org.junit.jupiter.api.Test;
|
||||||
import org.junit.jupiter.params.ParameterizedTest;
|
import org.junit.jupiter.params.ParameterizedTest;
|
||||||
import org.junit.jupiter.params.provider.EnumSource;
|
import org.junit.jupiter.params.provider.EnumSource;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
@ -74,12 +73,12 @@ public class TargetAssignmentBuilderTest {
|
||||||
|
|
||||||
TargetAssignmentBuilder.TargetAssignmentResult result = builder.build();
|
TargetAssignmentBuilder.TargetAssignmentResult result = builder.build();
|
||||||
|
|
||||||
List<CoordinatorRecord> expectedRecords = Collections.singletonList(
|
List<CoordinatorRecord> expectedRecords = List.of(
|
||||||
StreamsCoordinatorRecordHelpers.newStreamsGroupTargetAssignmentEpochRecord(groupId, groupEpoch)
|
StreamsCoordinatorRecordHelpers.newStreamsGroupTargetAssignmentEpochRecord(groupId, groupEpoch)
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(expectedRecords, result.records());
|
assertEquals(expectedRecords, result.records());
|
||||||
assertEquals(Collections.emptyMap(), result.targetAssignment());
|
assertEquals(Map.of(), result.targetAssignment());
|
||||||
}
|
}
|
||||||
|
|
||||||
@ParameterizedTest
|
@ParameterizedTest
|
||||||
|
|
|
@ -22,7 +22,6 @@ import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
@ -42,9 +41,9 @@ public class TasksTupleTest {
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testTasksCannotBeNull() {
|
public void testTasksCannotBeNull() {
|
||||||
assertThrows(NullPointerException.class, () -> new TasksTuple(null, Collections.emptyMap(), Collections.emptyMap()));
|
assertThrows(NullPointerException.class, () -> new TasksTuple(null, Map.of(), Map.of()));
|
||||||
assertThrows(NullPointerException.class, () -> new TasksTuple(Collections.emptyMap(), null, Collections.emptyMap()));
|
assertThrows(NullPointerException.class, () -> new TasksTuple(Map.of(), null, Map.of()));
|
||||||
assertThrows(NullPointerException.class, () -> new TasksTuple(Collections.emptyMap(), Collections.emptyMap(), null));
|
assertThrows(NullPointerException.class, () -> new TasksTuple(Map.of(), Map.of(), null));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
@ -61,11 +60,11 @@ public class TasksTupleTest {
|
||||||
TasksTuple tuple = new TasksTuple(activeTasks, standbyTasks, warmupTasks);
|
TasksTuple tuple = new TasksTuple(activeTasks, standbyTasks, warmupTasks);
|
||||||
|
|
||||||
assertEquals(activeTasks, tuple.activeTasks());
|
assertEquals(activeTasks, tuple.activeTasks());
|
||||||
assertThrows(UnsupportedOperationException.class, () -> tuple.activeTasks().put("not allowed", Collections.emptySet()));
|
assertThrows(UnsupportedOperationException.class, () -> tuple.activeTasks().put("not allowed", Set.of()));
|
||||||
assertEquals(standbyTasks, tuple.standbyTasks());
|
assertEquals(standbyTasks, tuple.standbyTasks());
|
||||||
assertThrows(UnsupportedOperationException.class, () -> tuple.standbyTasks().put("not allowed", Collections.emptySet()));
|
assertThrows(UnsupportedOperationException.class, () -> tuple.standbyTasks().put("not allowed", Set.of()));
|
||||||
assertEquals(warmupTasks, tuple.warmupTasks());
|
assertEquals(warmupTasks, tuple.warmupTasks());
|
||||||
assertThrows(UnsupportedOperationException.class, () -> tuple.warmupTasks().put("not allowed", Collections.emptySet()));
|
assertThrows(UnsupportedOperationException.class, () -> tuple.warmupTasks().put("not allowed", Set.of()));
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
|
|
|
@ -19,7 +19,6 @@ package org.apache.kafka.coordinator.group.streams.assignor;
|
||||||
import org.junit.jupiter.api.BeforeEach;
|
import org.junit.jupiter.api.BeforeEach;
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Optional;
|
import java.util.Optional;
|
||||||
|
@ -39,13 +38,13 @@ public class GroupSpecImplTest {
|
||||||
members.put("test-member", new AssignmentMemberSpec(
|
members.put("test-member", new AssignmentMemberSpec(
|
||||||
Optional.of("test-instance"),
|
Optional.of("test-instance"),
|
||||||
Optional.of("test-rack"),
|
Optional.of("test-rack"),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
));
|
));
|
||||||
|
|
||||||
groupSpec = new GroupSpecImpl(
|
groupSpec = new GroupSpecImpl(
|
||||||
|
|
|
@ -18,7 +18,6 @@ package org.apache.kafka.coordinator.group.streams.assignor;
|
||||||
|
|
||||||
import org.junit.jupiter.api.Test;
|
import org.junit.jupiter.api.Test;
|
||||||
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
@ -43,10 +42,10 @@ public class MockAssignorTest {
|
||||||
|
|
||||||
TaskAssignorException ex = assertThrows(TaskAssignorException.class, () -> assignor.assign(
|
TaskAssignorException ex = assertThrows(TaskAssignorException.class, () -> assignor.assign(
|
||||||
new GroupSpecImpl(
|
new GroupSpecImpl(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
new HashMap<>()
|
new HashMap<>()
|
||||||
),
|
),
|
||||||
new TopologyDescriberImpl(5, Collections.singletonList("test-subtopology"))
|
new TopologyDescriberImpl(5, List.of("test-subtopology"))
|
||||||
));
|
));
|
||||||
|
|
||||||
assertEquals("No member available to assign task 0 of subtopology test-subtopology", ex.getMessage());
|
assertEquals("No member available to assign task 0 of subtopology test-subtopology", ex.getMessage());
|
||||||
|
@ -58,25 +57,25 @@ public class MockAssignorTest {
|
||||||
final AssignmentMemberSpec memberSpec1 = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec1 = new AssignmentMemberSpec(
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Collections.singletonMap("test-subtopology", new HashSet<>(List.of(0))),
|
Map.of("test-subtopology", new HashSet<>(List.of(0))),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Collections.singletonMap("test-subtopology", new HashSet<>(List.of(0))),
|
Map.of("test-subtopology", new HashSet<>(List.of(0))),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
TaskAssignorException ex = assertThrows(TaskAssignorException.class, () -> assignor.assign(
|
TaskAssignorException ex = assertThrows(TaskAssignorException.class, () -> assignor.assign(
|
||||||
|
@ -84,7 +83,7 @@ public class MockAssignorTest {
|
||||||
Map.of("member1", memberSpec1, "member2", memberSpec2),
|
Map.of("member1", memberSpec1, "member2", memberSpec2),
|
||||||
new HashMap<>()
|
new HashMap<>()
|
||||||
),
|
),
|
||||||
new TopologyDescriberImpl(5, Collections.singletonList("test-subtopology"))
|
new TopologyDescriberImpl(5, List.of("test-subtopology"))
|
||||||
));
|
));
|
||||||
|
|
||||||
assertEquals("Task 0 of subtopology test-subtopology is assigned to multiple members", ex.getMessage());
|
assertEquals("Task 0 of subtopology test-subtopology is assigned to multiple members", ex.getMessage());
|
||||||
|
@ -95,10 +94,10 @@ public class MockAssignorTest {
|
||||||
|
|
||||||
final GroupAssignment result = assignor.assign(
|
final GroupAssignment result = assignor.assign(
|
||||||
new GroupSpecImpl(
|
new GroupSpecImpl(
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
new HashMap<>()
|
new HashMap<>()
|
||||||
),
|
),
|
||||||
new TopologyDescriberImpl(5, Collections.emptyList())
|
new TopologyDescriberImpl(5, List.of())
|
||||||
);
|
);
|
||||||
|
|
||||||
assertEquals(0, result.members().size());
|
assertEquals(0, result.members().size());
|
||||||
|
@ -111,18 +110,18 @@ public class MockAssignorTest {
|
||||||
final AssignmentMemberSpec memberSpec = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec = new AssignmentMemberSpec(
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
final GroupAssignment result = assignor.assign(
|
final GroupAssignment result = assignor.assign(
|
||||||
new GroupSpecImpl(
|
new GroupSpecImpl(
|
||||||
Collections.singletonMap("test_member", memberSpec),
|
Map.of("test_member", memberSpec),
|
||||||
new HashMap<>()
|
new HashMap<>()
|
||||||
),
|
),
|
||||||
new TopologyDescriberImpl(4, List.of("test-subtopology"))
|
new TopologyDescriberImpl(4, List.of("test-subtopology"))
|
||||||
|
@ -143,25 +142,25 @@ public class MockAssignorTest {
|
||||||
final AssignmentMemberSpec memberSpec1 = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec1 = new AssignmentMemberSpec(
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Optional.empty(),
|
Optional.empty(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
final GroupAssignment result = assignor.assign(
|
final GroupAssignment result = assignor.assign(
|
||||||
|
@ -200,12 +199,12 @@ public class MockAssignorTest {
|
||||||
mkEntry("test-subtopology1", new HashSet<>(List.of(0, 2, 3))),
|
mkEntry("test-subtopology1", new HashSet<>(List.of(0, 2, 3))),
|
||||||
mkEntry("test-subtopology2", new HashSet<>(List.of(0)))
|
mkEntry("test-subtopology2", new HashSet<>(List.of(0)))
|
||||||
),
|
),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
|
|
||||||
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
final AssignmentMemberSpec memberSpec2 = new AssignmentMemberSpec(
|
||||||
|
@ -215,12 +214,12 @@ public class MockAssignorTest {
|
||||||
mkEntry("test-subtopology1", new HashSet<>(List.of(1))),
|
mkEntry("test-subtopology1", new HashSet<>(List.of(1))),
|
||||||
mkEntry("test-subtopology2", new HashSet<>(List.of(3)))
|
mkEntry("test-subtopology2", new HashSet<>(List.of(3)))
|
||||||
),
|
),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
"test-process",
|
"test-process",
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap(),
|
Map.of(),
|
||||||
Collections.emptyMap()
|
Map.of()
|
||||||
);
|
);
|
||||||
final GroupAssignment result = assignor.assign(
|
final GroupAssignment result = assignor.assign(
|
||||||
new GroupSpecImpl(
|
new GroupSpecImpl(
|
||||||

@@ -22,7 +22,6 @@ import org.mockito.internal.util.collections.Sets;

import java.util.ArrayList;
import java.util.Arrays;
-import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
@@ -59,7 +58,7 @@ public class StickyTaskAssignorTest {
mkMap(mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3)),
new HashMap<>()
),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

assertEquals(3, result.members().size());
@@ -132,13 +131,13 @@ public class StickyTaskAssignorTest {

@Test
public void shouldNotMigrateActiveTaskToOtherProcess() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Collections.singleton(0))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Set.of(0))), Map.of());
-AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
Map<String, AssignmentMemberSpec> members = mkMap(mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
@@ -151,13 +150,13 @@ public class StickyTaskAssignorTest {
testMember1.activeTasks().get("test-subtopology").size() + testMember2.activeTasks().get("test-subtopology").size());

// flip the previous active tasks assignment around.
-memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Collections.singleton(2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Set.of(2))), Map.of());
members = mkMap(mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3));

result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

testMember2 = result.members().get("member2");
@@ -172,15 +171,15 @@ public class StickyTaskAssignorTest {

@Test
public void shouldMigrateActiveTasksToNewProcessWithoutChangingAllAssignments() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 2))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3");
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
@@ -208,7 +207,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
@@ -227,7 +226,7 @@ public class StickyTaskAssignorTest {
final Map<String, Set<Integer>> activeTasks = mkMap(
mkEntry("test-subtopology1", Sets.newSet(0, 1, 2, 3, 4, 5)),
mkEntry("test-subtopology2", Sets.newSet(0)));
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", activeTasks, Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", activeTasks, Map.of());
final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2");
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2));
@@ -254,9 +253,9 @@ public class StickyTaskAssignorTest {

@Test
public void shouldKeepActiveTaskStickinessWhenMoreClientThanActiveTasks() {
-AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Collections.singleton(0))), Collections.emptyMap());
+AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Set.of(0))), Map.of());
-AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(2))), Collections.emptyMap());
+AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(2))), Map.of());
-AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
AssignmentMemberSpec memberSpec4 = createAssignmentMemberSpec("process4");
AssignmentMemberSpec memberSpec5 = createAssignmentMemberSpec("process5");
Map<String, AssignmentMemberSpec> members = mkMap(
@@ -265,21 +264,21 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
assertNotNull(testMember1);
assertEquals(1, testMember1.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember1.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember1.activeTasks().get("test-subtopology"));
MemberAssignment testMember2 = result.members().get("member2");
assertNotNull(testMember2);
assertEquals(1, testMember2.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(2), testMember2.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(2), testMember2.activeTasks().get("test-subtopology"));
MemberAssignment testMember3 = result.members().get("member3");
assertNotNull(testMember3);
assertEquals(1, testMember3.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember3.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember3.activeTasks().get("test-subtopology"));
MemberAssignment testMember4 = result.members().get("member4");
assertNotNull(testMember4);
assertNull(testMember4.activeTasks().get("test-subtopology"));
@@ -289,17 +288,17 @@ public class StickyTaskAssignorTest {

// change up the assignment and make sure it is still sticky
memberSpec1 = createAssignmentMemberSpec("process1");
-memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(0))), Collections.emptyMap());
+memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(0))), Map.of());
memberSpec3 = createAssignmentMemberSpec("process3");
-memberSpec4 = createAssignmentMemberSpec("process4", mkMap(mkEntry("test-subtopology", Collections.singleton(2))), Collections.emptyMap());
+memberSpec4 = createAssignmentMemberSpec("process4", mkMap(mkEntry("test-subtopology", Set.of(2))), Map.of());
-memberSpec5 = createAssignmentMemberSpec("process5", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+memberSpec5 = createAssignmentMemberSpec("process5", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2),
mkEntry("member3", memberSpec3), mkEntry("member4", memberSpec4), mkEntry("member5", memberSpec5));

result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

testMember1 = result.members().get("member1");
@@ -308,113 +307,113 @@ public class StickyTaskAssignorTest {
testMember2 = result.members().get("member2");
assertNotNull(testMember2);
assertEquals(1, testMember2.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember2.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember2.activeTasks().get("test-subtopology"));
testMember3 = result.members().get("member3");
assertNotNull(testMember3);
assertNull(testMember3.activeTasks().get("test-subtopology"));
testMember4 = result.members().get("member4");
assertNotNull(testMember4);
assertEquals(1, testMember4.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(2), testMember4.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(2), testMember4.activeTasks().get("test-subtopology"));
testMember5 = result.members().get("member5");
assertNotNull(testMember5);
assertEquals(1, testMember5.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember5.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember5.activeTasks().get("test-subtopology"));
}

@Test
public void shouldAssignTasksToClientWithPreviousStandbyTasks() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Collections.singleton(2))));
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", Map.of(), mkMap(mkEntry("test-subtopology", Set.of(2))));
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Collections.singleton(1))));
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Map.of(), mkMap(mkEntry("test-subtopology", Set.of(1))));
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Collections.singleton(0))));
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", Map.of(), mkMap(mkEntry("test-subtopology", Set.of(0))));
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
assertNotNull(testMember1);
assertEquals(1, testMember1.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(2), testMember1.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(2), testMember1.activeTasks().get("test-subtopology"));
MemberAssignment testMember2 = result.members().get("member2");
assertNotNull(testMember2);
assertEquals(1, testMember2.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember2.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember2.activeTasks().get("test-subtopology"));
MemberAssignment testMember3 = result.members().get("member3");
assertNotNull(testMember3);
assertEquals(1, testMember3.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember3.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember3.activeTasks().get("test-subtopology"));
}

@Test
public void shouldNotAssignStandbyTasksToClientWithPreviousStandbyTasksAndCurrentActiveTasks() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Collections.singleton(0))));
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", Map.of(), mkMap(mkEntry("test-subtopology", Set.of(0))));
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Collections.singleton(1))));
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Map.of(), mkMap(mkEntry("test-subtopology", Set.of(1))));
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(2, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(2, true, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
assertNotNull(testMember1);
assertEquals(1, testMember1.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember1.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember1.activeTasks().get("test-subtopology"));
assertEquals(1, testMember1.standbyTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember1.standbyTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember1.standbyTasks().get("test-subtopology"));
MemberAssignment testMember2 = result.members().get("member2");
assertNotNull(testMember2);
assertEquals(1, testMember2.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember2.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember2.activeTasks().get("test-subtopology"));
assertEquals(1, testMember2.standbyTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember2.standbyTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember2.standbyTasks().get("test-subtopology"));
}

@Test
public void shouldAssignBasedOnCapacityWhenMultipleClientHaveStandbyTasks() {
final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1",
-mkMap(mkEntry("test-subtopology", Collections.singleton(0))),
+mkMap(mkEntry("test-subtopology", Set.of(0))),
-mkMap(mkEntry("test-subtopology", Collections.singleton(1))));
+mkMap(mkEntry("test-subtopology", Set.of(1))));
final AssignmentMemberSpec memberSpec21 = createAssignmentMemberSpec("process2",
-mkMap(mkEntry("test-subtopology", Collections.singleton(2))),
+mkMap(mkEntry("test-subtopology", Set.of(2))),
-mkMap(mkEntry("test-subtopology", Collections.singleton(1))));
+mkMap(mkEntry("test-subtopology", Set.of(1))));
final AssignmentMemberSpec memberSpec22 = createAssignmentMemberSpec("process2",
-Collections.emptyMap(), Collections.emptyMap());
+Map.of(), Map.of());
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1),
mkEntry("member2_1", memberSpec21), mkEntry("member2_2", memberSpec22));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

MemberAssignment testMember1 = result.members().get("member1");
assertNotNull(testMember1);
assertEquals(1, testMember1.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(0), testMember1.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(0), testMember1.activeTasks().get("test-subtopology"));
MemberAssignment testMember21 = result.members().get("member2_1");
assertNotNull(testMember21);
assertEquals(1, testMember21.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(2), testMember21.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(2), testMember21.activeTasks().get("test-subtopology"));
MemberAssignment testMember22 = result.members().get("member2_2");
assertNotNull(testMember22);
assertEquals(1, testMember22.activeTasks().get("test-subtopology").size());
-assertEquals(Collections.singleton(1), testMember22.activeTasks().get("test-subtopology"));
+assertEquals(Set.of(1), testMember22.activeTasks().get("test-subtopology"));
}

@Test
public void shouldAssignStandbyTasksToDifferentClientThanCorrespondingActiveTaskIsAssignedTo() {
final Map<String, Set<Integer>> tasks = mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 3)));
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Collections.singleton(0))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Set.of(0))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Collections.singleton(2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Set.of(2))), Map.of());
-final AssignmentMemberSpec memberSpec4 = createAssignmentMemberSpec("process4", mkMap(mkEntry("test-subtopology", Collections.singleton(3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec4 = createAssignmentMemberSpec("process4", mkMap(mkEntry("test-subtopology", Set.of(3))), Map.of());
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2),
mkEntry("member3", memberSpec3), mkEntry("member4", memberSpec4));
@@ -422,7 +421,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(4, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, true, List.of("test-subtopology"))
);

final List<Integer> member1TaskIds = getAllStandbyTaskIds(result, "member1");
@@ -448,9 +447,9 @@ public class StickyTaskAssignorTest {

@Test
public void shouldAssignMultipleReplicasOfStandbyTask() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Collections.singleton(0))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Set.of(0))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Collections.singleton(1))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Set.of(1))), Map.of());
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Collections.singleton(2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Set.of(2))), Map.of());
Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2),
mkEntry("member3", memberSpec3));
@@ -458,7 +457,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "2"))),
-new TopologyDescriberImpl(3, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, true, List.of("test-subtopology"))
);

assertEquals(Sets.newSet(1, 2), new HashSet<>(getAllStandbyTaskIds(result, "member1")));
@@ -475,7 +474,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(1, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(1, true, List.of("test-subtopology"))
);

assertTrue(getAllStandbyTasks(result, "member1").isEmpty());
@@ -493,7 +492,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(3, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, true, List.of("test-subtopology"))
);

assertEquals(Sets.newSet(0, 1, 2), new HashSet<>(getAllActiveTaskIds(result)));
@@ -513,7 +512,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskIds(result, "member1_1", "member1_2", "member1_3").size());
@@ -535,7 +534,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

assertEquals(3, getAllActiveTaskIds(result, "member1", "member2", "member3", "member4", "member5", "member6").size());
@@ -557,7 +556,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(3, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, true, List.of("test-subtopology"))
);

for (String memberId : result.members().keySet()) {
@@ -595,7 +594,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(4, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, true, List.of("test-subtopology"))
);

for (final String memberId : allMemberIds) {
@@ -610,9 +609,9 @@ public class StickyTaskAssignorTest {

@Test
public void shouldNotHaveSameAssignmentOnAnyTwoHostsWhenThereArePreviousActiveTasks() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(1, 2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(1, 2))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(3))), Map.of());
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0))), Map.of());
final AssignmentMemberSpec memberSpec4 = createAssignmentMemberSpec("process4");
final List<String> allMemberIds = asList("member1", "member2", "member3", "member4");
Map<String, AssignmentMemberSpec> members = mkMap(
@@ -621,7 +620,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(4, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, true, List.of("test-subtopology"))
);

for (final String memberId : allMemberIds) {
@@ -649,7 +648,7 @@ public class StickyTaskAssignorTest {
final GroupAssignment result = assignor.assign(
new GroupSpecImpl(members,
mkMap(mkEntry(NUM_STANDBY_REPLICAS_CONFIG, "1"))),
-new TopologyDescriberImpl(4, true, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, true, List.of("test-subtopology"))
);

for (final String memberId : allMemberIds) {
@@ -664,7 +663,7 @@ public class StickyTaskAssignorTest {

@Test
public void shouldReBalanceTasksAcrossAllClientsWhenCapacityAndTaskCountTheSame() {
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 3))), Map.of());
final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1");
final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2");
final AssignmentMemberSpec memberSpec4 = createAssignmentMemberSpec("process4");
@@ -673,7 +672,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(4, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskCount(result, "member1"));
@@ -684,7 +683,7 @@ public class StickyTaskAssignorTest {

@Test
public void shouldReBalanceTasksAcrossClientsWhenCapacityLessThanTaskCount() {
-final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 3))), Map.of());
final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1");
final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2");
Map<String, AssignmentMemberSpec> members = mkMap(
@@ -692,7 +691,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(4, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskCount(result, "member1"));
@@ -702,7 +701,7 @@ public class StickyTaskAssignorTest {

@Test
public void shouldRebalanceTasksToClientsBasedOnCapacity() {
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 3, 2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 3, 2))), Map.of());
final AssignmentMemberSpec memberSpec31 = createAssignmentMemberSpec("process3");
final AssignmentMemberSpec memberSpec32 = createAssignmentMemberSpec("process3");
Map<String, AssignmentMemberSpec> members = mkMap(
@@ -710,7 +709,7 @@ public class StickyTaskAssignorTest {

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(3, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(3, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskCount(result, "member2"));
@@ -721,15 +720,15 @@ public class StickyTaskAssignorTest {
public void shouldMoveMinimalNumberOfTasksWhenPreviouslyAboveCapacityAndNewClientAdded() {
final Set<Integer> p1PrevTasks = Sets.newSet(0, 2);
final Set<Integer> p2PrevTasks = Sets.newSet(1, 3);
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", p1PrevTasks)), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", p1PrevTasks)), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", p2PrevTasks)), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", p2PrevTasks)), Map.of());
final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3");
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(4, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(4, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskCount(result, "member3"));
@@ -743,14 +742,14 @@ public class StickyTaskAssignorTest {

@Test
public void shouldNotMoveAnyTasksWhenNewTasksAdded() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(2, 3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(2, 3))), Map.of());
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(6, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(6, false, List.of("test-subtopology"))
);

final List<Integer> mem1Tasks = getAllActiveTaskIds(result, "member1");
@@ -763,15 +762,15 @@ public class StickyTaskAssignorTest {

@Test
public void shouldAssignNewTasksToNewClientWhenPreviousTasksAssignedToOldClients() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(2, 1))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(2, 1))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 3))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 3))), Map.of());
final AssignmentMemberSpec memberSpec3 = createAssignmentMemberSpec("process3");
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(6, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(6, false, List.of("test-subtopology"))
);

final List<Integer> mem1Tasks = getAllActiveTaskIds(result, "member1");
@@ -797,7 +796,7 @@ public class StickyTaskAssignorTest {
mkMap(mkEntry("test-subtopology2", Sets.newSet(0, 1, 3))),
mkMap(mkEntry("test-subtopology0", Sets.newSet(2)), mkEntry("test-subtopology1", Sets.newSet(2))));
final AssignmentMemberSpec newMemberSpec = createAssignmentMemberSpec("process4",
-Collections.emptyMap(),
+Map.of(),
mkMap(mkEntry("test-subtopology0", Sets.newSet(0, 1, 2, 3)), mkEntry("test-subtopology1", Sets.newSet(0, 1, 2, 3)), mkEntry("test-subtopology2", Sets.newSet(0, 1, 2, 3))));
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("member3", memberSpec3), mkEntry("newMember", newMemberSpec));
@@ -826,10 +825,10 @@ public class StickyTaskAssignorTest {
mkMap(mkEntry("test-subtopology0", Sets.newSet(0)), mkEntry("test-subtopology1", Sets.newSet(1)), mkEntry("test-subtopology2", Sets.newSet(2))),
mkMap(mkEntry("test-subtopology0", Sets.newSet(1, 2, 3)), mkEntry("test-subtopology1", Sets.newSet(0, 2, 3)), mkEntry("test-subtopology2", Sets.newSet(0, 1, 3))));
final AssignmentMemberSpec bounce1 = createAssignmentMemberSpec("bounce1",
-Collections.emptyMap(),
+Map.of(),
mkMap(mkEntry("test-subtopology2", Sets.newSet(0, 1, 3))));
final AssignmentMemberSpec bounce2 = createAssignmentMemberSpec("bounce2",
-Collections.emptyMap(),
+Map.of(),
mkMap(mkEntry("test-subtopology0", Sets.newSet(2, 3)), mkEntry("test-subtopology1", Sets.newSet(0))));
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("bounce_member1", bounce1), mkEntry("bounce_member2", bounce2));
@@ -851,14 +850,14 @@ public class StickyTaskAssignorTest {

@Test
public void shouldAssignTasksToNewClient() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(1, 2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(1, 2))), Map.of());
final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2");
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(2, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(2, false, List.of("test-subtopology"))
);

assertEquals(1, getAllActiveTaskCount(result, "member1"));
@@ -866,15 +865,15 @@ public class StickyTaskAssignorTest {

@Test
public void shouldAssignTasksToNewClientWithoutFlippingAssignmentBetweenExistingClients() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(3, 4, 5))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", mkMap(mkEntry("test-subtopology", Sets.newSet(3, 4, 5))), Map.of());
final AssignmentMemberSpec newMemberSpec = createAssignmentMemberSpec("process3");
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("newMember", newMemberSpec));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(6, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(6, false, List.of("test-subtopology"))
);

final List<Integer> mem1Tasks = getAllActiveTaskIds(result, "member1");
@@ -892,15 +891,15 @@ public class StickyTaskAssignorTest {

@Test
public void shouldAssignTasksToNewClientWithoutFlippingAssignmentBetweenExistingAndBouncedClients() {
-final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 6))), Collections.emptyMap());
+final AssignmentMemberSpec memberSpec1 = createAssignmentMemberSpec("process1", mkMap(mkEntry("test-subtopology", Sets.newSet(0, 1, 2, 6))), Map.of());
-final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Collections.emptyMap(), mkMap(mkEntry("test-subtopology", Sets.newSet(3, 4, 5))));
+final AssignmentMemberSpec memberSpec2 = createAssignmentMemberSpec("process2", Map.of(), mkMap(mkEntry("test-subtopology", Sets.newSet(3, 4, 5))));
final AssignmentMemberSpec newMemberSpec = createAssignmentMemberSpec("newProcess");
final Map<String, AssignmentMemberSpec> members = mkMap(
mkEntry("member1", memberSpec1), mkEntry("member2", memberSpec2), mkEntry("newMember", newMemberSpec));

GroupAssignment result = assignor.assign(
new GroupSpecImpl(members, new HashMap<>()),
-new TopologyDescriberImpl(7, false, Collections.singletonList("test-subtopology"))
+new TopologyDescriberImpl(7, false, List.of("test-subtopology"))
);

final List<Integer> mem1Tasks = getAllActiveTaskIds(result, "member1");
@@ -1054,13 +1053,13 @@ public class StickyTaskAssignorTest {
return new AssignmentMemberSpec(
Optional.empty(),
Optional.empty(),
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap(),
+Map.of(),
processId,
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap());
+Map.of());
}

private AssignmentMemberSpec createAssignmentMemberSpec(final String processId, final Map<String, Set<Integer>> prevActiveTasks,
@@ -1070,11 +1069,11 @@ public class StickyTaskAssignorTest {
Optional.empty(),
prevActiveTasks,
prevStandbyTasks,
-Collections.emptyMap(),
+Map.of(),
processId,
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap(),
+Map.of(),
-Collections.emptyMap());
+Map.of());
}

static class TopologyDescriberImpl implements TopologyDescriber {
|
|
|
@@ -24,7 +24,6 @@ import org.apache.kafka.coordinator.group.generated.StreamsGroupTopologyValue.To
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.OptionalInt;
@@ -46,23 +45,23 @@ public class ChangelogTopicsTest {
         .setTopicConfigs(List.of(TOPIC_CONFIG));
     private static final Subtopology SUBTOPOLOGY_NO_SOURCE = new Subtopology()
         .setSubtopologyId("SUBTOPOLOGY_NO_SOURCE")
-        .setSourceTopics(Collections.emptyList())
-        .setRepartitionSinkTopics(Collections.singletonList(SINK_TOPIC_NAME))
+        .setSourceTopics(List.of())
+        .setRepartitionSinkTopics(List.of(SINK_TOPIC_NAME))
         .setRepartitionSourceTopics(List.of(REPARTITION_TOPIC_INFO))
-        .setStateChangelogTopics(Collections.emptyList());
+        .setStateChangelogTopics(List.of());
     private static final Subtopology SUBTOPOLOGY_STATELESS = new Subtopology()
         .setSubtopologyId("SUBTOPOLOGY_STATELESS")
-        .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_NAME))
-        .setRepartitionSinkTopics(Collections.singletonList(SINK_TOPIC_NAME))
+        .setSourceTopics(List.of(SOURCE_TOPIC_NAME))
+        .setRepartitionSinkTopics(List.of(SINK_TOPIC_NAME))
         .setRepartitionSourceTopics(List.of(REPARTITION_TOPIC_INFO))
-        .setStateChangelogTopics(Collections.emptyList());
+        .setStateChangelogTopics(List.of());
     private static final TopicInfo SOURCE_CHANGELOG_TOPIC_CONFIG = new TopicInfo()
         .setName(SOURCE_TOPIC_NAME)
         .setTopicConfigs(List.of(TOPIC_CONFIG));
     private static final Subtopology SUBTOPOLOGY_SOURCE_CHANGELOG = new Subtopology()
         .setSubtopologyId("SUBTOPOLOGY_SOURCE_CHANGELOG")
-        .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_NAME))
-        .setRepartitionSinkTopics(Collections.singletonList(SINK_TOPIC_NAME))
+        .setSourceTopics(List.of(SOURCE_TOPIC_NAME))
+        .setRepartitionSinkTopics(List.of(SINK_TOPIC_NAME))
         .setRepartitionSourceTopics(List.of(REPARTITION_TOPIC_INFO))
         .setStateChangelogTopics(List.of(SOURCE_CHANGELOG_TOPIC_CONFIG));
     private static final TopicInfo CHANGELOG_TOPIC_CONFIG = new TopicInfo()
@@ -70,14 +69,14 @@ public class ChangelogTopicsTest {
         .setTopicConfigs(List.of(TOPIC_CONFIG));
     private static final Subtopology SUBTOPOLOGY_STATEFUL = new Subtopology()
         .setSubtopologyId("SUBTOPOLOGY_STATEFUL")
-        .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_NAME))
-        .setRepartitionSinkTopics(Collections.singletonList(SINK_TOPIC_NAME))
+        .setSourceTopics(List.of(SOURCE_TOPIC_NAME))
+        .setRepartitionSinkTopics(List.of(SINK_TOPIC_NAME))
         .setRepartitionSourceTopics(List.of(REPARTITION_TOPIC_INFO))
         .setStateChangelogTopics(List.of(CHANGELOG_TOPIC_CONFIG));
     private static final Subtopology SUBTOPOLOGY_BOTH = new Subtopology()
         .setSubtopologyId("SUBTOPOLOGY_BOTH")
-        .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_NAME))
-        .setRepartitionSinkTopics(Collections.singletonList(SINK_TOPIC_NAME))
+        .setSourceTopics(List.of(SOURCE_TOPIC_NAME))
+        .setRepartitionSinkTopics(List.of(SINK_TOPIC_NAME))
         .setRepartitionSourceTopics(List.of(REPARTITION_TOPIC_INFO))
         .setStateChangelogTopics(List.of(SOURCE_CHANGELOG_TOPIC_CONFIG, CHANGELOG_TOPIC_CONFIG));
 
@@ -104,7 +103,7 @@ public class ChangelogTopicsTest {
             new ChangelogTopics(LOG_CONTEXT, subtopologies, ChangelogTopicsTest::topicPartitionProvider);
         Map<String, Integer> setup = changelogTopics.setup();
 
-        assertEquals(Collections.emptyMap(), setup);
+        assertEquals(Map.of(), setup);
     }
 
     @Test
@@ -126,7 +125,7 @@ public class ChangelogTopicsTest {
            new ChangelogTopics(LOG_CONTEXT, subtopologies, ChangelogTopicsTest::topicPartitionProvider);
         Map<String, Integer> setup = changelogTopics.setup();
 
-        assertEquals(Collections.emptyMap(), setup);
+        assertEquals(Map.of(), setup);
     }
 
     @Test
@@ -21,7 +21,6 @@ import org.apache.kafka.common.message.StreamsGroupDescribeResponseData;
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
@@ -34,13 +33,13 @@ public class ConfiguredInternalTopicTest {
     @Test
     public void testConstructorWithNullName() {
         assertThrows(NullPointerException.class,
-            () -> new ConfiguredInternalTopic(null, 1, Optional.empty(), Collections.emptyMap()));
+            () -> new ConfiguredInternalTopic(null, 1, Optional.empty(), Map.of()));
     }
 
     @Test
     public void testConstructorWithInvalidName() {
         assertThrows(InvalidTopicException.class,
-            () -> new ConfiguredInternalTopic("invalid topic name", 1, Optional.empty(), Collections.emptyMap()));
+            () -> new ConfiguredInternalTopic("invalid topic name", 1, Optional.empty(), Map.of()));
     }
 
     @Test
@@ -52,7 +51,7 @@ public class ConfiguredInternalTopicTest {
     @Test
     public void testConstructorWithZeroPartitions() {
         assertThrows(IllegalArgumentException.class,
-            () -> new ConfiguredInternalTopic("test-topic", 0, Optional.empty(), Collections.emptyMap()));
+            () -> new ConfiguredInternalTopic("test-topic", 0, Optional.empty(), Map.of()));
     }
 
     @Test
@@ -20,7 +20,6 @@ import org.apache.kafka.common.message.StreamsGroupDescribeResponseData;
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -38,9 +37,9 @@ public class ConfiguredSubtopologyTest {
         assertThrows(NullPointerException.class,
             () -> new ConfiguredSubtopology(
                 null,
-                Collections.emptyMap(),
-                Collections.emptySet(),
-                Collections.emptyMap()
+                Map.of(),
+                Set.of(),
+                Map.of()
             )
         );
     }
@@ -49,10 +48,10 @@ public class ConfiguredSubtopologyTest {
    public void testConstructorWithNullRepartitionSourceTopics() {
         assertThrows(NullPointerException.class,
             () -> new ConfiguredSubtopology(
-                Collections.emptySet(),
+                Set.of(),
                 null,
-                Collections.emptySet(),
-                Collections.emptyMap()
+                Set.of(),
+                Map.of()
             )
         );
     }
@@ -61,10 +60,10 @@ public class ConfiguredSubtopologyTest {
     public void testConstructorWithNullRepartitionSinkTopics() {
         assertThrows(NullPointerException.class,
             () -> new ConfiguredSubtopology(
-                Collections.emptySet(),
-                Collections.emptyMap(),
+                Set.of(),
+                Map.of(),
                 null,
-                Collections.emptyMap()
+                Map.of()
             )
         );
     }
@@ -73,9 +72,9 @@ public class ConfiguredSubtopologyTest {
     public void testConstructorWithNullStateChangelogTopics() {
         assertThrows(NullPointerException.class,
             () -> new ConfiguredSubtopology(
-                Collections.emptySet(),
-                Collections.emptyMap(),
-                Collections.emptySet(),
+                Set.of(),
+                Map.of(),
+                Set.of(),
                 null
             )
         );
@@ -22,7 +22,6 @@ import org.apache.kafka.common.message.StreamsGroupDescribeResponseData;
 import org.junit.jupiter.api.Test;
 import org.mockito.Mockito;
 
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Optional;
@@ -44,7 +43,7 @@ public class ConfiguredTopologyTest {
             () -> new ConfiguredTopology(
                 0,
                 null,
-                Collections.emptyMap(),
+                Map.of(),
                 Optional.empty()
             )
         );
@@ -68,7 +67,7 @@ public class ConfiguredTopologyTest {
             () -> new ConfiguredTopology(
                 0,
                 Optional.empty(),
-                Collections.emptyMap(),
+                Map.of(),
                 null
             )
         );
@@ -80,7 +79,7 @@ public class ConfiguredTopologyTest {
             () -> new ConfiguredTopology(
                 -1,
                 Optional.of(new TreeMap<>()),
-                Collections.emptyMap(),
+                Map.of(),
                 Optional.empty()
             )
         );
@@ -92,7 +91,7 @@ public class ConfiguredTopologyTest {
             () -> new ConfiguredTopology(
                 1,
                 Optional.empty(),
-                Collections.emptyMap(),
+                Map.of(),
                 Optional.empty()
             )
         );
@@ -21,7 +21,6 @@ import org.apache.kafka.common.utils.LogContext;
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.Map;
 import java.util.OptionalInt;
 import java.util.Set;
@@ -49,7 +48,7 @@ public class CopartitionedTopicsEnforcerTest {
 
     @Test
     public void shouldThrowIllegalStateExceptionIfNoPartitionsFoundForCoPartitionedTopic() {
-        final Map<String, Integer> topicPartitionCounts = Collections.emptyMap();
+        final Map<String, Integer> topicPartitionCounts = Map.of();
         final CopartitionedTopicsEnforcer enforcer =
             new CopartitionedTopicsEnforcer(LOG_CONTEXT, topicPartitionProvider(topicPartitionCounts));
 
@@ -29,8 +29,8 @@ import org.apache.kafka.coordinator.group.streams.TopicMetadata;
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
@@ -94,7 +94,7 @@ class InternalTopicManagerTest {
                 .setReplicationFactor((short) -1)
                 .setConfigs(
                     new CreatableTopicConfigCollection(
-                        Collections.singletonList(new CreatableTopicConfig().setName(CONFIG_KEY).setValue(CONFIG_VALUE)).iterator())
+                        List.of(new CreatableTopicConfig().setName(CONFIG_KEY).setValue(CONFIG_VALUE)).iterator())
                 ),
             internalTopicsToBeCreated.get(STATE_CHANGELOG_TOPIC_1));
 
@@ -125,7 +125,7 @@ class InternalTopicManagerTest {
                 new ConfiguredInternalTopic(REPARTITION_TOPIC,
                     2,
                     Optional.of((short) 3),
-                    Collections.emptyMap()
+                    Map.of()
                 )
             ),
             Set.of(),
@@ -133,7 +133,7 @@ class InternalTopicManagerTest {
                 new ConfiguredInternalTopic(STATE_CHANGELOG_TOPIC_2,
                     2,
                     Optional.empty(),
-                    Collections.emptyMap()
+                    Map.of()
                 )))
             )
         );
@@ -143,12 +143,12 @@ class InternalTopicManagerTest {
         // Create a subtopology source -> repartition
         Subtopology subtopology1 = new Subtopology()
             .setSubtopologyId(SUBTOPOLOGY_1)
-            .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_1))
-            .setRepartitionSinkTopics(Collections.singletonList(REPARTITION_TOPIC))
-            .setStateChangelogTopics(Collections.singletonList(
+            .setSourceTopics(List.of(SOURCE_TOPIC_1))
+            .setRepartitionSinkTopics(List.of(REPARTITION_TOPIC))
+            .setStateChangelogTopics(List.of(
                 new StreamsGroupTopologyValue.TopicInfo()
                     .setName(STATE_CHANGELOG_TOPIC_1)
-                    .setTopicConfigs(Collections.singletonList(
+                    .setTopicConfigs(List.of(
                         new StreamsGroupTopologyValue.TopicConfig()
                             .setKey(CONFIG_KEY)
                             .setValue(CONFIG_VALUE)
@@ -157,20 +157,20 @@ class InternalTopicManagerTest {
         // Create a subtopology repartition/source2 -> sink (copartitioned)
         Subtopology subtopology2 = new Subtopology()
             .setSubtopologyId(SUBTOPOLOGY_2)
-            .setSourceTopics(Collections.singletonList(SOURCE_TOPIC_2))
-            .setRepartitionSourceTopics(Collections.singletonList(
+            .setSourceTopics(List.of(SOURCE_TOPIC_2))
+            .setRepartitionSourceTopics(List.of(
                 new StreamsGroupTopologyValue.TopicInfo()
                     .setName(REPARTITION_TOPIC)
                     .setReplicationFactor((short) 3)
             ))
-            .setStateChangelogTopics(Collections.singletonList(
+            .setStateChangelogTopics(List.of(
                 new StreamsGroupTopologyValue.TopicInfo()
                     .setName(STATE_CHANGELOG_TOPIC_2)
             ))
-            .setCopartitionGroups(Collections.singletonList(
+            .setCopartitionGroups(List.of(
                 new StreamsGroupTopologyValue.CopartitionGroup()
-                    .setSourceTopics(Collections.singletonList((short) 0))
-                    .setRepartitionSourceTopics(Collections.singletonList((short) 0))
+                    .setSourceTopics(List.of((short) 0))
+                    .setRepartitionSourceTopics(List.of((short) 0))
             ));
 
         return new StreamsTopology(3, Map.of(SUBTOPOLOGY_1, subtopology1, SUBTOPOLOGY_2, subtopology2));
@@ -23,7 +23,6 @@ import org.apache.kafka.coordinator.group.generated.StreamsGroupTopologyValue.To
 
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -198,7 +197,7 @@ public class RepartitionTopicsTest {
 
         final Map<String, Integer> setup = repartitionTopics.setup();
 
-        assertEquals(Collections.emptyMap(), setup);
+        assertEquals(Map.of(), setup);
     }
 
 }
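
Note (an aside, not part of the diff above): the collections returned by the List.of() / Set.of() / Map.of() factories differ from the legacy Collections.empty*() and singleton*() helpers in one subtle way: they reject null on lookup as well as on construction. In test fixtures like the ones touched here that is typically harmless, but it is worth keeping in mind for similar migrations elsewhere. The standalone sketch below illustrates the difference; the class name NullLookupDemo is made up for this note and does not appear in the commit.

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;

    // Standalone illustration of the null-lookup difference between the legacy
    // Collections helpers and the JDK 9+ immutable collection factories.
    public class NullLookupDemo {

        public static void main(String[] args) {
            // Legacy helpers tolerate null lookups.
            System.out.println(Collections.emptyList().contains(null)); // prints: false
            System.out.println(Collections.emptyMap().get(null));       // prints: null

            // The *.of() factories throw NullPointerException on null lookups.
            try {
                List.of("a").contains(null);
            } catch (NullPointerException e) {
                System.out.println("List.of(...).contains(null) -> NullPointerException");
            }
            try {
                Map.of("k", "v").containsKey(null);
            } catch (NullPointerException e) {
                System.out.println("Map.of(...).containsKey(null) -> NullPointerException");
            }
        }
    }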