MINOR: Cleanup Connect Module (1/n) (#19869)

Now that Kafka supports Java 17, this PR makes some cleanup changes in the
connect module. The changes are limited to a subset of files; future PRs
will follow.
The changes mostly include:
- Collections.emptyList(), Collections.singletonList() and
Arrays.asList() are replaced with List.of()
- Collections.emptyMap() and Collections.singletonMap() are replaced
with Map.of()
- Collections.singleton() is replaced with Set.of()

Submodules targeted: api, basic-auth-extensions, file, json, mirror,
mirror-client
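
To illustrate the replacement pattern described above, here is a minimal sketch (the class name and values are made up, not code from this PR). One caveat visible in the diffs below: the JDK factories List.of()/Map.of()/Set.of() return immutable collections and reject null elements, which is why call sites that deliberately pass null (e.g. Collections.singletonList(null) in the tests) keep the old Collections factories.

```java
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Hypothetical illustration of the migration; names and values are made up.
class CollectionFactoryMigration {
    // Before: pre-Java 9 factory methods
    static final List<String> TOPICS_OLD = Arrays.asList("a", "b");
    static final List<String> EMPTY_OLD = Collections.emptyList();
    static final Map<String, String> CONFIG_OLD = Collections.singletonMap("key", "value");
    static final Set<String> NAMES_OLD = Collections.singleton("only");

    // After: Java 9+ collection factories (immutable and null-hostile)
    static final List<String> TOPICS_NEW = List.of("a", "b");
    static final List<String> EMPTY_NEW = List.of();
    static final Map<String, String> CONFIG_NEW = Map.of("key", "value");
    static final Set<String> NAMES_NEW = Set.of("only");
}
```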

Reviewers: Ken Huang <s7133700@gmail.com>, TengYao Chi
<kitingiao@gmail.com>, Chia-Ping Tsai <chia7712@gmail.com>
Sanskar Jhajharia 2025-07-10 22:05:41 +05:30 committed by GitHub
parent 56a3c6dde9
commit 27383970b6
58 changed files with 506 additions and 632 deletions

ConnectSchema.java

@@ -20,59 +20,55 @@ import org.apache.kafka.connect.errors.DataException;
 import java.math.BigDecimal;
 import java.nio.ByteBuffer;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.EnumMap;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
+import java.util.stream.Collectors;
 
 public class ConnectSchema implements Schema {
     /**
      * Maps {@link Schema.Type}s to a list of Java classes that can be used to represent them.
      */
-    private static final Map<Type, List<Class<?>>> SCHEMA_TYPE_CLASSES = new EnumMap<>(Type.class);
+    private static final Map<Type, List<Class<?>>> SCHEMA_TYPE_CLASSES = Collections.unmodifiableMap(new EnumMap<>(Map.ofEntries(
+        Map.entry(Type.INT8, List.of(Byte.class)),
+        Map.entry(Type.INT16, List.of(Short.class)),
+        Map.entry(Type.INT32, List.of(Integer.class)),
+        Map.entry(Type.INT64, List.of(Long.class)),
+        Map.entry(Type.FLOAT32, List.of(Float.class)),
+        Map.entry(Type.FLOAT64, List.of(Double.class)),
+        Map.entry(Type.BOOLEAN, List.of(Boolean.class)),
+        Map.entry(Type.STRING, List.of(String.class)),
+        // Bytes are special and have 2 representations. byte[] causes problems because it doesn't handle equals() and
+        // hashCode() like we want objects to, so we support both byte[] and ByteBuffer. Using plain byte[] can cause
+        // those methods to fail, so ByteBuffers are recommended
+        Map.entry(Type.BYTES, List.of(byte[].class, ByteBuffer.class)),
+        Map.entry(Type.ARRAY, List.of(List.class)),
+        Map.entry(Type.MAP, List.of(Map.class)),
+        Map.entry(Type.STRUCT, List.of(Struct.class))
+    )));
 
     /**
      * Maps known logical types to a list of Java classes that can be used to represent them.
      */
-    private static final Map<String, List<Class<?>>> LOGICAL_TYPE_CLASSES = new HashMap<>();
+    // We don't need to put these into JAVA_CLASS_SCHEMA_TYPES since that's only used to determine schemas for
+    // schemaless data and logical types will have ambiguous schemas (e.g. many of them use the same Java class) so
+    // they should not be used without schemas.
+    private static final Map<String, List<Class<?>>> LOGICAL_TYPE_CLASSES = Map.of(
+        Decimal.LOGICAL_NAME, List.of(BigDecimal.class),
+        Date.LOGICAL_NAME, List.of(java.util.Date.class),
+        Time.LOGICAL_NAME, List.of(java.util.Date.class),
+        Timestamp.LOGICAL_NAME, List.of(java.util.Date.class)
+    );
 
     /**
      * Maps the Java classes to the corresponding {@link Schema.Type}.
      */
-    private static final Map<Class<?>, Type> JAVA_CLASS_SCHEMA_TYPES = new HashMap<>();
-
-    static {
-        SCHEMA_TYPE_CLASSES.put(Type.INT8, Collections.singletonList(Byte.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT16, Collections.singletonList(Short.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT32, Collections.singletonList(Integer.class));
-        SCHEMA_TYPE_CLASSES.put(Type.INT64, Collections.singletonList(Long.class));
-        SCHEMA_TYPE_CLASSES.put(Type.FLOAT32, Collections.singletonList(Float.class));
-        SCHEMA_TYPE_CLASSES.put(Type.FLOAT64, Collections.singletonList(Double.class));
-        SCHEMA_TYPE_CLASSES.put(Type.BOOLEAN, Collections.singletonList(Boolean.class));
-        SCHEMA_TYPE_CLASSES.put(Type.STRING, Collections.singletonList(String.class));
-        // Bytes are special and have 2 representations. byte[] causes problems because it doesn't handle equals() and
-        // hashCode() like we want objects to, so we support both byte[] and ByteBuffer. Using plain byte[] can cause
-        // those methods to fail, so ByteBuffers are recommended
-        SCHEMA_TYPE_CLASSES.put(Type.BYTES, Arrays.asList(byte[].class, ByteBuffer.class));
-        SCHEMA_TYPE_CLASSES.put(Type.ARRAY, Collections.singletonList(List.class));
-        SCHEMA_TYPE_CLASSES.put(Type.MAP, Collections.singletonList(Map.class));
-        SCHEMA_TYPE_CLASSES.put(Type.STRUCT, Collections.singletonList(Struct.class));
-        for (Map.Entry<Type, List<Class<?>>> schemaClasses : SCHEMA_TYPE_CLASSES.entrySet()) {
-            for (Class<?> schemaClass : schemaClasses.getValue())
-                JAVA_CLASS_SCHEMA_TYPES.put(schemaClass, schemaClasses.getKey());
-        }
-        LOGICAL_TYPE_CLASSES.put(Decimal.LOGICAL_NAME, Collections.singletonList(BigDecimal.class));
-        LOGICAL_TYPE_CLASSES.put(Date.LOGICAL_NAME, Collections.singletonList(java.util.Date.class));
-        LOGICAL_TYPE_CLASSES.put(Time.LOGICAL_NAME, Collections.singletonList(java.util.Date.class));
-        LOGICAL_TYPE_CLASSES.put(Timestamp.LOGICAL_NAME, Collections.singletonList(java.util.Date.class));
-        // We don't need to put these into JAVA_CLASS_SCHEMA_TYPES since that's only used to determine schemas for
-        // schemaless data and logical types will have ambiguous schemas (e.g. many of them use the same Java class) so
-        // they should not be used without schemas.
-    }
+    private static final Map<Class<?>, Type> JAVA_CLASS_SCHEMA_TYPES = SCHEMA_TYPE_CLASSES.entrySet()
+        .stream()
+        .flatMap(entry -> entry.getValue().stream().map(klass -> Map.entry(klass, entry.getKey())))
+        .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
 
     // The type of the field
     private final Type type;
@@ -110,7 +106,7 @@ public class ConnectSchema implements Schema {
         this.parameters = parameters;
 
         if (this.type == Type.STRUCT) {
-            this.fields = fields == null ? Collections.emptyList() : fields;
+            this.fields = fields == null ? List.of() : fields;
             this.fieldsByName = new HashMap<>(this.fields.size());
             for (Field field : this.fields)
                 fieldsByName.put(field.name(), field);
@@ -283,9 +279,12 @@ public class ConnectSchema implements Schema {
     }
 
     private static List<Class<?>> expectedClassesFor(Schema schema) {
-        List<Class<?>> expectedClasses = LOGICAL_TYPE_CLASSES.get(schema.name());
+        List<Class<?>> expectedClasses = null;
+        if (schema.name() != null) {
+            expectedClasses = LOGICAL_TYPE_CLASSES.get(schema.name());
+        }
         if (expectedClasses == null)
-            expectedClasses = SCHEMA_TYPE_CLASSES.getOrDefault(schema.type(), Collections.emptyList());
+            expectedClasses = SCHEMA_TYPE_CLASSES.getOrDefault(schema.type(), List.of());
         return expectedClasses;
     }

Schema.java

@@ -117,19 +117,10 @@ public interface Schema {
         }
 
         public boolean isPrimitive() {
-            switch (this) {
-                case INT8:
-                case INT16:
-                case INT32:
-                case INT64:
-                case FLOAT32:
-                case FLOAT64:
-                case BOOLEAN:
-                case STRING:
-                case BYTES:
-                    return true;
-            }
-            return false;
+            return switch (this) {
+                case INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, STRING, BYTES -> true;
+                default -> false;
+            };
         }
     }

SchemaProjector.java

@@ -78,25 +78,13 @@ public class SchemaProjector {
     }
 
     private static Object projectRequiredSchema(Schema source, Object record, Schema target) throws SchemaProjectorException {
-        switch (target.type()) {
-            case INT8:
-            case INT16:
-            case INT32:
-            case INT64:
-            case FLOAT32:
-            case FLOAT64:
-            case BOOLEAN:
-            case BYTES:
-            case STRING:
-                return projectPrimitive(source, record, target);
-            case STRUCT:
-                return projectStruct(source, (Struct) record, target);
-            case ARRAY:
-                return projectArray(source, record, target);
-            case MAP:
-                return projectMap(source, record, target);
-        }
-        return null;
+        return switch (target.type()) {
+            case INT8, INT16, INT32, INT64, FLOAT32, FLOAT64, BOOLEAN, BYTES, STRING ->
+                projectPrimitive(source, record, target);
+            case STRUCT -> projectStruct(source, (Struct) record, target);
+            case ARRAY -> projectArray(source, record, target);
+            case MAP -> projectMap(source, record, target);
+        };
     }
 
     private static Object projectStruct(Schema source, Struct sourceStruct, Schema target) throws SchemaProjectorException {
@@ -161,28 +149,15 @@
         assert target.type().isPrimitive();
         Object result;
         if (isPromotable(source.type(), target.type()) && record instanceof Number numberRecord) {
-            switch (target.type()) {
-                case INT8:
-                    result = numberRecord.byteValue();
-                    break;
-                case INT16:
-                    result = numberRecord.shortValue();
-                    break;
-                case INT32:
-                    result = numberRecord.intValue();
-                    break;
-                case INT64:
-                    result = numberRecord.longValue();
-                    break;
-                case FLOAT32:
-                    result = numberRecord.floatValue();
-                    break;
-                case FLOAT64:
-                    result = numberRecord.doubleValue();
-                    break;
-                default:
-                    throw new SchemaProjectorException("Not promotable type.");
-            }
+            result = switch (target.type()) {
+                case INT8 -> numberRecord.byteValue();
+                case INT16 -> numberRecord.shortValue();
+                case INT32 -> numberRecord.intValue();
+                case INT64 -> numberRecord.longValue();
+                case FLOAT32 -> numberRecord.floatValue();
+                case FLOAT64 -> numberRecord.doubleValue();
+                default -> throw new SchemaProjectorException("Not promotable type.");
+            };
         } else {
             result = record;
         }

Values.java

@@ -430,33 +430,20 @@ public class Values {
             }
             throw new DataException("Unable to convert a null value to a schema that requires a value");
         }
-        switch (toSchema.type()) {
-            case BYTES:
-                return convertMaybeLogicalBytes(toSchema, value);
-            case STRING:
-                return convertToString(fromSchema, value);
-            case BOOLEAN:
-                return convertToBoolean(fromSchema, value);
-            case INT8:
-                return convertToByte(fromSchema, value);
-            case INT16:
-                return convertToShort(fromSchema, value);
-            case INT32:
-                return convertMaybeLogicalInteger(toSchema, fromSchema, value);
-            case INT64:
-                return convertMaybeLogicalLong(toSchema, fromSchema, value);
-            case FLOAT32:
-                return convertToFloat(fromSchema, value);
-            case FLOAT64:
-                return convertToDouble(fromSchema, value);
-            case ARRAY:
-                return convertToArray(toSchema, value);
-            case MAP:
-                return convertToMapInternal(toSchema, value);
-            case STRUCT:
-                return convertToStructInternal(toSchema, value);
-        }
-        throw new DataException("Unable to convert " + value + " (" + value.getClass() + ") to " + toSchema);
+        return switch (toSchema.type()) {
+            case BYTES -> convertMaybeLogicalBytes(toSchema, value);
+            case STRING -> convertToString(fromSchema, value);
+            case BOOLEAN -> convertToBoolean(fromSchema, value);
+            case INT8 -> convertToByte(fromSchema, value);
+            case INT16 -> convertToShort(fromSchema, value);
+            case INT32 -> convertMaybeLogicalInteger(toSchema, fromSchema, value);
+            case INT64 -> convertMaybeLogicalLong(toSchema, fromSchema, value);
+            case FLOAT32 -> convertToFloat(fromSchema, value);
+            case FLOAT64 -> convertToDouble(fromSchema, value);
+            case ARRAY -> convertToArray(toSchema, value);
+            case MAP -> convertToMapInternal(toSchema, value);
+            case STRUCT -> convertToStructInternal(toSchema, value);
+        };
     }
 
     private static Serializable convertMaybeLogicalBytes(Schema toSchema, Object value) {
@@ -1144,21 +1131,15 @@
         Type previousType = previous.type();
         Type newType = newSchema.type();
         if (previousType != newType) {
-            switch (previous.type()) {
-                case INT8:
-                    return commonSchemaForInt8(newSchema, newType);
-                case INT16:
-                    return commonSchemaForInt16(previous, newSchema, newType);
-                case INT32:
-                    return commonSchemaForInt32(previous, newSchema, newType);
-                case INT64:
-                    return commonSchemaForInt64(previous, newSchema, newType);
-                case FLOAT32:
-                    return commonSchemaForFloat32(previous, newSchema, newType);
-                case FLOAT64:
-                    return commonSchemaForFloat64(previous, newType);
-            }
-            return null;
+            return switch (previous.type()) {
+                case INT8 -> commonSchemaForInt8(newSchema, newType);
+                case INT16 -> commonSchemaForInt16(previous, newSchema, newType);
+                case INT32 -> commonSchemaForInt32(previous, newSchema, newType);
+                case INT64 -> commonSchemaForInt64(previous, newSchema, newType);
+                case FLOAT32 -> commonSchemaForFloat32(previous, newSchema, newType);
+                case FLOAT64 -> commonSchemaForFloat64(previous, newType);
+                default -> null;
+            };
         }
         if (previous.isOptional() == newSchema.isOptional()) {
             // Use the optional one
@@ -1273,10 +1254,8 @@
             }
             if (knownType == null) {
                 knownType = schema.type();
-            } else if (knownType != schema.type()) {
-                return false;
             }
-            return true;
+            return knownType == schema.type();
         }
 
         public Schema schema() {

ConverterType.java

@@ -16,10 +16,10 @@
  */
 package org.apache.kafka.connect.storage;
 
-import java.util.Collections;
-import java.util.HashMap;
+import java.util.Arrays;
 import java.util.Locale;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 /**
  * The type of {@link Converter} and {@link HeaderConverter}.
@@ -29,16 +29,8 @@ public enum ConverterType {
     VALUE,
     HEADER;
 
-    private static final Map<String, ConverterType> NAME_TO_TYPE;
-
-    static {
-        ConverterType[] types = ConverterType.values();
-        Map<String, ConverterType> nameToType = new HashMap<>(types.length);
-        for (ConverterType type : types) {
-            nameToType.put(type.name, type);
-        }
-        NAME_TO_TYPE = Collections.unmodifiableMap(nameToType);
-    }
+    private static final Map<String, ConverterType> NAME_TO_TYPE = Arrays.stream(ConverterType.values())
+        .collect(Collectors.toUnmodifiableMap(ConverterType::getName, t -> t));
 
     /**
      * Find the ConverterType with the given name, using a case-insensitive match.

ConnectorReconfigurationTest.java

@@ -21,7 +21,6 @@ import org.apache.kafka.connect.errors.ConnectException;
 import org.junit.jupiter.api.Test;
 
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -33,15 +32,15 @@ public class ConnectorReconfigurationTest {
     @Test
     public void testDefaultReconfigure() {
         TestConnector conn = new TestConnector(false);
-        conn.reconfigure(Collections.emptyMap());
-        assertEquals(conn.stopOrder, 0);
-        assertEquals(conn.configureOrder, 1);
+        conn.reconfigure(Map.of());
+        assertEquals(0, conn.stopOrder);
+        assertEquals(1, conn.configureOrder);
     }
 
     @Test
     public void testReconfigureStopException() {
         TestConnector conn = new TestConnector(true);
-        assertThrows(ConnectException.class, () -> conn.reconfigure(Collections.emptyMap()));
+        assertThrows(ConnectException.class, () -> conn.reconfigure(Map.of()));
     }
 
     private static class TestConnector extends Connector {

ConnectSchemaTest.java

@@ -27,6 +27,7 @@ import java.nio.CharBuffer;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -86,17 +87,17 @@ public class ConnectSchemaTest {
         ConnectSchema.validateValue(Schema.STRING_SCHEMA, "a string");
         ConnectSchema.validateValue(Schema.BYTES_SCHEMA, "a byte array".getBytes());
         ConnectSchema.validateValue(Schema.BYTES_SCHEMA, ByteBuffer.wrap("a byte array".getBytes()));
-        ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3));
+        ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), List.of(1, 2, 3));
         ConnectSchema.validateValue(
                 SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.STRING_SCHEMA).build(),
-                Collections.singletonMap(1, "value")
+                Map.of(1, "value")
         );
 
         // Struct tests the basic struct layout + complex field types + nested structs
         Struct structValue = new Struct(STRUCT_SCHEMA)
                 .put("first", 1)
                 .put("second", "foo")
-                .put("array", Arrays.asList(1, 2, 3))
-                .put("map", Collections.singletonMap(1, "value"))
+                .put("array", List.of(1, 2, 3))
+                .put("map", Map.of(1, "value"))
                 .put("nested", new Struct(FLAT_STRUCT_SCHEMA).put("field", 12));
         ConnectSchema.validateValue(STRUCT_SCHEMA, structValue);
     }
@@ -171,7 +172,7 @@
     @Test
     public void testValidateValueMismatchArray() {
         assertThrows(DataException.class,
-            () -> ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList("a", "b", "c")));
+            () -> ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), List.of("a", "b", "c")));
     }
 
     @Test
@@ -179,19 +180,19 @@
         // Even if some match the right type, this should fail if any mismatch. In this case, type erasure loses
         // the fact that the list is actually List<Object>, but we couldn't tell if only checking the first element
         assertThrows(DataException.class,
-            () -> ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, "c")));
+            () -> ConnectSchema.validateValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), List.of(1, 2, "c")));
     }
 
     @Test
     public void testValidateValueMismatchMapKey() {
         assertThrows(DataException.class,
-            () -> ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Collections.singletonMap("wrong key type", "value")));
+            () -> ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Map.of("wrong key type", "value")));
     }
 
     @Test
     public void testValidateValueMismatchMapValue() {
         assertThrows(DataException.class,
-            () -> ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Collections.singletonMap(1, 2)));
+            () -> ConnectSchema.validateValue(MAP_INT_STRING_SCHEMA, Map.of(1, 2)));
     }
 
     @Test
@@ -259,7 +260,7 @@
         ConnectSchema differentName = new ConnectSchema(Schema.Type.INT8, false, null, "otherName", 2, "doc");
         ConnectSchema differentVersion = new ConnectSchema(Schema.Type.INT8, false, null, "name", 4, "doc");
         ConnectSchema differentDoc = new ConnectSchema(Schema.Type.INT8, false, null, "name", 2, "other doc");
-        ConnectSchema differentParameters = new ConnectSchema(Schema.Type.INT8, false, null, "name", 2, "doc", Collections.singletonMap("param", "value"), null, null, null);
+        ConnectSchema differentParameters = new ConnectSchema(Schema.Type.INT8, false, null, "name", 2, "doc", Map.of("param", "value"), null, null, null);
 
         assertEquals(s1, s2);
         assertNotEquals(s1, differentType);
@@ -311,13 +312,13 @@
         // Same as testArrayEquality, but checks differences in fields. Only does a simple check, relying on tests of
         // Field's equals() method to validate all variations in the list of fields will be checked
         ConnectSchema s1 = new ConnectSchema(Schema.Type.STRUCT, false, null, null, null, null, null,
-                Arrays.asList(new Field("field", 0, SchemaBuilder.int8().build()),
+                List.of(new Field("field", 0, SchemaBuilder.int8().build()),
                         new Field("field2", 1, SchemaBuilder.int16().build())), null, null);
         ConnectSchema s2 = new ConnectSchema(Schema.Type.STRUCT, false, null, null, null, null, null,
-                Arrays.asList(new Field("field", 0, SchemaBuilder.int8().build()),
+                List.of(new Field("field", 0, SchemaBuilder.int8().build()),
                         new Field("field2", 1, SchemaBuilder.int16().build())), null, null);
         ConnectSchema differentField = new ConnectSchema(Schema.Type.STRUCT, false, null, null, null, null, null,
-                Arrays.asList(new Field("field", 0, SchemaBuilder.int8().build()),
+                List.of(new Field("field", 0, SchemaBuilder.int8().build()),
                         new Field("different field name", 1, SchemaBuilder.int16().build())), null, null);
 
         assertEquals(s1, s2);
@@ -365,44 +366,44 @@
         // Optional element schema
         Schema optionalStrings = SchemaBuilder.array(Schema.OPTIONAL_STRING_SCHEMA);
-        ConnectSchema.validateValue(fieldName, optionalStrings, Collections.emptyList());
-        ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonList("hello"));
+        ConnectSchema.validateValue(fieldName, optionalStrings, List.of());
+        ConnectSchema.validateValue(fieldName, optionalStrings, List.of("hello"));
         ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonList(null));
-        ConnectSchema.validateValue(fieldName, optionalStrings, Arrays.asList("hello", "world"));
+        ConnectSchema.validateValue(fieldName, optionalStrings, List.of("hello", "world"));
         ConnectSchema.validateValue(fieldName, optionalStrings, Arrays.asList("hello", null));
         ConnectSchema.validateValue(fieldName, optionalStrings, Arrays.asList(null, "world"));
-        assertInvalidValueForSchema(fieldName, optionalStrings, Collections.singletonList(true),
+        assertInvalidValueForSchema(fieldName, optionalStrings, List.of(true),
             "Invalid Java object for schema with type STRING: class java.lang.Boolean for element of array field: \"field\"");
 
         // Required element schema
         Schema requiredStrings = SchemaBuilder.array(Schema.STRING_SCHEMA);
-        ConnectSchema.validateValue(fieldName, requiredStrings, Collections.emptyList());
-        ConnectSchema.validateValue(fieldName, requiredStrings, Collections.singletonList("hello"));
+        ConnectSchema.validateValue(fieldName, requiredStrings, List.of());
+        ConnectSchema.validateValue(fieldName, requiredStrings, List.of("hello"));
         assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonList(null),
            "Invalid value: null used for required element of array field: \"field\", schema type: STRING");
-        ConnectSchema.validateValue(fieldName, requiredStrings, Arrays.asList("hello", "world"));
+        ConnectSchema.validateValue(fieldName, requiredStrings, List.of("hello", "world"));
        assertInvalidValueForSchema(fieldName, requiredStrings, Arrays.asList("hello", null),
           "Invalid value: null used for required element of array field: \"field\", schema type: STRING");
        assertInvalidValueForSchema(fieldName, requiredStrings, Arrays.asList(null, "world"),
           "Invalid value: null used for required element of array field: \"field\", schema type: STRING");
-        assertInvalidValueForSchema(fieldName, optionalStrings, Collections.singletonList(true),
+        assertInvalidValueForSchema(fieldName, optionalStrings, List.of(true),
            "Invalid Java object for schema with type STRING: class java.lang.Boolean for element of array field: \"field\"");
 
         // Null element schema
         Schema nullElements = SchemaBuilder.type(Schema.Type.ARRAY);
-        assertInvalidValueForSchema(fieldName, nullElements, Collections.emptyList(),
+        assertInvalidValueForSchema(fieldName, nullElements, List.of(),
           "No schema defined for element of array field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullElements, Collections.singletonList("hello"),
+        assertInvalidValueForSchema(fieldName, nullElements, List.of("hello"),
          "No schema defined for element of array field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullElements, Collections.singletonList(null),
          "No schema defined for element of array field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullElements, Arrays.asList("hello", "world"),
+        assertInvalidValueForSchema(fieldName, nullElements, List.of("hello", "world"),
          "No schema defined for element of array field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullElements, Arrays.asList("hello", null),
          "No schema defined for element of array field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullElements, Arrays.asList(null, "world"),
         "No schema defined for element of array field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullElements, Collections.singletonList(true),
+        assertInvalidValueForSchema(fieldName, nullElements, List.of(true),
         "No schema defined for element of array field: \"field\"");
     }
@@ -412,36 +413,36 @@
         // Optional element schema
         Schema optionalStrings = SchemaBuilder.map(Schema.OPTIONAL_STRING_SCHEMA, Schema.OPTIONAL_STRING_SCHEMA);
-        ConnectSchema.validateValue(fieldName, optionalStrings, Collections.emptyMap());
-        ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonMap("key", "value"));
+        ConnectSchema.validateValue(fieldName, optionalStrings, Map.of());
+        ConnectSchema.validateValue(fieldName, optionalStrings, Map.of("key", "value"));
         ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonMap("key", null));
         ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonMap(null, "value"));
         ConnectSchema.validateValue(fieldName, optionalStrings, Collections.singletonMap(null, null));
-        assertInvalidValueForSchema(fieldName, optionalStrings, Collections.singletonMap("key", true),
+        assertInvalidValueForSchema(fieldName, optionalStrings, Map.of("key", true),
            "Invalid Java object for schema with type STRING: class java.lang.Boolean for value of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, optionalStrings, Collections.singletonMap(true, "value"),
+        assertInvalidValueForSchema(fieldName, optionalStrings, Map.of(true, "value"),
           "Invalid Java object for schema with type STRING: class java.lang.Boolean for key of map field: \"field\"");
 
         // Required element schema
         Schema requiredStrings = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA);
-        ConnectSchema.validateValue(fieldName, requiredStrings, Collections.emptyMap());
-        ConnectSchema.validateValue(fieldName, requiredStrings, Collections.singletonMap("key", "value"));
+        ConnectSchema.validateValue(fieldName, requiredStrings, Map.of());
+        ConnectSchema.validateValue(fieldName, requiredStrings, Map.of("key", "value"));
        assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonMap("key", null),
          "Invalid value: null used for required value of map field: \"field\", schema type: STRING");
        assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonMap(null, "value"),
         "Invalid value: null used for required key of map field: \"field\", schema type: STRING");
        assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonMap(null, null),
        "Invalid value: null used for required key of map field: \"field\", schema type: STRING");
-        assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonMap("key", true),
+        assertInvalidValueForSchema(fieldName, requiredStrings, Map.of("key", true),
        "Invalid Java object for schema with type STRING: class java.lang.Boolean for value of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, requiredStrings, Collections.singletonMap(true, "value"),
        "Invalid Java object for schema with type STRING: class java.lang.Boolean for key of map field: \"field\"");
+        assertInvalidValueForSchema(fieldName, requiredStrings, Map.of(true, "value"),
       "Invalid Java object for schema with type STRING: class java.lang.Boolean for key of map field: \"field\"");
 
         // Null key schema
         Schema nullKeys = SchemaBuilder.type(Schema.Type.MAP);
-        assertInvalidValueForSchema(fieldName, nullKeys, Collections.emptyMap(),
       "No schema defined for key of map field: \"field\"");
+        assertInvalidValueForSchema(fieldName, nullKeys, Map.of(),
      "No schema defined for key of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullKeys, Collections.singletonMap("key", "value"),
      "No schema defined for key of map field: \"field\"");
+        assertInvalidValueForSchema(fieldName, nullKeys, Map.of("key", "value"),
     "No schema defined for key of map field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullKeys, Collections.singletonMap("key", null),
    "No schema defined for key of map field: \"field\"");
@@ -449,16 +450,16 @@
            "No schema defined for key of map field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullKeys, Collections.singletonMap(null, null),
            "No schema defined for key of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullKeys, Collections.singletonMap("key", true),
+        assertInvalidValueForSchema(fieldName, nullKeys, Map.of("key", true),
            "No schema defined for key of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullKeys, Collections.singletonMap(true, "value"),
+        assertInvalidValueForSchema(fieldName, nullKeys, Map.of(true, "value"),
            "No schema defined for key of map field: \"field\"");
 
        // Null value schema
        Schema nullValues = SchemaBuilder.mapWithNullValues(Schema.OPTIONAL_STRING_SCHEMA);
-        assertInvalidValueForSchema(fieldName, nullValues, Collections.emptyMap(),
+        assertInvalidValueForSchema(fieldName, nullValues, Map.of(),
            "No schema defined for value of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullValues, Collections.singletonMap("key", "value"),
+        assertInvalidValueForSchema(fieldName, nullValues, Map.of("key", "value"),
            "No schema defined for value of map field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullValues, Collections.singletonMap("key", null),
            "No schema defined for value of map field: \"field\"");
@@ -466,9 +467,9 @@
            "No schema defined for value of map field: \"field\"");
        assertInvalidValueForSchema(fieldName, nullValues, Collections.singletonMap(null, null),
            "No schema defined for value of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullValues, Collections.singletonMap("key", true),
+        assertInvalidValueForSchema(fieldName, nullValues, Map.of("key", true),
            "No schema defined for value of map field: \"field\"");
-        assertInvalidValueForSchema(fieldName, nullValues, Collections.singletonMap(true, "value"),
+        assertInvalidValueForSchema(fieldName, nullValues, Map.of(true, "value"),
            "No schema defined for value of map field: \"field\"");
     }
 }

DecimalTest.java

@@ -20,7 +20,7 @@ import org.junit.jupiter.api.Test;
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.util.Collections;
+import java.util.Map;
 
 import static org.junit.jupiter.api.Assertions.assertArrayEquals;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -36,7 +36,7 @@ public class DecimalTest {
     public void testBuilder() {
         Schema plain = Decimal.builder(2).build();
         assertEquals(Decimal.LOGICAL_NAME, plain.name());
-        assertEquals(Collections.singletonMap(Decimal.SCALE_FIELD, "2"), plain.parameters());
+        assertEquals(Map.of(Decimal.SCALE_FIELD, "2"), plain.parameters());
         assertEquals(1, (Object) plain.version());
     }

SchemaBuilderTest.java

@@ -21,8 +21,6 @@ import org.apache.kafka.connect.errors.SchemaBuilderException;
 import org.junit.jupiter.api.Test;
 
 import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -234,14 +232,14 @@ public class SchemaBuilderTest {
     public void testArrayBuilder() {
         Schema schema = SchemaBuilder.array(Schema.INT8_SCHEMA).build();
         assertTypeAndDefault(schema, Schema.Type.ARRAY, false, null);
-        assertEquals(schema.valueSchema(), Schema.INT8_SCHEMA);
+        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
         assertNoMetadata(schema);
 
         // Default value
-        List<Byte> defArray = Arrays.asList((byte) 1, (byte) 2);
+        List<Byte> defArray = List.of((byte) 1, (byte) 2);
         schema = SchemaBuilder.array(Schema.INT8_SCHEMA).defaultValue(defArray).build();
         assertTypeAndDefault(schema, Schema.Type.ARRAY, false, defArray);
-        assertEquals(schema.valueSchema(), Schema.INT8_SCHEMA);
+        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
         assertNoMetadata(schema);
     }
@@ -249,7 +247,7 @@
     public void testArrayBuilderInvalidDefault() {
         // Array, but wrong embedded type
         assertThrows(SchemaBuilderException.class,
-            () -> SchemaBuilder.array(Schema.INT8_SCHEMA).defaultValue(Collections.singletonList("string")).build());
+            () -> SchemaBuilder.array(Schema.INT8_SCHEMA).defaultValue(List.of("string")).build());
     }
 
     @Test
@@ -257,30 +255,30 @@
         // SchemaBuilder should also pass the check
         Schema schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA);
         assertTypeAndDefault(schema, Schema.Type.MAP, false, null);
-        assertEquals(schema.keySchema(), Schema.INT8_SCHEMA);
-        assertEquals(schema.valueSchema(), Schema.INT8_SCHEMA);
+        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
+        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
         assertNoMetadata(schema);
 
         schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA).build();
         assertTypeAndDefault(schema, Schema.Type.MAP, false, null);
-        assertEquals(schema.keySchema(), Schema.INT8_SCHEMA);
-        assertEquals(schema.valueSchema(), Schema.INT8_SCHEMA);
+        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
+        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
         assertNoMetadata(schema);
 
         // Default value
-        Map<Byte, Byte> defMap = Collections.singletonMap((byte) 5, (byte) 10);
+        Map<Byte, Byte> defMap = Map.of((byte) 5, (byte) 10);
         schema = SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA)
                 .defaultValue(defMap).build();
         assertTypeAndDefault(schema, Schema.Type.MAP, false, defMap);
-        assertEquals(schema.keySchema(), Schema.INT8_SCHEMA);
-        assertEquals(schema.valueSchema(), Schema.INT8_SCHEMA);
+        assertEquals(Schema.INT8_SCHEMA, schema.keySchema());
+        assertEquals(Schema.INT8_SCHEMA, schema.valueSchema());
         assertNoMetadata(schema);
     }
 
     @Test
     public void testMapBuilderInvalidDefault() {
         // Map, but wrong embedded type
-        Map<Byte, String> defMap = Collections.singletonMap((byte) 5, "foo");
+        Map<Byte, String> defMap = Map.of((byte) 5, "foo");
         assertThrows(SchemaBuilderException.class, () -> SchemaBuilder.map(Schema.INT8_SCHEMA, Schema.INT8_SCHEMA)
                 .defaultValue(defMap).build());
     }

SchemaProjectorTest.java

@@ -24,8 +24,6 @@ import org.junit.jupiter.api.Test;
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -78,12 +76,12 @@
         Object[] values = {(byte) 127, (short) 255, 32767, 327890L, 1.2F, 1.2345};
         Map<Object, List<?>> expectedProjected = new HashMap<>();
-        expectedProjected.put(values[0], Arrays.asList((byte) 127, (short) 127, 127, 127L, 127.F, 127.));
-        expectedProjected.put(values[1], Arrays.asList((short) 255, 255, 255L, 255.F, 255.));
-        expectedProjected.put(values[2], Arrays.asList(32767, 32767L, 32767.F, 32767.));
-        expectedProjected.put(values[3], Arrays.asList(327890L, 327890.F, 327890.));
-        expectedProjected.put(values[4], Arrays.asList(1.2F, 1.2));
-        expectedProjected.put(values[5], Collections.singletonList(1.2345));
+        expectedProjected.put(values[0], List.of((byte) 127, (short) 127, 127, 127L, 127.F, 127.));
+        expectedProjected.put(values[1], List.of((short) 255, 255, 255L, 255.F, 255.));
+        expectedProjected.put(values[2], List.of(32767, 32767L, 32767.F, 32767.));
+        expectedProjected.put(values[3], List.of(327890L, 327890.F, 327890.));
+        expectedProjected.put(values[4], List.of(1.2F, 1.2));
+        expectedProjected.put(values[5], List.of(1.2345));
 
         Object promoted;
         for (int i = 0; i < promotableSchemas.length; ++i) {
@@ -298,16 +296,16 @@
         Struct sourceNestedStruct = new Struct(sourceNestedSchema);
         sourceNestedStruct.put("first", 1);
         sourceNestedStruct.put("second", "abc");
-        sourceNestedStruct.put("array", Arrays.asList(1, 2));
-        sourceNestedStruct.put("map", Collections.singletonMap(5, "def"));
+        sourceNestedStruct.put("array", List.of(1, 2));
+        sourceNestedStruct.put("map", Map.of(5, "def"));
         sourceNestedStruct.put("nested", sourceFlatStruct);
 
         Struct targetNestedStruct = (Struct) SchemaProjector.project(sourceNestedSchema, sourceNestedStruct,
                 targetNestedSchema);
         assertEquals(1, targetNestedStruct.get("first"));
         assertEquals("abc", targetNestedStruct.get("second"));
-        assertEquals(Arrays.asList(1, 2), targetNestedStruct.get("array"));
-        assertEquals(Collections.singletonMap(5, "def"), targetNestedStruct.get("map"));
+        assertEquals(List.of(1, 2), targetNestedStruct.get("array"));
+        assertEquals(Map.of(5, "def"), targetNestedStruct.get("map"));
 
         Struct projectedStruct = (Struct) targetNestedStruct.get("nested");
         assertEquals(113, projectedStruct.get("field"));
@@ -360,22 +358,22 @@
     public void testArrayProjection() {
         Schema source = SchemaBuilder.array(Schema.INT32_SCHEMA).build();
 
-        Object projected = SchemaProjector.project(source, Arrays.asList(1, 2, 3), source);
-        assertEquals(Arrays.asList(1, 2, 3), projected);
+        Object projected = SchemaProjector.project(source, List.of(1, 2, 3), source);
+        assertEquals(List.of(1, 2, 3), projected);
 
         Schema optionalSource = SchemaBuilder.array(Schema.INT32_SCHEMA).optional().build();
-        Schema target = SchemaBuilder.array(Schema.INT32_SCHEMA).defaultValue(Arrays.asList(1, 2, 3)).build();
-        projected = SchemaProjector.project(optionalSource, Arrays.asList(4, 5), target);
-        assertEquals(Arrays.asList(4, 5), projected);
+        Schema target = SchemaBuilder.array(Schema.INT32_SCHEMA).defaultValue(List.of(1, 2, 3)).build();
+        projected = SchemaProjector.project(optionalSource, List.of(4, 5), target);
+        assertEquals(List.of(4, 5), projected);
         projected = SchemaProjector.project(optionalSource, null, target);
-        assertEquals(Arrays.asList(1, 2, 3), projected);
+        assertEquals(List.of(1, 2, 3), projected);
 
-        Schema promotedTarget = SchemaBuilder.array(Schema.INT64_SCHEMA).defaultValue(Arrays.asList(1L, 2L, 3L)).build();
-        projected = SchemaProjector.project(optionalSource, Arrays.asList(4, 5), promotedTarget);
-        List<Long> expectedProjected = Arrays.asList(4L, 5L);
+        Schema promotedTarget = SchemaBuilder.array(Schema.INT64_SCHEMA).defaultValue(List.of(1L, 2L, 3L)).build();
+        projected = SchemaProjector.project(optionalSource, List.of(4, 5), promotedTarget);
+        List<Long> expectedProjected = List.of(4L, 5L);
         assertEquals(expectedProjected, projected);
         projected = SchemaProjector.project(optionalSource, null, promotedTarget);
-        assertEquals(Arrays.asList(1L, 2L, 3L), projected);
+        assertEquals(List.of(1L, 2L, 3L), projected);
 
         Schema noDefaultValueTarget = SchemaBuilder.array(Schema.INT32_SCHEMA).build();
         assertThrows(SchemaProjectorException.class, () -> SchemaProjector.project(optionalSource, null,
@@ -391,18 +389,18 @@
     public void testMapProjection() {
         Schema source = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).optional().build();
 
-        Schema target = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).defaultValue(Collections.singletonMap(1, 2)).build();
-        Object projected = SchemaProjector.project(source, Collections.singletonMap(3, 4), target);
-        assertEquals(Collections.singletonMap(3, 4), projected);
+        Schema target = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).defaultValue(Map.of(1, 2)).build();
+        Object projected = SchemaProjector.project(source, Map.of(3, 4), target);
+        assertEquals(Map.of(3, 4), projected);
         projected = SchemaProjector.project(source, null, target);
-        assertEquals(Collections.singletonMap(1, 2), projected);
+        assertEquals(Map.of(1, 2), projected);
 
         Schema promotedTarget = SchemaBuilder.map(Schema.INT64_SCHEMA, Schema.FLOAT32_SCHEMA).defaultValue(
-                Collections.singletonMap(3L, 4.5F)).build();
-        projected = SchemaProjector.project(source, Collections.singletonMap(3, 4), promotedTarget);
-        assertEquals(Collections.singletonMap(3L, 4.F), projected);
+                Map.of(3L, 4.5F)).build();
+        projected = SchemaProjector.project(source, Map.of(3, 4), promotedTarget);
+        assertEquals(Map.of(3L, 4.F), projected);
         projected = SchemaProjector.project(source, null, promotedTarget);
-        assertEquals(Collections.singletonMap(3L, 4.5F), projected);
+        assertEquals(Map.of(3L, 4.5F), projected);
 
         Schema noDefaultValueTarget = SchemaBuilder.map(Schema.INT32_SCHEMA, Schema.INT32_SCHEMA).build();
         assertThrows(SchemaProjectorException.class,
@@ -424,7 +422,7 @@
                 () -> SchemaProjector.project(source, 12, target),
                 "Source name and target name mismatch.");
 
-        Schema targetWithParameters = SchemaBuilder.int32().parameters(Collections.singletonMap("key", "value"));
+        Schema targetWithParameters = SchemaBuilder.int32().parameters(Map.of("key", "value"));
         assertThrows(SchemaProjectorException.class,
                 () -> SchemaProjector.project(source, 34, targetWithParameters),
                 "Source parameters and target parameters mismatch.");

StructTest.java

@@ -21,8 +21,6 @@ import org.apache.kafka.connect.errors.DataException;
 import org.junit.jupiter.api.Test;
 
 import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -93,8 +91,8 @@
     @Test
     public void testComplexStruct() {
-        List<Byte> array = Arrays.asList((byte) 1, (byte) 2);
-        Map<Integer, String> map = Collections.singletonMap(1, "string");
+        List<Byte> array = List.of((byte) 1, (byte) 2);
+        Map<Integer, String> map = Map.of(1, "string");
         Struct struct = new Struct(NESTED_SCHEMA)
                 .put("array", array)
                 .put("map", map)
@@ -124,13 +122,13 @@
     @Test
     public void testInvalidArrayFieldElements() {
         assertThrows(DataException.class,
-            () -> new Struct(NESTED_SCHEMA).put("array", Collections.singletonList("should fail since elements should be int8s")));
+            () -> new Struct(NESTED_SCHEMA).put("array", List.of("should fail since elements should be int8s")));
     }
 
     @Test
     public void testInvalidMapKeyElements() {
         assertThrows(DataException.class,
-            () -> new Struct(NESTED_SCHEMA).put("map", Collections.singletonMap("should fail because keys should be int8s", (byte) 12)));
+            () -> new Struct(NESTED_SCHEMA).put("map", Map.of("should fail because keys should be int8s", (byte) 12)));
     }
 
     @Test
@@ -219,20 +217,20 @@
         assertEquals(struct1, struct2);
         assertNotEquals(struct1, struct3);
 
-        List<Byte> array = Arrays.asList((byte) 1, (byte) 2);
-        Map<Integer, String> map = Collections.singletonMap(1, "string");
+        List<Byte> array = List.of((byte) 1, (byte) 2);
+        Map<Integer, String> map = Map.of(1, "string");
         struct1 = new Struct(NESTED_SCHEMA)
                 .put("array", array)
                 .put("map", map)
                 .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12));
-        List<Byte> array2 = Arrays.asList((byte) 1, (byte) 2);
-        Map<Integer, String> map2 = Collections.singletonMap(1, "string");
+        List<Byte> array2 = List.of((byte) 1, (byte) 2);
+        Map<Integer, String> map2 = Map.of(1, "string");
         struct2 = new Struct(NESTED_SCHEMA)
                 .put("array", array2)
                 .put("map", map2)
                 .put("nested", new Struct(NESTED_CHILD_SCHEMA).put("int8", (byte) 12));
-        List<Byte> array3 = Arrays.asList((byte) 1, (byte) 2, (byte) 3);
-        Map<Integer, String> map3 = Collections.singletonMap(2, "string");
+        List<Byte> array3 = List.of((byte) 1, (byte) 2, (byte) 3);
+        Map<Integer, String> map3 = Map.of(2, "string");
         struct3 = new Struct(NESTED_SCHEMA)
                 .put("array", array3)
                 .put("map", map3)

ValuesTest.java

@@ -38,7 +38,6 @@ import java.time.ZoneOffset;
 import java.time.format.DateTimeFormatter;
 import java.time.temporal.ChronoUnit;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
@@ -184,7 +183,7 @@
         SchemaAndValue schemaAndValue = Values.parseString("[true, false]");
         assertEquals(Type.ARRAY, schemaAndValue.schema().type());
         assertEquals(Type.BOOLEAN, schemaAndValue.schema().valueSchema().type());
-        assertEquals(Arrays.asList(true, false), schemaAndValue.value());
+        assertEquals(List.of(true, false), schemaAndValue.value());
     }
 
     @Test
@@ -217,14 +216,14 @@
     public void shouldParseEmptyMap() {
         SchemaAndValue schemaAndValue = Values.parseString("{}");
         assertEquals(Type.MAP, schemaAndValue.schema().type());
-        assertEquals(Collections.emptyMap(), schemaAndValue.value());
+        assertEquals(Map.of(), schemaAndValue.value());
     }
 
     @Test
     public void shouldParseEmptyArray() {
         SchemaAndValue schemaAndValue = Values.parseString("[]");
         assertEquals(Type.ARRAY, schemaAndValue.schema().type());
-        assertEquals(Collections.emptyList(), schemaAndValue.value());
+        assertEquals(List.of(), schemaAndValue.value());
     }
 
     @Test
@@ -468,16 +467,16 @@
     @Test
     public void shouldParseStringListWithMultipleElementTypes() {
         assertParseStringArrayWithNoSchema(
-            Arrays.asList((byte) 1, (byte) 2, (short) 300, "four"),
+            List.of((byte) 1, (byte) 2, (short) 300, "four"),
             "[1, 2, 300, \"four\"]");
         assertParseStringArrayWithNoSchema(
-            Arrays.asList((byte) 2, (short) 300, "four", (byte) 1),
+            List.of((byte) 2, (short) 300, "four", (byte) 1),
             "[2, 300, \"four\", 1]");
         assertParseStringArrayWithNoSchema(
-            Arrays.asList((short) 300, "four", (byte) 1, (byte) 2),
+            List.of((short) 300, "four", (byte) 1, (byte) 2),
             "[300, \"four\", 1, 2]");
         assertParseStringArrayWithNoSchema(
-            Arrays.asList("four", (byte) 1, (byte) 2, (short) 300),
+            List.of("four", (byte) 1, (byte) 2, (short) 300),
             "[\"four\", 1, 2, 300]");
     }
@@ -648,7 +647,7 @@
         assertEquals(Type.INT32, elementSchema.type());
         assertEquals(Date.LOGICAL_NAME, elementSchema.name());
         java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_DATE_FORMAT_PATTERN).parse(dateStr);
-        assertEquals(Collections.singletonList(expected), result.value());
+        assertEquals(List.of(expected), result.value());
     }
 
     @Test
@@ -661,7 +660,7 @@
         assertEquals(Type.INT32, elementSchema.type());
         assertEquals(Time.LOGICAL_NAME, elementSchema.name());
         java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(timeStr);
-        assertEquals(Collections.singletonList(expected), result.value());
+        assertEquals(List.of(expected), result.value());
     }
 
     @Test
@@ -674,7 +673,7 @@
         assertEquals(Type.INT64, elementSchema.type());
         assertEquals(Timestamp.LOGICAL_NAME, elementSchema.name());
         java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr);
-        assertEquals(Collections.singletonList(expected), result.value());
+        assertEquals(List.of(expected), result.value());
} }
@Test @Test
@ -691,7 +690,7 @@ public class ValuesTest {
java.util.Date expected1 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr1); java.util.Date expected1 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr1);
java.util.Date expected2 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr2); java.util.Date expected2 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr2);
java.util.Date expected3 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr3); java.util.Date expected3 = new SimpleDateFormat(Values.ISO_8601_TIMESTAMP_FORMAT_PATTERN).parse(tsStr3);
assertEquals(Arrays.asList(expected1, expected2, expected3), result.value()); assertEquals(List.of(expected1, expected2, expected3), result.value());
} }
@Test @Test
@ -707,7 +706,7 @@ public class ValuesTest {
assertEquals(Type.INT32, valueSchema.type()); assertEquals(Type.INT32, valueSchema.type());
assertEquals(Time.LOGICAL_NAME, valueSchema.name()); assertEquals(Time.LOGICAL_NAME, valueSchema.name());
java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(timeStr); java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(timeStr);
assertEquals(Collections.singletonMap(keyStr, expected), result.value()); assertEquals(Map.of(keyStr, expected), result.value());
} }
@Test @Test
@ -723,7 +722,7 @@ public class ValuesTest {
assertEquals(Type.INT32, valueSchema.type()); assertEquals(Type.INT32, valueSchema.type());
assertEquals(Time.LOGICAL_NAME, valueSchema.name()); assertEquals(Time.LOGICAL_NAME, valueSchema.name());
java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(timeStr); java.util.Date expected = new SimpleDateFormat(Values.ISO_8601_TIME_FORMAT_PATTERN).parse(timeStr);
assertEquals(Collections.singletonMap(keyStr, expected), result.value()); assertEquals(Map.of(keyStr, expected), result.value());
} }
@Test @Test
@ -991,25 +990,25 @@ public class ValuesTest {
@Test @Test
public void shouldInferNoSchemaForEmptyList() { public void shouldInferNoSchemaForEmptyList() {
Schema listSchema = Values.inferSchema(Collections.emptyList()); Schema listSchema = Values.inferSchema(List.of());
assertNull(listSchema); assertNull(listSchema);
} }
@Test @Test
public void shouldInferNoSchemaForListContainingObject() { public void shouldInferNoSchemaForListContainingObject() {
Schema listSchema = Values.inferSchema(Collections.singletonList(new Object())); Schema listSchema = Values.inferSchema(List.of(new Object()));
assertNull(listSchema); assertNull(listSchema);
} }
@Test @Test
public void shouldInferNoSchemaForEmptyMap() { public void shouldInferNoSchemaForEmptyMap() {
Schema listSchema = Values.inferSchema(Collections.emptyMap()); Schema listSchema = Values.inferSchema(Map.of());
assertNull(listSchema); assertNull(listSchema);
} }
@Test @Test
public void shouldInferNoSchemaForMapContainingObject() { public void shouldInferNoSchemaForMapContainingObject() {
Schema listSchema = Values.inferSchema(Collections.singletonMap(new Object(), new Object())); Schema listSchema = Values.inferSchema(Map.of(new Object(), new Object()));
assertNull(listSchema); assertNull(listSchema);
} }
@ -1019,7 +1018,7 @@ public class ValuesTest {
*/ */
@Test @Test
public void shouldNotConvertArrayValuesToDecimal() { public void shouldNotConvertArrayValuesToDecimal() {
List<Object> decimals = Arrays.asList("\"1.0\"", BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE), List<Object> decimals = List.of("\"1.0\"", BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE),
BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE), (byte) 1, (byte) 1); BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE), (byte) 1, (byte) 1);
List<Object> expected = new ArrayList<>(decimals); // most values are directly reproduced with the same type List<Object> expected = new ArrayList<>(decimals); // most values are directly reproduced with the same type
expected.set(0, "1.0"); // The quotes are parsed away, but the value remains a string expected.set(0, "1.0"); // The quotes are parsed away, but the value remains a string
@ -1032,7 +1031,7 @@ public class ValuesTest {
@Test @Test
public void shouldParseArrayOfOnlyDecimals() { public void shouldParseArrayOfOnlyDecimals() {
List<Object> decimals = Arrays.asList(BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE), List<Object> decimals = List.of(BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE),
BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE)); BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE));
SchemaAndValue schemaAndValue = Values.parseString(decimals.toString()); SchemaAndValue schemaAndValue = Values.parseString(decimals.toString());
Schema schema = schemaAndValue.schema(); Schema schema = schemaAndValue.schema();


@ -37,10 +37,11 @@ import java.math.RoundingMode;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Calendar; import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar; import java.util.GregorianCalendar;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TimeZone; import java.util.TimeZone;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -380,9 +381,9 @@ public class ConnectHeadersTest {
assertSchemaMatches(Schema.FLOAT64_SCHEMA, 1.0d); assertSchemaMatches(Schema.FLOAT64_SCHEMA, 1.0d);
assertSchemaMatches(Schema.STRING_SCHEMA, "value"); assertSchemaMatches(Schema.STRING_SCHEMA, "value");
assertSchemaMatches(SchemaBuilder.array(Schema.STRING_SCHEMA), new ArrayList<String>()); assertSchemaMatches(SchemaBuilder.array(Schema.STRING_SCHEMA), new ArrayList<String>());
assertSchemaMatches(SchemaBuilder.array(Schema.STRING_SCHEMA), Collections.singletonList("value")); assertSchemaMatches(SchemaBuilder.array(Schema.STRING_SCHEMA), List.of("value"));
assertSchemaMatches(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA), new HashMap<String, Integer>()); assertSchemaMatches(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA), new HashMap<String, Integer>());
assertSchemaMatches(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA), Collections.singletonMap("a", 0)); assertSchemaMatches(SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA), Map.of("a", 0));
Schema emptyStructSchema = SchemaBuilder.struct(); Schema emptyStructSchema = SchemaBuilder.struct();
assertSchemaMatches(emptyStructSchema, new Struct(emptyStructSchema)); assertSchemaMatches(emptyStructSchema, new Struct(emptyStructSchema));
Schema structSchema = SchemaBuilder.struct().field("foo", Schema.OPTIONAL_BOOLEAN_SCHEMA).field("bar", Schema.STRING_SCHEMA) Schema structSchema = SchemaBuilder.struct().field("foo", Schema.OPTIONAL_BOOLEAN_SCHEMA).field("bar", Schema.STRING_SCHEMA)


@ -25,7 +25,6 @@ import org.apache.kafka.connect.header.Headers;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.Map; import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -38,8 +37,8 @@ import static org.junit.jupiter.api.Assertions.assertTrue;
public class SourceRecordTest { public class SourceRecordTest {
private static final Map<String, ?> SOURCE_PARTITION = Collections.singletonMap("src", "abc"); private static final Map<String, ?> SOURCE_PARTITION = Map.of("src", "abc");
private static final Map<String, ?> SOURCE_OFFSET = Collections.singletonMap("offset", "1"); private static final Map<String, ?> SOURCE_OFFSET = Map.of("offset", "1");
private static final String TOPIC_NAME = "myTopic"; private static final String TOPIC_NAME = "myTopic";
private static final Integer PARTITION_NUMBER = 0; private static final Integer PARTITION_NUMBER = 0;
private static final Long KAFKA_TIMESTAMP = 0L; private static final Long KAFKA_TIMESTAMP = 0L;


@ -24,7 +24,7 @@ import org.apache.kafka.connect.data.SchemaAndValue;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Collections; import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertArrayEquals; import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -58,7 +58,7 @@ public class StringConverterTest {
@Test @Test
public void testToBytesNonUtf8Encoding() { public void testToBytesNonUtf8Encoding() {
converter.configure(Collections.singletonMap("converter.encoding", StandardCharsets.UTF_16.name()), true); converter.configure(Map.of("converter.encoding", StandardCharsets.UTF_16.name()), true);
assertArrayEquals(SAMPLE_STRING.getBytes(StandardCharsets.UTF_16), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING)); assertArrayEquals(SAMPLE_STRING.getBytes(StandardCharsets.UTF_16), converter.fromConnectData(TOPIC, Schema.STRING_SCHEMA, SAMPLE_STRING));
} }
@ -78,7 +78,7 @@ public class StringConverterTest {
@Test @Test
public void testBytesToStringNonUtf8Encoding() { public void testBytesToStringNonUtf8Encoding() {
converter.configure(Collections.singletonMap("converter.encoding", StandardCharsets.UTF_16.name()), true); converter.configure(Map.of("converter.encoding", StandardCharsets.UTF_16.name()), true);
SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes(StandardCharsets.UTF_16)); SchemaAndValue data = converter.toConnectData(TOPIC, SAMPLE_STRING.getBytes(StandardCharsets.UTF_16));
assertEquals(Schema.OPTIONAL_STRING_SCHEMA, data.schema()); assertEquals(Schema.OPTIONAL_STRING_SCHEMA, data.schema());
assertEquals(SAMPLE_STRING, data.value()); assertEquals(SAMPLE_STRING, data.value());


@ -18,8 +18,6 @@ package org.apache.kafka.connect.util;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.List; import java.util.List;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -27,37 +25,37 @@ import static org.junit.jupiter.api.Assertions.assertThrows;
public class ConnectorUtilsTest { public class ConnectorUtilsTest {
private static final List<Integer> FIVE_ELEMENTS = Arrays.asList(1, 2, 3, 4, 5); private static final List<Integer> FIVE_ELEMENTS = List.of(1, 2, 3, 4, 5);
@Test @Test
public void testGroupPartitions() { public void testGroupPartitions() {
List<List<Integer>> grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 1); List<List<Integer>> grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 1);
assertEquals(Collections.singletonList(FIVE_ELEMENTS), grouped); assertEquals(List.of(FIVE_ELEMENTS), grouped);
grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 2); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 2);
assertEquals(Arrays.asList(Arrays.asList(1, 2, 3), Arrays.asList(4, 5)), grouped); assertEquals(List.of(List.of(1, 2, 3), List.of(4, 5)), grouped);
grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 3); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 3);
assertEquals(Arrays.asList(Arrays.asList(1, 2), assertEquals(List.of(List.of(1, 2),
Arrays.asList(3, 4), List.of(3, 4),
Collections.singletonList(5)), grouped); List.of(5)), grouped);
grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 5); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 5);
assertEquals(Arrays.asList(Collections.singletonList(1), assertEquals(List.of(List.of(1),
Collections.singletonList(2), List.of(2),
Collections.singletonList(3), List.of(3),
Collections.singletonList(4), List.of(4),
Collections.singletonList(5)), grouped); List.of(5)), grouped);
grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 7); grouped = ConnectorUtils.groupPartitions(FIVE_ELEMENTS, 7);
assertEquals(Arrays.asList(Collections.singletonList(1), assertEquals(List.of(List.of(1),
Collections.singletonList(2), List.of(2),
Collections.singletonList(3), List.of(3),
Collections.singletonList(4), List.of(4),
Collections.singletonList(5), List.of(5),
Collections.emptyList(), List.of(),
Collections.emptyList()), grouped); List.of()), grouped);
} }
@Test @Test


@ -26,7 +26,7 @@ import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor; import org.mockito.ArgumentCaptor;
import java.io.IOException; import java.io.IOException;
import java.util.Collections; import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier; import java.util.function.Supplier;
@ -107,7 +107,7 @@ public class BasicAuthSecurityRestExtensionTest {
BasicAuthSecurityRestExtension extension = new BasicAuthSecurityRestExtension(configuration); BasicAuthSecurityRestExtension extension = new BasicAuthSecurityRestExtension(configuration);
Exception thrownException = assertThrows(Exception.class, () -> extension.configure(Collections.emptyMap())); Exception thrownException = assertThrows(Exception.class, () -> extension.configure(Map.of()));
assertEquals(jaasConfigurationException, thrownException); assertEquals(jaasConfigurationException, thrownException);
thrownException = assertThrows(Exception.class, () -> extension.register(mock(ConnectRestExtensionContext.class))); thrownException = assertThrows(Exception.class, () -> extension.register(mock(ConnectRestExtensionContext.class)));


@ -32,7 +32,6 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Base64; import java.util.Base64;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -261,8 +260,8 @@ public class JaasBasicAuthFilterTest {
private JaasBasicAuthFilter setupJaasFilter(String name, String credentialFilePath) { private JaasBasicAuthFilter setupJaasFilter(String name, String credentialFilePath) {
TestJaasConfig configuration = new TestJaasConfig(); TestJaasConfig configuration = new TestJaasConfig();
Map<String, Object> moduleOptions = credentialFilePath != null Map<String, Object> moduleOptions = credentialFilePath != null
? Collections.singletonMap("file", credentialFilePath) ? Map.of("file", credentialFilePath)
: Collections.emptyMap(); : Map.of();
configuration.addEntry(name, LOGIN_MODULE, moduleOptions); configuration.addEntry(name, LOGIN_MODULE, moduleOptions);
return new JaasBasicAuthFilter(configuration); return new JaasBasicAuthFilter(configuration);
} }


@ -90,7 +90,7 @@ public class FileStreamSourceTask extends SourceTask {
if (stream == null) { if (stream == null) {
try { try {
stream = Files.newInputStream(Paths.get(filename)); stream = Files.newInputStream(Paths.get(filename));
Map<String, Object> offset = context.offsetStorageReader().offset(Collections.singletonMap(FILENAME_FIELD, filename)); Map<String, Object> offset = context.offsetStorageReader().offset(Map.of(FILENAME_FIELD, filename));
if (offset != null) { if (offset != null) {
Object lastRecordedOffset = offset.get(POSITION_FIELD); Object lastRecordedOffset = offset.get(POSITION_FIELD);
if (lastRecordedOffset != null && !(lastRecordedOffset instanceof Long)) if (lastRecordedOffset != null && !(lastRecordedOffset instanceof Long))


@ -32,9 +32,8 @@ import java.io.PrintStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List;
import java.util.Map; import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -63,14 +62,14 @@ public class FileStreamSinkTaskTest {
// We do not call task.start() since it would override the output stream // We do not call task.start() since it would override the output stream
task.put(Collections.singletonList( task.put(List.of(
new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line1", 1) new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line1", 1)
)); ));
offsets.put(new TopicPartition("topic1", 0), new OffsetAndMetadata(1L)); offsets.put(new TopicPartition("topic1", 0), new OffsetAndMetadata(1L));
task.flush(offsets); task.flush(offsets);
assertEquals("line1" + newLine, os.toString()); assertEquals("line1" + newLine, os.toString());
task.put(Arrays.asList( task.put(List.of(
new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line2", 2), new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line2", 2),
new SinkRecord("topic2", 0, null, null, Schema.STRING_SCHEMA, "line3", 1) new SinkRecord("topic2", 0, null, null, Schema.STRING_SCHEMA, "line3", 1)
)); ));
@ -88,7 +87,7 @@ public class FileStreamSinkTaskTest {
task.start(props); task.start(props);
HashMap<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); HashMap<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
task.put(Collections.singletonList( task.put(List.of(
new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line0", 1) new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line0", 1)
)); ));
offsets.put(new TopicPartition("topic1", 0), new OffsetAndMetadata(1L)); offsets.put(new TopicPartition("topic1", 0), new OffsetAndMetadata(1L));
@ -99,7 +98,7 @@ public class FileStreamSinkTaskTest {
int i = 0; int i = 0;
try (BufferedReader reader = Files.newBufferedReader(Paths.get(outputFile))) { try (BufferedReader reader = Files.newBufferedReader(Paths.get(outputFile))) {
lines[i++] = reader.readLine(); lines[i++] = reader.readLine();
task.put(Arrays.asList( task.put(List.of(
new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line1", 2), new SinkRecord("topic1", 0, null, null, Schema.STRING_SCHEMA, "line1", 2),
new SinkRecord("topic2", 0, null, null, Schema.STRING_SCHEMA, "line2", 1) new SinkRecord("topic2", 0, null, null, Schema.STRING_SCHEMA, "line2", 1)
)); ));


@ -158,48 +158,48 @@ public class FileStreamSourceConnectorTest {
@Test @Test
public void testAlterOffsetsStdin() { public void testAlterOffsetsStdin() {
sourceProperties.remove(FileStreamSourceConnector.FILE_CONFIG); sourceProperties.remove(FileStreamSourceConnector.FILE_CONFIG);
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
Collections.singletonMap(FILENAME_FIELD, FILENAME), Map.of(FILENAME_FIELD, FILENAME),
Collections.singletonMap(POSITION_FIELD, 0L) Map.of(POSITION_FIELD, 0L)
); );
assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, offsets)); assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, offsets));
} }
@Test @Test
public void testAlterOffsetsIncorrectPartitionKey() { public void testAlterOffsetsIncorrectPartitionKey() {
assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, Map.of(
Collections.singletonMap("other_partition_key", FILENAME), Map.of("other_partition_key", FILENAME),
Collections.singletonMap(POSITION_FIELD, 0L) Map.of(POSITION_FIELD, 0L)
))); )));
// null partitions are invalid // null partitions are invalid
assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, Collections.singletonMap(
null, null,
Collections.singletonMap(POSITION_FIELD, 0L) Map.of(POSITION_FIELD, 0L)
))); )));
} }
@Test @Test
public void testAlterOffsetsMultiplePartitions() { public void testAlterOffsetsMultiplePartitions() {
Map<Map<String, ?>, Map<String, ?>> offsets = new HashMap<>(); Map<Map<String, ?>, Map<String, ?>> offsets = new HashMap<>();
offsets.put(Collections.singletonMap(FILENAME_FIELD, FILENAME), Collections.singletonMap(POSITION_FIELD, 0L)); offsets.put(Map.of(FILENAME_FIELD, FILENAME), Map.of(POSITION_FIELD, 0L));
offsets.put(Collections.singletonMap(FILENAME_FIELD, "/someotherfilename"), null); offsets.put(Collections.singletonMap(FILENAME_FIELD, "/someotherfilename"), null);
assertTrue(connector.alterOffsets(sourceProperties, offsets)); assertTrue(connector.alterOffsets(sourceProperties, offsets));
} }
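
(The Collections.singletonMap calls that survive in this file are deliberate: the List.of/Map.of factories throw NullPointerException for null keys and values, whereas Collections.singletonMap accepts them, and these tests exercise null partitions, null offset maps, and possibly-null offset values. A minimal sketch of that difference, with a hypothetical class name and made-up values, not code from this commit:)

import java.util.Collections;
import java.util.Map;

public class NullEntryDemo {
    public static void main(String[] args) {
        // Collections.singletonMap tolerates null keys and values.
        Map<String, Long> nullKey = Collections.singletonMap(null, 0L);
        System.out.println(nullKey.containsKey(null)); // true

        // Map.of rejects nulls at construction time.
        try {
            Map.<String, Long>of(null, 0L);
        } catch (NullPointerException e) {
            System.out.println("Map.of rejects null keys and values");
        }
    }
}
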
@Test @Test
public void testAlterOffsetsIncorrectOffsetKey() { public void testAlterOffsetsIncorrectOffsetKey() {
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
Collections.singletonMap(FILENAME_FIELD, FILENAME), Map.of(FILENAME_FIELD, FILENAME),
Collections.singletonMap("other_offset_key", 0L) Map.of("other_offset_key", 0L)
); );
assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, offsets)); assertThrows(ConnectException.class, () -> connector.alterOffsets(sourceProperties, offsets));
} }
@Test @Test
public void testAlterOffsetsOffsetPositionValues() { public void testAlterOffsetsOffsetPositionValues() {
Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(sourceProperties, Collections.singletonMap( Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(sourceProperties, Map.of(
Collections.singletonMap(FILENAME_FIELD, FILENAME), Map.of(FILENAME_FIELD, FILENAME),
Collections.singletonMap(POSITION_FIELD, offset) Collections.singletonMap(POSITION_FIELD, offset)
)); ));
@ -217,9 +217,9 @@ public class FileStreamSourceConnectorTest {
@Test @Test
public void testSuccessfulAlterOffsets() { public void testSuccessfulAlterOffsets() {
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
Collections.singletonMap(FILENAME_FIELD, FILENAME), Map.of(FILENAME_FIELD, FILENAME),
Collections.singletonMap(POSITION_FIELD, 0L) Map.of(POSITION_FIELD, 0L)
); );
// Expect no exception to be thrown when a valid offsets map is passed. An empty offsets map is treated as valid // Expect no exception to be thrown when a valid offsets map is passed. An empty offsets map is treated as valid
@ -237,9 +237,9 @@ public class FileStreamSourceConnectorTest {
); );
assertTrue(alterOffsets.apply(null)); assertTrue(alterOffsets.apply(null));
assertTrue(alterOffsets.apply(Collections.emptyMap())); assertTrue(alterOffsets.apply(Map.of()));
assertTrue(alterOffsets.apply(Collections.singletonMap(FILENAME_FIELD, FILENAME))); assertTrue(alterOffsets.apply(Map.of(FILENAME_FIELD, FILENAME)));
assertTrue(alterOffsets.apply(Collections.singletonMap(FILENAME_FIELD, "/someotherfilename"))); assertTrue(alterOffsets.apply(Map.of(FILENAME_FIELD, "/someotherfilename")));
assertTrue(alterOffsets.apply(Collections.singletonMap("garbage_partition_key", "garbage_partition_value"))); assertTrue(alterOffsets.apply(Map.of("garbage_partition_key", "garbage_partition_value")));
} }
} }


@ -30,7 +30,6 @@ import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -87,8 +86,8 @@ public class FileStreamSourceTaskTest {
assertEquals(1, records.size()); assertEquals(1, records.size());
assertEquals(TOPIC, records.get(0).topic()); assertEquals(TOPIC, records.get(0).topic());
assertEquals("partial line finished", records.get(0).value()); assertEquals("partial line finished", records.get(0).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 22L), records.get(0).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 22L), records.get(0).sourceOffset());
assertNull(task.poll()); assertNull(task.poll());
// Different line endings, and make sure the final \r doesn't result in a line until we can // Different line endings, and make sure the final \r doesn't result in a line until we can
@ -98,25 +97,25 @@ public class FileStreamSourceTaskTest {
records = task.poll(); records = task.poll();
assertEquals(4, records.size()); assertEquals(4, records.size());
assertEquals("line1", records.get(0).value()); assertEquals("line1", records.get(0).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 28L), records.get(0).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 28L), records.get(0).sourceOffset());
assertEquals("line2", records.get(1).value()); assertEquals("line2", records.get(1).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(1).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(1).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 35L), records.get(1).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 35L), records.get(1).sourceOffset());
assertEquals("line3", records.get(2).value()); assertEquals("line3", records.get(2).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(2).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(2).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 41L), records.get(2).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 41L), records.get(2).sourceOffset());
assertEquals("line4", records.get(3).value()); assertEquals("line4", records.get(3).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(3).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(3).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 47L), records.get(3).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 47L), records.get(3).sourceOffset());
os.write("subsequent text".getBytes()); os.write("subsequent text".getBytes());
os.flush(); os.flush();
records = task.poll(); records = task.poll();
assertEquals(1, records.size()); assertEquals(1, records.size());
assertEquals("", records.get(0).value()); assertEquals("", records.get(0).value());
assertEquals(Collections.singletonMap(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition()); assertEquals(Map.of(FileStreamSourceTask.FILENAME_FIELD, tempFile.getAbsolutePath()), records.get(0).sourcePartition());
assertEquals(Collections.singletonMap(FileStreamSourceTask.POSITION_FIELD, 48L), records.get(0).sourceOffset()); assertEquals(Map.of(FileStreamSourceTask.POSITION_FIELD, 48L), records.get(0).sourceOffset());
os.close(); os.close();
task.stop(); task.stop();


@ -30,7 +30,6 @@ import java.io.File;
import java.io.PrintStream; import java.io.PrintStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.StandardOpenOption; import java.nio.file.StandardOpenOption;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.Map; import java.util.Map;
@ -130,8 +129,8 @@ public class FileStreamSourceConnectorIntegrationTest {
// Alter the offsets to make the connector re-process the last line in the file // Alter the offsets to make the connector re-process the last line in the file
connect.alterSourceConnectorOffset( connect.alterSourceConnectorOffset(
CONNECTOR_NAME, CONNECTOR_NAME,
Collections.singletonMap(FILENAME_FIELD, sourceFile.getAbsolutePath()), Map.of(FILENAME_FIELD, sourceFile.getAbsolutePath()),
Collections.singletonMap(POSITION_FIELD, 28L) Map.of(POSITION_FIELD, 28L)
); );
connect.resumeConnector(CONNECTOR_NAME); connect.resumeConnector(CONNECTOR_NAME);


@ -149,14 +149,12 @@ public class JsonConverter implements Converter, HeaderConverter, Versioned {
if (!(value instanceof BigDecimal decimal)) if (!(value instanceof BigDecimal decimal))
throw new DataException("Invalid type for Decimal, expected BigDecimal but was " + value.getClass()); throw new DataException("Invalid type for Decimal, expected BigDecimal but was " + value.getClass());
switch (config.decimalFormat()) { return switch (config.decimalFormat()) {
case NUMERIC: case NUMERIC -> JSON_NODE_FACTORY.numberNode(decimal);
return JSON_NODE_FACTORY.numberNode(decimal); case BASE64 -> JSON_NODE_FACTORY.binaryNode(Decimal.fromLogical(schema, decimal));
case BASE64: default ->
return JSON_NODE_FACTORY.binaryNode(Decimal.fromLogical(schema, decimal));
default:
throw new DataException("Unexpected " + JsonConverterConfig.DECIMAL_FORMAT_CONFIG + ": " + config.decimalFormat()); throw new DataException("Unexpected " + JsonConverterConfig.DECIMAL_FORMAT_CONFIG + ": " + config.decimalFormat());
} };
} }
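
(The decimal-format change above also modernizes the switch: arrow-form switch expressions, standard since Java 14, yield a value directly and never fall through, which is why the per-case return statements disappear. A small, self-contained sketch of the idiom over a made-up enum, not the converter's actual types:)

public class SwitchExpressionDemo {
    enum Format { NUMERIC, BASE64 }

    static String describe(Format format) {
        // Arrow labels: no fall-through, no break; the switch itself produces the result.
        // Covering every enum constant makes the expression exhaustive, so no default arm is required here.
        return switch (format) {
            case NUMERIC -> "plain JSON number";
            case BASE64 -> "base64-encoded bytes";
        };
    }

    public static void main(String[] args) {
        System.out.println(describe(Format.NUMERIC));
        System.out.println(describe(Format.BASE64));
    }
}

(The converter itself keeps a default arm, presumably as a guard against enum values added later.)
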
@Override @Override


@ -26,7 +26,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.module.blackbird.BlackbirdModule; import com.fasterxml.jackson.module.blackbird.BlackbirdModule;
import java.util.Collections;
import java.util.Set; import java.util.Set;
/** /**
@ -40,7 +39,7 @@ public class JsonDeserializer implements Deserializer<JsonNode> {
* Default constructor needed by Kafka * Default constructor needed by Kafka
*/ */
public JsonDeserializer() { public JsonDeserializer() {
this(Collections.emptySet(), new JsonNodeFactory(true), true); this(Set.of(), new JsonNodeFactory(true), true);
} }
/** /**

View File

@ -25,7 +25,6 @@ import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.module.blackbird.BlackbirdModule; import com.fasterxml.jackson.module.blackbird.BlackbirdModule;
import java.util.Collections;
import java.util.Set; import java.util.Set;
/** /**
@ -39,7 +38,7 @@ public class JsonSerializer implements Serializer<JsonNode> {
* Default constructor needed by Kafka * Default constructor needed by Kafka
*/ */
public JsonSerializer() { public JsonSerializer() {
this(Collections.emptySet(), new JsonNodeFactory(true), true); this(Set.of(), new JsonNodeFactory(true), true);
} }
/** /**


@ -35,7 +35,7 @@ public class JsonConverterConfigTest {
configValues.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "NuMeRiC"); configValues.put(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, "NuMeRiC");
final JsonConverterConfig config = new JsonConverterConfig(configValues); final JsonConverterConfig config = new JsonConverterConfig(configValues);
assertEquals(config.decimalFormat(), DecimalFormat.NUMERIC); assertEquals(DecimalFormat.NUMERIC, config.decimalFormat());
} }
} }


@ -44,12 +44,11 @@ import java.math.BigInteger;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Calendar; import java.util.Calendar;
import java.util.Collections;
import java.util.GregorianCalendar; import java.util.GregorianCalendar;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set; import java.util.Set;
@ -74,7 +73,7 @@ public class JsonConverterTest {
@BeforeEach @BeforeEach
public void setUp() { public void setUp() {
converter.configure(Collections.emptyMap(), false); converter.configure(Map.of(), false);
} }
// Schema metadata // Schema metadata
@ -155,7 +154,7 @@ public class JsonConverterTest {
@Test @Test
public void arrayToConnect() { public void arrayToConnect() {
byte[] arrayJson = "{ \"schema\": { \"type\": \"array\", \"items\": { \"type\" : \"int32\" } }, \"payload\": [1, 2, 3] }".getBytes(); byte[] arrayJson = "{ \"schema\": { \"type\": \"array\", \"items\": { \"type\" : \"int32\" } }, \"payload\": [1, 2, 3] }".getBytes();
assertEquals(new SchemaAndValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), Arrays.asList(1, 2, 3)), converter.toConnectData(TOPIC, arrayJson)); assertEquals(new SchemaAndValue(SchemaBuilder.array(Schema.INT32_SCHEMA).build(), List.of(1, 2, 3)), converter.toConnectData(TOPIC, arrayJson));
} }
@Test @Test
@ -211,7 +210,7 @@ public class JsonConverterTest {
@Test @Test
public void emptyBytesToConnect() { public void emptyBytesToConnect() {
// This characterizes the messages with empty data when Json schemas is disabled // This characterizes the messages with empty data when Json schemas is disabled
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false); Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true); converter.configure(props, true);
SchemaAndValue converted = converter.toConnectData(TOPIC, "".getBytes()); SchemaAndValue converted = converter.toConnectData(TOPIC, "".getBytes());
assertEquals(SchemaAndValue.NULL, converted); assertEquals(SchemaAndValue.NULL, converted);
@ -223,7 +222,7 @@ public class JsonConverterTest {
@Test @Test
public void schemalessWithEmptyFieldValueToConnect() { public void schemalessWithEmptyFieldValueToConnect() {
// This characterizes the messages with empty data when Json schemas is disabled // This characterizes the messages with empty data when Json schemas is disabled
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false); Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true); converter.configure(props, true);
String input = "{ \"a\": \"\", \"b\": null}"; String input = "{ \"a\": \"\", \"b\": null}";
SchemaAndValue converted = converter.toConnectData(TOPIC, input.getBytes()); SchemaAndValue converted = converter.toConnectData(TOPIC, input.getBytes());
@ -254,7 +253,7 @@ public class JsonConverterTest {
assertEquals(new SchemaAndValue(null, "a string"), converted); assertEquals(new SchemaAndValue(null, "a string"), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": [1, \"2\", 3] }".getBytes()); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": [1, \"2\", 3] }".getBytes());
assertEquals(new SchemaAndValue(null, Arrays.asList(1L, "2", 3L)), converted); assertEquals(new SchemaAndValue(null, List.of(1L, "2", 3L)), converted);
converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": { \"field1\": 1, \"field2\": 2} }".getBytes()); converted = converter.toConnectData(TOPIC, "{ \"schema\": null, \"payload\": { \"field1\": 1, \"field2\": 2} }".getBytes());
Map<String, Long> obj = new HashMap<>(); Map<String, Long> obj = new HashMap<>();
@ -587,7 +586,7 @@ public class JsonConverterTest {
@Test @Test
public void arrayToJson() { public void arrayToJson() {
Schema int32Array = SchemaBuilder.array(Schema.INT32_SCHEMA).build(); Schema int32Array = SchemaBuilder.array(Schema.INT32_SCHEMA).build();
JsonNode converted = parse(converter.fromConnectData(TOPIC, int32Array, Arrays.asList(1, 2, 3))); JsonNode converted = parse(converter.fromConnectData(TOPIC, int32Array, List.of(1, 2, 3)));
validateEnvelope(converted); validateEnvelope(converted);
assertEquals(parse("{ \"type\": \"array\", \"items\": { \"type\": \"int32\", \"optional\": false }, \"optional\": false }"), assertEquals(parse("{ \"type\": \"array\", \"items\": { \"type\": \"int32\", \"optional\": false }, \"optional\": false }"),
converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME)); converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME));
@ -675,7 +674,7 @@ public class JsonConverterTest {
@Test @Test
public void decimalToNumericJson() { public void decimalToNumericJson() {
converter.configure(Collections.singletonMap(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false); converter.configure(Map.of(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false);
JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2))); JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(2), new BigDecimal(new BigInteger("156"), 2)));
validateEnvelope(converted); validateEnvelope(converted);
assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"), assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"2\" } }"),
@ -686,7 +685,7 @@ public class JsonConverterTest {
@Test @Test
public void decimalWithTrailingZerosToNumericJson() { public void decimalWithTrailingZerosToNumericJson() {
converter.configure(Collections.singletonMap(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false); converter.configure(Map.of(JsonConverterConfig.DECIMAL_FORMAT_CONFIG, DecimalFormat.NUMERIC.name()), false);
JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(4), new BigDecimal(new BigInteger("15600"), 4))); JsonNode converted = parse(converter.fromConnectData(TOPIC, Decimal.schema(4), new BigDecimal(new BigInteger("15600"), 4)));
validateEnvelope(converted); validateEnvelope(converted);
assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"4\" } }"), assertEquals(parse("{ \"type\": \"bytes\", \"optional\": false, \"name\": \"org.apache.kafka.connect.data.Decimal\", \"version\": 1, \"parameters\": { \"scale\": \"4\" } }"),
@ -766,7 +765,7 @@ public class JsonConverterTest {
public void nullSchemaAndArrayToJson() { public void nullSchemaAndArrayToJson() {
// This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match // This still needs to do conversion of data, null schema means "anything goes". Make sure we mix and match
// types to verify conversion still works. // types to verify conversion still works.
JsonNode converted = parse(converter.fromConnectData(TOPIC, null, Arrays.asList(1, "string", true))); JsonNode converted = parse(converter.fromConnectData(TOPIC, null, List.of(1, "string", true)));
validateEnvelopeNullSchema(converted); validateEnvelopeNullSchema(converted);
assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull()); assertTrue(converted.get(JsonSchema.ENVELOPE_SCHEMA_FIELD_NAME).isNull());
assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add("string").add(true), assertEquals(JsonNodeFactory.instance.arrayNode().add(1).add("string").add(true),
@ -815,7 +814,7 @@ public class JsonConverterTest {
@Test @Test
public void nullSchemaAndNullValueToJson() { public void nullSchemaAndNullValueToJson() {
// This characterizes the production of tombstone messages when Json schemas is enabled // This characterizes the production of tombstone messages when Json schemas is enabled
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", true); Map<String, Boolean> props = Map.of("schemas.enable", true);
converter.configure(props, true); converter.configure(props, true);
byte[] converted = converter.fromConnectData(TOPIC, null, null); byte[] converted = converter.fromConnectData(TOPIC, null, null);
assertNull(converted); assertNull(converted);
@ -824,7 +823,7 @@ public class JsonConverterTest {
@Test @Test
public void nullValueToJson() { public void nullValueToJson() {
// This characterizes the production of tombstone messages when Json schemas is not enabled // This characterizes the production of tombstone messages when Json schemas is not enabled
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false); Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true); converter.configure(props, true);
byte[] converted = converter.fromConnectData(TOPIC, null, null); byte[] converted = converter.fromConnectData(TOPIC, null, null);
assertNull(converted); assertNull(converted);
@ -839,14 +838,14 @@ public class JsonConverterTest {
@Test @Test
public void noSchemaToConnect() { public void noSchemaToConnect() {
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false); Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true); converter.configure(props, true);
assertEquals(new SchemaAndValue(null, true), converter.toConnectData(TOPIC, "true".getBytes())); assertEquals(new SchemaAndValue(null, true), converter.toConnectData(TOPIC, "true".getBytes()));
} }
@Test @Test
public void noSchemaToJson() { public void noSchemaToJson() {
Map<String, Boolean> props = Collections.singletonMap("schemas.enable", false); Map<String, Boolean> props = Map.of("schemas.enable", false);
converter.configure(props, true); converter.configure(props, true);
JsonNode converted = parse(converter.fromConnectData(TOPIC, null, true)); JsonNode converted = parse(converter.fromConnectData(TOPIC, null, true));
assertTrue(converted.isBoolean()); assertTrue(converted.isBoolean());
@ -876,7 +875,7 @@ public class JsonConverterTest {
File propFile = new File(url.toURI()); File propFile = new File(url.toURI());
String workerPropsFile = propFile.getAbsolutePath(); String workerPropsFile = propFile.getAbsolutePath();
Map<String, String> workerProps = !workerPropsFile.isEmpty() ? Map<String, String> workerProps = !workerPropsFile.isEmpty() ?
Utils.propsToStringMap(Utils.loadProps(workerPropsFile)) : Collections.emptyMap(); Utils.propsToStringMap(Utils.loadProps(workerPropsFile)) : Map.of();
JsonConverter rc = new JsonConverter(); JsonConverter rc = new JsonConverter();
rc.configure(workerProps, false); rc.configure(workerProps, false);
@ -901,7 +900,7 @@ public class JsonConverterTest {
@Test @Test
public void serializeNullToDefault() { public void serializeNullToDefault() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, null)); JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, null));
JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":\"default\"}"); JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":\"default\"}");
@ -910,7 +909,7 @@ public class JsonConverterTest {
@Test @Test
public void serializeNullToNull() { public void serializeNullToNull() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, null)); JsonNode converted = parse(converter.fromConnectData(TOPIC, schema, null));
JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}"); JsonNode expected = parse("{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}");
@ -919,7 +918,7 @@ public class JsonConverterTest {
@Test @Test
public void deserializeNullToDefault() { public void deserializeNullToDefault() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
String value = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}"; String value = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}";
SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes()); SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes());
assertEquals("default", sav.value()); assertEquals("default", sav.value());
@ -927,7 +926,7 @@ public class JsonConverterTest {
@Test @Test
public void deserializeNullToNull() { public void deserializeNullToNull() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
String value = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}"; String value = "{\"schema\":{\"type\":\"string\",\"optional\":true,\"default\":\"default\"},\"payload\":null}";
SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes()); SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes());
assertNull(sav.value()); assertNull(sav.value());
@ -935,7 +934,7 @@ public class JsonConverterTest {
@Test @Test
public void serializeFieldNullToDefault() { public void serializeFieldNullToDefault() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
Schema structSchema = SchemaBuilder.struct().field("field1", schema).build(); Schema structSchema = SchemaBuilder.struct().field("field1", schema).build();
JsonNode converted = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema))); JsonNode converted = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema)));
@ -945,7 +944,7 @@ public class JsonConverterTest {
@Test @Test
public void serializeFieldNullToNull() { public void serializeFieldNullToNull() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
Schema structSchema = SchemaBuilder.struct().field("field1", schema).build(); Schema structSchema = SchemaBuilder.struct().field("field1", schema).build();
JsonNode converted = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema))); JsonNode converted = parse(converter.fromConnectData(TOPIC, structSchema, new Struct(structSchema)));
@ -955,7 +954,7 @@ public class JsonConverterTest {
@Test @Test
public void deserializeFieldNullToDefault() { public void deserializeFieldNullToDefault() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, true), false);
String value = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}"; String value = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}";
SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes()); SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes());
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();
@ -965,7 +964,7 @@ public class JsonConverterTest {
@Test @Test
public void deserializeFieldNullToNull() { public void deserializeFieldNullToNull() {
converter.configure(Collections.singletonMap(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false); converter.configure(Map.of(JsonConverterConfig.REPLACE_NULL_WITH_DEFAULT_CONFIG, false), false);
String value = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}"; String value = "{\"schema\":{\"type\":\"struct\",\"fields\":[{\"field\":\"field1\",\"type\":\"string\",\"optional\":true,\"default\":\"default\"}],\"optional\":false},\"payload\":{\"field1\":null}}";
SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes()); SchemaAndValue sav = converter.toConnectData(TOPIC, null, value.getBytes());
Schema schema = SchemaBuilder.string().optional().defaultValue("default").build(); Schema schema = SchemaBuilder.string().optional().defaultValue("default").build();

View File

@ -33,7 +33,6 @@ import org.slf4j.LoggerFactory;
import java.time.Duration; import java.time.Duration;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -165,7 +164,7 @@ public class MirrorClient implements AutoCloseable {
// to use ReplicationPolicy to create the checkpoint topic here. // to use ReplicationPolicy to create the checkpoint topic here.
String checkpointTopic = replicationPolicy.checkpointsTopic(remoteClusterAlias); String checkpointTopic = replicationPolicy.checkpointsTopic(remoteClusterAlias);
List<TopicPartition> checkpointAssignment = List<TopicPartition> checkpointAssignment =
Collections.singletonList(new TopicPartition(checkpointTopic, 0)); List.of(new TopicPartition(checkpointTopic, 0));
consumer.assign(checkpointAssignment); consumer.assign(checkpointAssignment);
consumer.seekToBeginning(checkpointAssignment); consumer.seekToBeginning(checkpointAssignment);
while (System.currentTimeMillis() < deadline && !endOfStream(consumer, checkpointAssignment)) { while (System.currentTimeMillis() < deadline && !endOfStream(consumer, checkpointAssignment)) {


@ -20,10 +20,9 @@ import org.apache.kafka.common.Configurable;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -46,7 +45,7 @@ public class MirrorClientTest {
} }
FakeMirrorClient() { FakeMirrorClient() {
this(Collections.emptyList()); this(List.of());
} }
@Override @Override
@ -94,7 +93,7 @@ public class MirrorClientTest {
@Test @Test
public void heartbeatTopicsTest() throws InterruptedException { public void heartbeatTopicsTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "heartbeats", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "heartbeats",
"source1.heartbeats", "source2.source1.heartbeats", "source3.heartbeats")); "source1.heartbeats", "source2.source1.heartbeats", "source3.heartbeats"));
Set<String> heartbeatTopics = client.heartbeatTopics(); Set<String> heartbeatTopics = client.heartbeatTopics();
assertEquals(heartbeatTopics, Set.of("heartbeats", "source1.heartbeats", assertEquals(heartbeatTopics, Set.of("heartbeats", "source1.heartbeats",
@ -103,7 +102,7 @@ public class MirrorClientTest {
@Test @Test
public void checkpointsTopicsTest() throws InterruptedException { public void checkpointsTopicsTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "checkpoints.internal", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "checkpoints.internal",
"source1.checkpoints.internal", "source2.source1.checkpoints.internal", "source3.checkpoints.internal")); "source1.checkpoints.internal", "source2.source1.checkpoints.internal", "source3.checkpoints.internal"));
Set<String> checkpointTopics = client.checkpointTopics(); Set<String> checkpointTopics = client.checkpointTopics();
assertEquals(Set.of("source1.checkpoints.internal", assertEquals(Set.of("source1.checkpoints.internal",
@ -112,7 +111,7 @@ public class MirrorClientTest {
@Test @Test
public void replicationHopsTest() throws InterruptedException { public void replicationHopsTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "heartbeats", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "heartbeats",
"source1.heartbeats", "source1.source2.heartbeats", "source3.heartbeats")); "source1.heartbeats", "source1.source2.heartbeats", "source3.heartbeats"));
assertEquals(1, client.replicationHops("source1")); assertEquals(1, client.replicationHops("source1"));
assertEquals(2, client.replicationHops("source2")); assertEquals(2, client.replicationHops("source2"));
@ -122,7 +121,7 @@ public class MirrorClientTest {
@Test @Test
public void upstreamClustersTest() throws InterruptedException { public void upstreamClustersTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "heartbeats", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "heartbeats",
"source1.heartbeats", "source1.source2.heartbeats", "source3.source4.source5.heartbeats")); "source1.heartbeats", "source1.source2.heartbeats", "source3.source4.source5.heartbeats"));
Set<String> sources = client.upstreamClusters(); Set<String> sources = client.upstreamClusters();
assertTrue(sources.contains("source1")); assertTrue(sources.contains("source1"));
@ -138,7 +137,7 @@ public class MirrorClientTest {
@Test @Test
public void testIdentityReplicationUpstreamClusters() throws InterruptedException { public void testIdentityReplicationUpstreamClusters() throws InterruptedException {
// IdentityReplicationPolicy treats heartbeats as a special case, so these should work as usual. // IdentityReplicationPolicy treats heartbeats as a special case, so these should work as usual.
MirrorClient client = new FakeMirrorClient(identityReplicationPolicy("source"), Arrays.asList("topic1", MirrorClient client = new FakeMirrorClient(identityReplicationPolicy("source"), List.of("topic1",
"topic2", "heartbeats", "source1.heartbeats", "source1.source2.heartbeats", "topic2", "heartbeats", "source1.heartbeats", "source1.source2.heartbeats",
"source3.source4.source5.heartbeats")); "source3.source4.source5.heartbeats"));
Set<String> sources = client.upstreamClusters(); Set<String> sources = client.upstreamClusters();
@ -154,7 +153,7 @@ public class MirrorClientTest {
@Test @Test
public void remoteTopicsTest() throws InterruptedException { public void remoteTopicsTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "topic3", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "topic3",
"source1.topic4", "source1.source2.topic5", "source3.source4.source5.topic6")); "source1.topic4", "source1.source2.topic5", "source3.source4.source5.topic6"));
Set<String> remoteTopics = client.remoteTopics(); Set<String> remoteTopics = client.remoteTopics();
assertFalse(remoteTopics.contains("topic1")); assertFalse(remoteTopics.contains("topic1"));
@ -168,7 +167,7 @@ public class MirrorClientTest {
@Test @Test
public void testIdentityReplicationRemoteTopics() throws InterruptedException { public void testIdentityReplicationRemoteTopics() throws InterruptedException {
// IdentityReplicationPolicy should consider any topic to be remote. // IdentityReplicationPolicy should consider any topic to be remote.
MirrorClient client = new FakeMirrorClient(identityReplicationPolicy("source"), Arrays.asList( MirrorClient client = new FakeMirrorClient(identityReplicationPolicy("source"), List.of(
"topic1", "topic2", "topic3", "heartbeats", "backup.heartbeats")); "topic1", "topic2", "topic3", "heartbeats", "backup.heartbeats"));
Set<String> remoteTopics = client.remoteTopics(); Set<String> remoteTopics = client.remoteTopics();
assertTrue(remoteTopics.contains("topic1")); assertTrue(remoteTopics.contains("topic1"));
@ -181,10 +180,10 @@ public class MirrorClientTest {
@Test @Test
public void remoteTopicsSeparatorTest() throws InterruptedException { public void remoteTopicsSeparatorTest() throws InterruptedException {
MirrorClient client = new FakeMirrorClient(Arrays.asList("topic1", "topic2", "topic3", MirrorClient client = new FakeMirrorClient(List.of("topic1", "topic2", "topic3",
"source1__topic4", "source1__source2__topic5", "source3__source4__source5__topic6")); "source1__topic4", "source1__source2__topic5", "source3__source4__source5__topic6"));
((Configurable) client.replicationPolicy()).configure( ((Configurable) client.replicationPolicy()).configure(
Collections.singletonMap("replication.policy.separator", "__")); Map.of("replication.policy.separator", "__"));
Set<String> remoteTopics = client.remoteTopics(); Set<String> remoteTopics = client.remoteTopics();
assertFalse(remoteTopics.contains("topic1")); assertFalse(remoteTopics.contains("topic1"));
assertFalse(remoteTopics.contains("topic2")); assertFalse(remoteTopics.contains("topic2"));
@ -197,7 +196,7 @@ public class MirrorClientTest {
@Test @Test
public void testIdentityReplicationTopicSource() { public void testIdentityReplicationTopicSource() {
MirrorClient client = new FakeMirrorClient( MirrorClient client = new FakeMirrorClient(
identityReplicationPolicy("primary"), Collections.emptyList()); identityReplicationPolicy("primary"), List.of());
assertEquals("topic1", client.replicationPolicy() assertEquals("topic1", client.replicationPolicy()
.formatRemoteTopic("primary", "topic1")); .formatRemoteTopic("primary", "topic1"));
assertEquals("primary", client.replicationPolicy() assertEquals("primary", client.replicationPolicy()
@ -211,8 +210,7 @@ public class MirrorClientTest {
private ReplicationPolicy identityReplicationPolicy(String source) { private ReplicationPolicy identityReplicationPolicy(String source) {
IdentityReplicationPolicy policy = new IdentityReplicationPolicy(); IdentityReplicationPolicy policy = new IdentityReplicationPolicy();
policy.configure(Collections.singletonMap( policy.configure(Map.of(IdentityReplicationPolicy.SOURCE_CLUSTER_ALIAS_CONFIG, source));
IdentityReplicationPolicy.SOURCE_CLUSTER_ALIAS_CONFIG, source));
return policy; return policy;
} }
} }
@ -20,7 +20,6 @@ package org.apache.kafka.connect.mirror;
import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
@ -33,7 +32,7 @@ public class ReplicationPolicyTest {
@BeforeEach @BeforeEach
public void setUp() { public void setUp() {
DEFAULT_REPLICATION_POLICY.configure(Collections.emptyMap()); DEFAULT_REPLICATION_POLICY.configure(Map.of());
} }
@Test @Test
@ -30,7 +30,6 @@ import org.apache.kafka.connect.util.TopicAdmin;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
@ -97,7 +96,7 @@ public class CheckpointStore implements AutoCloseable {
public Map<TopicPartition, Checkpoint> get(String group) { public Map<TopicPartition, Checkpoint> get(String group) {
Map<TopicPartition, Checkpoint> result = checkpointsPerConsumerGroup.get(group); Map<TopicPartition, Checkpoint> result = checkpointsPerConsumerGroup.get(group);
return result == null ? null : Collections.unmodifiableMap(result); return result == null ? null : Map.copyOf(result);
} }
public Map<String, Map<TopicPartition, OffsetAndMetadata>> computeConvertedUpstreamOffset() { public Map<String, Map<TopicPartition, OffsetAndMetadata>> computeConvertedUpstreamOffset() {
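Unlike the purely mechanical swaps elsewhere in the patch, replacing Collections.unmodifiableMap(result) with Map.copyOf(result) is a small semantic change: the former returns a read-only view that reflects later writes to the backing map, the latter an independent immutable snapshot that also rejects null keys and values. A short sketch of the difference (names are illustrative):

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class CopyVsViewSketch {
        public static void main(String[] args) {
            Map<String, Integer> backing = new HashMap<>();
            backing.put("t1-0", 1);

            Map<String, Integer> view = Collections.unmodifiableMap(backing);
            Map<String, Integer> copy = Map.copyOf(backing);

            backing.put("t1-1", 2);

            System.out.println(view.size()); // 2 - the view tracks the backing map
            System.out.println(copy.size()); // 1 - the copy is a detached snapshot
        }
    }

For CheckpointStore.get the two are equivalent only if callers do not rely on observing later checkpoint updates through the returned map, which appears to be the intent here but is not spelled out in the diff.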
@ -38,7 +38,6 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -136,7 +135,7 @@ public class MirrorCheckpointConnector extends SourceConnector {
// If the replication is disabled or checkpoint emission is disabled by setting 'emit.checkpoints.enabled' to false, // If the replication is disabled or checkpoint emission is disabled by setting 'emit.checkpoints.enabled' to false,
// the interval of checkpoint emission will be negative and no 'MirrorCheckpointTask' will be created. // the interval of checkpoint emission will be negative and no 'MirrorCheckpointTask' will be created.
if (!config.enabled() || config.emitCheckpointsInterval().isNegative()) { if (!config.enabled() || config.emitCheckpointsInterval().isNegative()) {
return Collections.emptyList(); return List.of();
} }
if (knownConsumerGroups == null) { if (knownConsumerGroups == null) {
@ -148,7 +147,7 @@ public class MirrorCheckpointConnector extends SourceConnector {
// If the consumer group is empty, no 'MirrorCheckpointTask' will be created. // If the consumer group is empty, no 'MirrorCheckpointTask' will be created.
if (knownConsumerGroups.isEmpty()) { if (knownConsumerGroups.isEmpty()) {
return Collections.emptyList(); return List.of();
} }
int numTasks = Math.min(maxTasks, knownConsumerGroups.size()); int numTasks = Math.min(maxTasks, knownConsumerGroups.size());
@ -199,7 +198,7 @@ public class MirrorCheckpointConnector extends SourceConnector {
throws InterruptedException, ExecutionException { throws InterruptedException, ExecutionException {
// If loadInitialConsumerGroups fails for any reason(e.g., timeout), knownConsumerGroups may be null. // If loadInitialConsumerGroups fails for any reason(e.g., timeout), knownConsumerGroups may be null.
// We still want this method to recover gracefully in such cases. // We still want this method to recover gracefully in such cases.
Set<String> knownConsumerGroups = this.knownConsumerGroups == null ? Collections.emptySet() : this.knownConsumerGroups; Set<String> knownConsumerGroups = this.knownConsumerGroups == null ? Set.of() : this.knownConsumerGroups;
Set<String> consumerGroups = findConsumerGroups(); Set<String> consumerGroups = findConsumerGroups();
Set<String> newConsumerGroups = new HashSet<>(consumerGroups); Set<String> newConsumerGroups = new HashSet<>(consumerGroups);
newConsumerGroups.removeAll(knownConsumerGroups); newConsumerGroups.removeAll(knownConsumerGroups);
@ -35,7 +35,6 @@ import org.slf4j.LoggerFactory;
import java.time.Duration; import java.time.Duration;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -186,7 +185,7 @@ public class MirrorCheckpointTask extends SourceTask {
.collect(Collectors.toList()); .collect(Collectors.toList());
} catch (ExecutionException e) { } catch (ExecutionException e) {
log.error("Error querying offsets for consumer group {} on cluster {}.", group, sourceClusterAlias, e); log.error("Error querying offsets for consumer group {} on cluster {}.", group, sourceClusterAlias, e);
return Collections.emptyList(); return List.of();
} }
} }
@ -195,7 +194,7 @@ public class MirrorCheckpointTask extends SourceTask {
return upstreamGroupOffsets.entrySet().stream() return upstreamGroupOffsets.entrySet().stream()
.filter(x -> shouldCheckpointTopic(x.getKey().topic())) // Only perform relevant checkpoints filtered by "topic filter" .filter(x -> shouldCheckpointTopic(x.getKey().topic())) // Only perform relevant checkpoints filtered by "topic filter"
.map(x -> checkpoint(group, x.getKey(), x.getValue())) .map(x -> checkpoint(group, x.getKey(), x.getValue()))
.flatMap(o -> o.stream()) // do not emit checkpoints for partitions that don't have offset-syncs .flatMap(Optional::stream) // do not emit checkpoints for partitions that don't have offset-syncs
.filter(x -> x.downstreamOffset() >= 0) // ignore offsets we cannot translate accurately .filter(x -> x.downstreamOffset() >= 0) // ignore offsets we cannot translate accurately
.filter(this::checkpointIsMoreRecent) // do not emit checkpoints for partitions that have a later checkpoint .filter(this::checkpointIsMoreRecent) // do not emit checkpoints for partitions that have a later checkpoint
.collect(Collectors.toMap(Checkpoint::topicPartition, Function.identity())); .collect(Collectors.toMap(Checkpoint::topicPartition, Function.identity()));
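The .flatMap(o -> o.stream()) to .flatMap(Optional::stream) change in the checkpoint pipeline above is a readability refactor with identical behavior: Optional.stream() (Java 9+) yields a one-element stream for a present value and an empty stream otherwise, so partitions without an offset sync simply drop out. A tiny sketch of the idiom (values are illustrative):

    import java.util.List;
    import java.util.Optional;
    import java.util.stream.Stream;

    public class OptionalStreamSketch {
        public static void main(String[] args) {
            List<Integer> present = Stream.of(Optional.of(1), Optional.<Integer>empty(), Optional.of(3))
                    .flatMap(Optional::stream) // empty optionals vanish here
                    .toList();
            System.out.println(present); // [1, 3]
        }
    }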
@ -234,7 +233,7 @@ public class MirrorCheckpointTask extends SourceTask {
throws InterruptedException, ExecutionException { throws InterruptedException, ExecutionException {
if (stopping) { if (stopping) {
// short circuit if stopping // short circuit if stopping
return Collections.emptyMap(); return Map.of();
} }
return adminCall( return adminCall(
() -> sourceAdminClient.listConsumerGroupOffsets(group).partitionsToOffsetAndMetadata().get(), () -> sourceAdminClient.listConsumerGroupOffsets(group).partitionsToOffsetAndMetadata().get(),
@ -18,7 +18,6 @@ package org.apache.kafka.connect.mirror;
import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -35,7 +34,7 @@ public class MirrorCheckpointTaskConfig extends MirrorCheckpointConfig {
Set<String> taskConsumerGroups() { Set<String> taskConsumerGroups() {
List<String> fields = getList(TASK_CONSUMER_GROUPS); List<String> fields = getList(TASK_CONSUMER_GROUPS);
if (fields == null || fields.isEmpty()) { if (fields == null || fields.isEmpty()) {
return Collections.emptySet(); return Set.of();
} }
return new HashSet<>(fields); return new HashSet<>(fields);
} }
@ -24,7 +24,6 @@ import org.apache.kafka.connect.connector.Task;
import org.apache.kafka.connect.errors.ConnectException; import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.SourceConnector; import org.apache.kafka.connect.source.SourceConnector;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -73,10 +72,10 @@ public class MirrorHeartbeatConnector extends SourceConnector {
// if the heartbeats emission is disabled by setting `emit.heartbeats.enabled` to `false`, // if the heartbeats emission is disabled by setting `emit.heartbeats.enabled` to `false`,
// the interval heartbeat emission will be negative and no `MirrorHeartbeatTask` will be created // the interval heartbeat emission will be negative and no `MirrorHeartbeatTask` will be created
if (config.emitHeartbeatsInterval().isNegative()) { if (config.emitHeartbeatsInterval().isNegative()) {
return Collections.emptyList(); return List.of();
} }
// just need a single task // just need a single task
return Collections.singletonList(config.originalsStrings()); return List.of(config.originalsStrings());
} }
@Override @Override
@ -22,7 +22,6 @@ import org.apache.kafka.connect.source.SourceRecord;
import org.apache.kafka.connect.source.SourceTask; import org.apache.kafka.connect.source.SourceTask;
import java.time.Duration; import java.time.Duration;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.CountDownLatch; import java.util.concurrent.CountDownLatch;
@ -76,7 +75,7 @@ public class MirrorHeartbeatTask extends SourceTask {
Schema.BYTES_SCHEMA, heartbeat.recordKey(), Schema.BYTES_SCHEMA, heartbeat.recordKey(),
Schema.BYTES_SCHEMA, heartbeat.recordValue(), Schema.BYTES_SCHEMA, heartbeat.recordValue(),
timestamp); timestamp);
return Collections.singletonList(record); return List.of(record);
} }
@Override @Override
@ -57,7 +57,6 @@ import java.net.URI;
import java.net.URLEncoder; import java.net.URLEncoder;
import java.net.UnknownHostException; import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -250,7 +249,7 @@ public class MirrorMaker {
SharedTopicAdmin sharedAdmin = new SharedTopicAdmin(adminProps); SharedTopicAdmin sharedAdmin = new SharedTopicAdmin(adminProps);
KafkaOffsetBackingStore offsetBackingStore = new KafkaOffsetBackingStore(sharedAdmin, () -> clientIdBase, KafkaOffsetBackingStore offsetBackingStore = new KafkaOffsetBackingStore(sharedAdmin, () -> clientIdBase,
plugins.newInternalConverter(true, JsonConverter.class.getName(), plugins.newInternalConverter(true, JsonConverter.class.getName(),
Collections.singletonMap(JsonConverterConfig.SCHEMAS_ENABLE_CONFIG, "false"))); Map.of(JsonConverterConfig.SCHEMAS_ENABLE_CONFIG, "false")));
offsetBackingStore.configure(distributedConfig); offsetBackingStore.configure(distributedConfig);
ConnectorClientConfigOverridePolicy clientConfigOverridePolicy = new AllConnectorClientConfigOverridePolicy(); ConnectorClientConfigOverridePolicy clientConfigOverridePolicy = new AllConnectorClientConfigOverridePolicy();
clientConfigOverridePolicy.configure(config.originals()); clientConfigOverridePolicy.configure(config.originals());
@ -31,8 +31,6 @@ import org.apache.kafka.connect.runtime.isolation.Plugins;
import org.apache.kafka.connect.runtime.rest.RestServerConfig; import org.apache.kafka.connect.runtime.rest.RestServerConfig;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -215,14 +213,9 @@ public final class MirrorMakerConfig extends AbstractConfig {
Set<String> allConfigNames() { Set<String> allConfigNames() {
Set<String> allNames = new HashSet<>(); Set<String> allNames = new HashSet<>();
List<ConfigDef> connectorConfigDefs = Arrays.asList( allNames.addAll(MirrorCheckpointConfig.CONNECTOR_CONFIG_DEF.names());
MirrorCheckpointConfig.CONNECTOR_CONFIG_DEF, allNames.addAll(MirrorSourceConfig.CONNECTOR_CONFIG_DEF.names());
MirrorSourceConfig.CONNECTOR_CONFIG_DEF, allNames.addAll(MirrorHeartbeatConfig.CONNECTOR_CONFIG_DEF.names());
MirrorHeartbeatConfig.CONNECTOR_CONFIG_DEF
);
for (ConfigDef cd : connectorConfigDefs) {
allNames.addAll(cd.names());
}
return allNames; return allNames;
} }
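The side-by-side rendering of the allConfigNames() hunk above is hard to follow: the temporary Arrays.asList of ConfigDefs and the for-loop over it are dropped in favour of three direct addAll calls. Reconstructed from the right-hand column as a readability aid (not an authoritative copy of the new method):

    Set<String> allConfigNames() {
        Set<String> allNames = new HashSet<>();
        allNames.addAll(MirrorCheckpointConfig.CONNECTOR_CONFIG_DEF.names());
        allNames.addAll(MirrorSourceConfig.CONNECTOR_CONFIG_DEF.names());
        allNames.addAll(MirrorHeartbeatConfig.CONNECTOR_CONFIG_DEF.names());
        return allNames;
    }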
@ -288,7 +281,7 @@ public final class MirrorMakerConfig extends AbstractConfig {
ConfigDef result = new ConfigDef() ConfigDef result = new ConfigDef()
.define(CLUSTERS_CONFIG, Type.LIST, Importance.HIGH, CLUSTERS_DOC) .define(CLUSTERS_CONFIG, Type.LIST, Importance.HIGH, CLUSTERS_DOC)
.define(ENABLE_INTERNAL_REST_CONFIG, Type.BOOLEAN, false, Importance.HIGH, ENABLE_INTERNAL_REST_DOC) .define(ENABLE_INTERNAL_REST_CONFIG, Type.BOOLEAN, false, Importance.HIGH, ENABLE_INTERNAL_REST_DOC)
.define(CONFIG_PROVIDERS_CONFIG, Type.LIST, Collections.emptyList(), Importance.LOW, CONFIG_PROVIDERS_DOC) .define(CONFIG_PROVIDERS_CONFIG, Type.LIST, List.of(), Importance.LOW, CONFIG_PROVIDERS_DOC)
// security support // security support
.define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, .define(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
Type.STRING, Type.STRING,
@ -55,7 +55,6 @@ import org.slf4j.LoggerFactory;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -99,8 +98,8 @@ public class MirrorSourceConnector extends SourceConnector {
private String connectorName; private String connectorName;
private TopicFilter topicFilter; private TopicFilter topicFilter;
private ConfigPropertyFilter configPropertyFilter; private ConfigPropertyFilter configPropertyFilter;
private List<TopicPartition> knownSourceTopicPartitions = Collections.emptyList(); private List<TopicPartition> knownSourceTopicPartitions = List.of();
private List<TopicPartition> knownTargetTopicPartitions = Collections.emptyList(); private List<TopicPartition> knownTargetTopicPartitions = List.of();
private ReplicationPolicy replicationPolicy; private ReplicationPolicy replicationPolicy;
private int replicationFactor; private int replicationFactor;
private Admin sourceAdminClient; private Admin sourceAdminClient;
@ -202,7 +201,7 @@ public class MirrorSourceConnector extends SourceConnector {
@Override @Override
public List<Map<String, String>> taskConfigs(int maxTasks) { public List<Map<String, String>> taskConfigs(int maxTasks) {
if (!config.enabled() || knownSourceTopicPartitions.isEmpty()) { if (!config.enabled() || knownSourceTopicPartitions.isEmpty()) {
return Collections.emptyList(); return List.of();
} }
int numTasks = Math.min(maxTasks, knownSourceTopicPartitions.size()); int numTasks = Math.min(maxTasks, knownSourceTopicPartitions.size());
List<List<TopicPartition>> roundRobinByTask = new ArrayList<>(numTasks); List<List<TopicPartition>> roundRobinByTask = new ArrayList<>(numTasks);
@ -19,7 +19,6 @@ package org.apache.kafka.connect.mirror;
import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.config.ConfigDef;
import java.util.Collections;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
@ -36,7 +35,7 @@ public class MirrorSourceTaskConfig extends MirrorSourceConfig {
Set<TopicPartition> taskTopicPartitions() { Set<TopicPartition> taskTopicPartitions() {
List<String> fields = getList(TASK_TOPIC_PARTITIONS); List<String> fields = getList(TASK_TOPIC_PARTITIONS);
if (fields == null || fields.isEmpty()) { if (fields == null || fields.isEmpty()) {
return Collections.emptySet(); return Set.of();
} }
return fields.stream() return fields.stream()
.map(MirrorUtils::decodeTopicPartition) .map(MirrorUtils::decodeTopicPartition)
@ -38,19 +38,16 @@ import org.apache.kafka.connect.util.TopicAdmin;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Objects; import java.util.Objects;
import java.util.Set;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.function.Supplier; import java.util.function.Supplier;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import static java.util.Collections.singleton;
/** Internal utility methods. */ /** Internal utility methods. */
public final class MirrorUtils { public final class MirrorUtils {
@ -84,7 +81,7 @@ public final class MirrorUtils {
} }
public static Map<String, Object> wrapOffset(long offset) { public static Map<String, Object> wrapOffset(long offset) {
return Collections.singletonMap(OFFSET_KEY, offset); return Map.of(OFFSET_KEY, offset);
} }
public static TopicPartition unwrapPartition(Map<String, ?> wrapped) { public static TopicPartition unwrapPartition(Map<String, ?> wrapped) {
@ -265,7 +262,7 @@ public final class MirrorUtils {
} }
static Pattern compilePatternList(String fields) { static Pattern compilePatternList(String fields) {
return compilePatternList(Arrays.asList(fields.split("\\W*,\\W*"))); return compilePatternList(List.of(fields.split("\\W*,\\W*")));
} }
static void createCompactedTopic(String topicName, short partitions, short replicationFactor, Admin admin) { static void createCompactedTopic(String topicName, short partitions, short replicationFactor, Admin admin) {
@ -277,7 +274,7 @@ public final class MirrorUtils {
CreateTopicsOptions args = new CreateTopicsOptions().validateOnly(false); CreateTopicsOptions args = new CreateTopicsOptions().validateOnly(false);
try { try {
admin.createTopics(singleton(topicDescription), args).values().get(topicName).get(); admin.createTopics(Set.of(topicDescription), args).values().get(topicName).get();
log.info("Created topic '{}'", topicName); log.info("Created topic '{}'", topicName);
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.interrupted(); Thread.interrupted();
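Two of the MirrorUtils changes above carry small caveats worth noting. Arrays.asList(fields.split(...)) returned a fixed-size list backed by the array, whereas List.of(...) copies the array into a fully immutable list and rejects null elements (String.split never produces nulls, so that is safe here). Likewise Set.of(topicDescription) matches the removed Collections.singleton for one element, but the of() factory throws on duplicates or nulls. A small sketch of those edge cases (values are illustrative):

    import java.util.List;
    import java.util.Set;

    public class ImmutableFactoryEdgeCases {
        public static void main(String[] args) {
            List<String> fields = List.of("a, b , c".split("\\W*,\\W*"));
            System.out.println(fields);        // [a, b, c]
            // fields.set(0, "x");             // UnsupportedOperationException (List.of is immutable)
            // Arrays.asList("a", "b").set(0, "x") would have been allowed (fixed-size but writable)

            Set<String> topics = Set.of("heartbeats");
            System.out.println(topics.size()); // 1
            // Set.of("a", "a");               // IllegalArgumentException: duplicate element
        }
    }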
@ -28,7 +28,7 @@ import org.glassfish.hk2.utilities.binding.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig; import org.glassfish.jersey.server.ResourceConfig;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.List;
import java.util.Map; import java.util.Map;
public class MirrorRestServer extends RestServer { public class MirrorRestServer extends RestServer {
@ -48,14 +48,12 @@ public class MirrorRestServer extends RestServer {
@Override @Override
protected Collection<Class<?>> regularResources() { protected Collection<Class<?>> regularResources() {
return Collections.singletonList( return List.of(InternalMirrorResource.class);
InternalMirrorResource.class
);
} }
@Override @Override
protected Collection<Class<?>> adminResources() { protected Collection<Class<?>> adminResources() {
return Collections.emptyList(); return List.of();
} }
@Override @Override
@ -70,5 +68,4 @@ public class MirrorRestServer extends RestServer {
bind(restClient).to(RestClient.class); bind(restClient).to(RestClient.class);
} }
} }
} }
@ -23,7 +23,6 @@ import org.apache.kafka.connect.util.Callback;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
@ -63,7 +62,7 @@ public class CheckpointStoreTest {
assertTrue(store.isInitialized()); assertTrue(store.isInitialized());
Map<String, Map<TopicPartition, Checkpoint>> expected = new HashMap<>(); Map<String, Map<TopicPartition, Checkpoint>> expected = new HashMap<>();
expected.put("group1", Collections.singletonMap(new TopicPartition("t1", 0), expected.put("group1", Map.of(new TopicPartition("t1", 0),
new Checkpoint("group1", new TopicPartition("t1", 0), 1, 1, ""))); new Checkpoint("group1", new TopicPartition("t1", 0), 1, 1, "")));
assertEquals(expected, store.checkpointsPerConsumerGroup); assertEquals(expected, store.checkpointsPerConsumerGroup);
} }
@ -20,8 +20,6 @@ import org.apache.kafka.common.config.ConfigDef;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -37,7 +35,7 @@ public class MirrorCheckpointConfigTest {
@Test @Test
public void testTaskConfigConsumerGroups() { public void testTaskConfigConsumerGroups() {
List<String> groups = Arrays.asList("consumer-1", "consumer-2", "consumer-3"); List<String> groups = List.of("consumer-1", "consumer-2", "consumer-3");
MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps()); MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps());
Map<String, String> props = config.taskConfigForConsumerGroups(groups, 1); Map<String, String> props = config.taskConfigForConsumerGroups(groups, 1);
MirrorCheckpointTaskConfig taskConfig = new MirrorCheckpointTaskConfig(props); MirrorCheckpointTaskConfig taskConfig = new MirrorCheckpointTaskConfig(props);
@ -118,7 +116,7 @@ public class MirrorCheckpointConfigTest {
Map<String, String> configValues = MirrorCheckpointConfig.validate(makeProps( Map<String, String> configValues = MirrorCheckpointConfig.validate(makeProps(
MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED, "false", MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED, "false",
MirrorCheckpointConfig.SYNC_GROUP_OFFSETS_ENABLED, "false")); MirrorCheckpointConfig.SYNC_GROUP_OFFSETS_ENABLED, "false"));
assertEquals(configValues.keySet(), Collections.singleton(MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED)); assertEquals(configValues.keySet(), Set.of(MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED));
configValues = MirrorCheckpointConfig.validate(makeProps(MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED, "true", configValues = MirrorCheckpointConfig.validate(makeProps(MirrorCheckpointConfig.EMIT_CHECKPOINTS_ENABLED, "true",
MirrorCheckpointConfig.EMIT_OFFSET_SYNCS_ENABLED, "false")); MirrorCheckpointConfig.EMIT_OFFSET_SYNCS_ENABLED, "false"));
@ -26,7 +26,6 @@ import org.apache.kafka.connect.errors.RetriableException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -144,10 +143,10 @@ public class MirrorCheckpointConnectorTest {
@Test @Test
public void testFindConsumerGroups() throws Exception { public void testFindConsumerGroups() throws Exception {
MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps()); MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps());
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(Collections.emptySet(), config); MirrorCheckpointConnector connector = new MirrorCheckpointConnector(Set.of(), config);
connector = spy(connector); connector = spy(connector);
Collection<GroupListing> groups = Arrays.asList( Collection<GroupListing> groups = List.of(
new GroupListing("g1", Optional.of(GroupType.CLASSIC), "", Optional.empty()), new GroupListing("g1", Optional.of(GroupType.CLASSIC), "", Optional.empty()),
new GroupListing("g2", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty())); new GroupListing("g2", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty()));
Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
@ -168,16 +167,16 @@ public class MirrorCheckpointConnectorTest {
doReturn(false).when(connector).shouldReplicateByTopicFilter(anyString()); doReturn(false).when(connector).shouldReplicateByTopicFilter(anyString());
Set<String> topicFilterGroupFound = connector.findConsumerGroups(); Set<String> topicFilterGroupFound = connector.findConsumerGroups();
assertEquals(Collections.emptySet(), topicFilterGroupFound); assertEquals(Set.of(), topicFilterGroupFound);
} }
@Test @Test
public void testFindConsumerGroupsInCommonScenarios() throws Exception { public void testFindConsumerGroupsInCommonScenarios() throws Exception {
MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps()); MirrorCheckpointConfig config = new MirrorCheckpointConfig(makeProps());
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(Collections.emptySet(), config); MirrorCheckpointConnector connector = new MirrorCheckpointConnector(Set.of(), config);
connector = spy(connector); connector = spy(connector);
Collection<GroupListing> groups = Arrays.asList( Collection<GroupListing> groups = List.of(
new GroupListing("g1", Optional.of(GroupType.CLASSIC), "", Optional.empty()), new GroupListing("g1", Optional.of(GroupType.CLASSIC), "", Optional.empty()),
new GroupListing("g2", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty()), new GroupListing("g2", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty()),
new GroupListing("g3", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty()), new GroupListing("g3", Optional.of(GroupType.CLASSIC), ConsumerProtocol.PROTOCOL_TYPE, Optional.empty()),
@ -203,7 +202,7 @@ public class MirrorCheckpointConnectorTest {
groupToOffsets.put("g1", offsetsForGroup1); groupToOffsets.put("g1", offsetsForGroup1);
groupToOffsets.put("g2", offsetsForGroup2); groupToOffsets.put("g2", offsetsForGroup2);
groupToOffsets.put("g3", offsetsForGroup3); groupToOffsets.put("g3", offsetsForGroup3);
doReturn(groupToOffsets).when(connector).listConsumerGroupOffsets(Arrays.asList("g1", "g2", "g3")); doReturn(groupToOffsets).when(connector).listConsumerGroupOffsets(List.of("g1", "g2", "g3"));
Set<String> groupFound = connector.findConsumerGroups(); Set<String> groupFound = connector.findConsumerGroups();
Set<String> verifiedSet = new HashSet<>(); Set<String> verifiedSet = new HashSet<>();
@ -215,8 +214,8 @@ public class MirrorCheckpointConnectorTest {
@Test @Test
public void testAlterOffsetsIncorrectPartitionKey() { public void testAlterOffsetsIncorrectPartitionKey() {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Map.of(
Collections.singletonMap("unused_partition_key", "unused_partition_value"), Map.of("unused_partition_key", "unused_partition_value"),
SOURCE_OFFSET SOURCE_OFFSET
))); )));
@ -231,7 +230,7 @@ public class MirrorCheckpointConnectorTest {
public void testAlterOffsetsMissingPartitionKey() { public void testAlterOffsetsMissingPartitionKey() {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Collections.singletonMap( Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Map.of(
partition, partition,
SOURCE_OFFSET SOURCE_OFFSET
)); ));
@ -240,7 +239,7 @@ public class MirrorCheckpointConnectorTest {
// Sanity check to make sure our valid partition is actually valid // Sanity check to make sure our valid partition is actually valid
assertTrue(alterOffsets.apply(validPartition)); assertTrue(alterOffsets.apply(validPartition));
for (String key : Arrays.asList(CONSUMER_GROUP_ID_KEY, TOPIC_KEY, PARTITION_KEY)) { for (String key : List.of(CONSUMER_GROUP_ID_KEY, TOPIC_KEY, PARTITION_KEY)) {
Map<String, ?> invalidPartition = new HashMap<>(validPartition); Map<String, ?> invalidPartition = new HashMap<>(validPartition);
invalidPartition.remove(key); invalidPartition.remove(key);
assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition)); assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition));
@ -252,7 +251,7 @@ public class MirrorCheckpointConnectorTest {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
Map<String, Object> partition = sourcePartition("consumer-app-2", "t", 3); Map<String, Object> partition = sourcePartition("consumer-app-2", "t", 3);
partition.put(PARTITION_KEY, "a string"); partition.put(PARTITION_KEY, "a string");
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Map.of(
partition, partition,
SOURCE_OFFSET SOURCE_OFFSET
))); )));
@ -276,9 +275,9 @@ public class MirrorCheckpointConnectorTest {
public void testAlterOffsetsIncorrectOffsetKey() { public void testAlterOffsetsIncorrectOffsetKey() {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("consumer-app-5", "t1", 2), sourcePartition("consumer-app-5", "t1", 2),
Collections.singletonMap("unused_offset_key", 0) Map.of("unused_offset_key", 0)
); );
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets)); assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets));
} }
@ -287,7 +286,7 @@ public class MirrorCheckpointConnectorTest {
public void testAlterOffsetsOffsetValues() { public void testAlterOffsetsOffsetValues() {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Collections.singletonMap( Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Map.of(
sourcePartition("consumer-app-6", "t", 5), sourcePartition("consumer-app-6", "t", 5),
Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset) Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset)
)); ));
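One detail in the testAlterOffsetsOffsetValues hunk above: the inner Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset) is left untouched, presumably because the test feeds null offset values through this helper and Map.of throws a NullPointerException for null values where singletonMap does not. This is an inference from the surrounding code, not something the patch states. A minimal illustration of that constraint (the "offset" key below is hypothetical):

    import java.util.Collections;
    import java.util.Map;

    public class NullValueSketch {
        public static void main(String[] args) {
            Object offset = null;
            System.out.println(Collections.singletonMap("offset", offset)); // {offset=null}
            try {
                Map.of("offset", offset);
            } catch (NullPointerException e) {
                System.out.println("Map.of rejects null values");
            }
        }
    }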
@ -308,7 +307,7 @@ public class MirrorCheckpointConnectorTest {
public void testSuccessfulAlterOffsets() { public void testSuccessfulAlterOffsets() {
MirrorCheckpointConnector connector = new MirrorCheckpointConnector(); MirrorCheckpointConnector connector = new MirrorCheckpointConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("consumer-app-7", "t2", 0), sourcePartition("consumer-app-7", "t2", 0),
SOURCE_OFFSET SOURCE_OFFSET
); );
@ -317,7 +316,7 @@ public class MirrorCheckpointConnectorTest {
// since it could indicate that the offsets were reset previously or that no offsets have been committed yet // since it could indicate that the offsets were reset previously or that no offsets have been committed yet
// (for a reset operation) // (for a reset operation)
assertTrue(connector.alterOffsets(null, offsets)); assertTrue(connector.alterOffsets(null, offsets));
assertTrue(connector.alterOffsets(null, Collections.emptyMap())); assertTrue(connector.alterOffsets(null, Map.of()));
} }
@Test @Test
@ -337,8 +336,8 @@ public class MirrorCheckpointConnectorTest {
assertTrue(() -> alterOffsets.apply(partition)); assertTrue(() -> alterOffsets.apply(partition));
assertTrue(() -> alterOffsets.apply(null)); assertTrue(() -> alterOffsets.apply(null));
assertTrue(() -> alterOffsets.apply(Collections.emptyMap())); assertTrue(() -> alterOffsets.apply(Map.of()));
assertTrue(() -> alterOffsets.apply(Collections.singletonMap("unused_partition_key", "unused_partition_value"))); assertTrue(() -> alterOffsets.apply(Map.of("unused_partition_key", "unused_partition_value")));
} }
private static Map<String, Object> sourcePartition(String consumerGroupId, String topic, int partition) { private static Map<String, Object> sourcePartition(String consumerGroupId, String topic, int partition) {
@ -22,12 +22,12 @@ import org.apache.kafka.connect.source.SourceRecord;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.OptionalLong; import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertEquals;
@ -43,8 +43,8 @@ public class MirrorCheckpointTaskTest {
@Test @Test
public void testDownstreamTopicRenaming() { public void testDownstreamTopicRenaming() {
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), null, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), null, Set.of(), Map.of(),
new CheckpointStore(Collections.emptyMap())); new CheckpointStore(Map.of()));
assertEquals(new TopicPartition("source1.topic3", 4), assertEquals(new TopicPartition("source1.topic3", 4),
mirrorCheckpointTask.renameTopicPartition(new TopicPartition("topic3", 4)), mirrorCheckpointTask.renameTopicPartition(new TopicPartition("topic3", 4)),
"Renaming source1.topic3 failed"); "Renaming source1.topic3 failed");
@ -65,8 +65,8 @@ public class MirrorCheckpointTaskTest {
OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore(); OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore();
offsetSyncStore.start(true); offsetSyncStore.start(true);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore, Collections.emptySet(), new DefaultReplicationPolicy(), offsetSyncStore, Set.of(),
Collections.emptyMap(), new CheckpointStore(Collections.emptyMap())); Map.of(), new CheckpointStore(Map.of()));
offsetSyncStore.sync(new TopicPartition("topic1", 2), t1UpstreamOffset, t1DownstreamOffset); offsetSyncStore.sync(new TopicPartition("topic1", 2), t1UpstreamOffset, t1DownstreamOffset);
offsetSyncStore.sync(new TopicPartition("target2.topic5", 6), t2UpstreamOffset, t2DownstreamOffset); offsetSyncStore.sync(new TopicPartition("target2.topic5", 6), t2UpstreamOffset, t2DownstreamOffset);
Optional<Checkpoint> optionalCheckpoint1 = mirrorCheckpointTask.checkpoint("group9", new TopicPartition("topic1", 2), Optional<Checkpoint> optionalCheckpoint1 = mirrorCheckpointTask.checkpoint("group9", new TopicPartition("topic1", 2),
@ -166,7 +166,7 @@ public class MirrorCheckpointTaskTest {
checkpointsPerConsumerGroup.put(consumer2, checkpointMapC2); checkpointsPerConsumerGroup.put(consumer2, checkpointMapC2);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), null, Collections.emptySet(), idleConsumerGroupsOffset, new DefaultReplicationPolicy(), null, Set.of(), idleConsumerGroupsOffset,
new CheckpointStore(checkpointsPerConsumerGroup)); new CheckpointStore(checkpointsPerConsumerGroup));
Map<String, Map<TopicPartition, OffsetAndMetadata>> output = mirrorCheckpointTask.syncGroupOffset(); Map<String, Map<TopicPartition, OffsetAndMetadata>> output = mirrorCheckpointTask.syncGroupOffset();
@ -197,7 +197,7 @@ public class MirrorCheckpointTaskTest {
checkpointsPerConsumerGroup.put(consumer, checkpointMap); checkpointsPerConsumerGroup.put(consumer, checkpointMap);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source", "target", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source", "target",
new DefaultReplicationPolicy(), null, Collections.emptySet(), idleConsumerGroupsOffset, new DefaultReplicationPolicy(), null, Set.of(), idleConsumerGroupsOffset,
new CheckpointStore(checkpointsPerConsumerGroup)); new CheckpointStore(checkpointsPerConsumerGroup));
Map<String, Map<TopicPartition, OffsetAndMetadata>> output = mirrorCheckpointTask.syncGroupOffset(); Map<String, Map<TopicPartition, OffsetAndMetadata>> output = mirrorCheckpointTask.syncGroupOffset();
@ -210,8 +210,8 @@ public class MirrorCheckpointTaskTest {
OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore(); OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore();
offsetSyncStore.start(true); offsetSyncStore.start(true);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), offsetSyncStore, Set.of(), Map.of(),
new CheckpointStore(Collections.emptyMap())); new CheckpointStore(Map.of()));
offsetSyncStore.sync(new TopicPartition("topic1", 0), 3L, 4L); offsetSyncStore.sync(new TopicPartition("topic1", 0), 3L, 4L);
Optional<Checkpoint> checkpoint1 = mirrorCheckpointTask.checkpoint("group9", new TopicPartition("topic1", 1), Optional<Checkpoint> checkpoint1 = mirrorCheckpointTask.checkpoint("group9", new TopicPartition("topic1", 1),
@ -227,8 +227,8 @@ public class MirrorCheckpointTaskTest {
OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore(); OffsetSyncStoreTest.FakeOffsetSyncStore offsetSyncStore = new OffsetSyncStoreTest.FakeOffsetSyncStore();
offsetSyncStore.start(true); offsetSyncStore.start(true);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), offsetSyncStore, Set.of(), Map.of(),
new CheckpointStore(Collections.emptyMap())); new CheckpointStore(Map.of()));
offsetSyncStore.sync(new TopicPartition("topic1", 0), 1L, 3L); offsetSyncStore.sync(new TopicPartition("topic1", 0), 1L, 3L);
Optional<Checkpoint> checkpoint = mirrorCheckpointTask.checkpoint("g1", new TopicPartition("topic1", 0), null); Optional<Checkpoint> checkpoint = mirrorCheckpointTask.checkpoint("g1", new TopicPartition("topic1", 0), null);
assertFalse(checkpoint.isPresent()); assertFalse(checkpoint.isPresent());
@ -240,7 +240,7 @@ public class MirrorCheckpointTaskTest {
offsetSyncStore.start(true); offsetSyncStore.start(true);
Map<String, Map<TopicPartition, Checkpoint>> checkpointsPerConsumerGroup = new HashMap<>(); Map<String, Map<TopicPartition, Checkpoint>> checkpointsPerConsumerGroup = new HashMap<>();
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), offsetSyncStore, Set.of(), Map.of(),
new CheckpointStore(checkpointsPerConsumerGroup)); new CheckpointStore(checkpointsPerConsumerGroup));
TopicPartition tp = new TopicPartition("topic1", 0); TopicPartition tp = new TopicPartition("topic1", 0);
TopicPartition targetTP = new TopicPartition("source1.topic1", 0); TopicPartition targetTP = new TopicPartition("source1.topic1", 0);
@ -277,7 +277,7 @@ public class MirrorCheckpointTaskTest {
private Map<TopicPartition, Checkpoint> assertCheckpointForTopic( private Map<TopicPartition, Checkpoint> assertCheckpointForTopic(
MirrorCheckpointTask task, TopicPartition tp, TopicPartition remoteTp, long consumerGroupOffset, boolean truth MirrorCheckpointTask task, TopicPartition tp, TopicPartition remoteTp, long consumerGroupOffset, boolean truth
) { ) {
Map<TopicPartition, OffsetAndMetadata> consumerGroupOffsets = Collections.singletonMap(tp, new OffsetAndMetadata(consumerGroupOffset)); Map<TopicPartition, OffsetAndMetadata> consumerGroupOffsets = Map.of(tp, new OffsetAndMetadata(consumerGroupOffset));
Map<TopicPartition, Checkpoint> checkpoints = task.checkpointsForGroup(consumerGroupOffsets, "g1"); Map<TopicPartition, Checkpoint> checkpoints = task.checkpointsForGroup(consumerGroupOffsets, "g1");
assertEquals(truth, checkpoints.containsKey(remoteTp), "should" + (truth ? "" : " not") + " emit offset sync"); assertEquals(truth, checkpoints.containsKey(remoteTp), "should" + (truth ? "" : " not") + " emit offset sync");
return checkpoints; return checkpoints;
@ -299,8 +299,8 @@ public class MirrorCheckpointTaskTest {
offsetSyncStore.start(false); offsetSyncStore.start(false);
MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), offsetSyncStore, Set.of(), Map.of(),
new CheckpointStore(Collections.emptyMap())); new CheckpointStore(Map.of()));
// Generate a checkpoint for upstream offset 250, and assert it maps to downstream 201 // Generate a checkpoint for upstream offset 250, and assert it maps to downstream 201
// (as nearest mapping in OffsetSyncStore is 200->200) // (as nearest mapping in OffsetSyncStore is 200->200)
@ -327,7 +327,7 @@ public class MirrorCheckpointTaskTest {
Map<String, Map<TopicPartition, Checkpoint>> checkpointsPerConsumerGroup = new HashMap<>(); Map<String, Map<TopicPartition, Checkpoint>> checkpointsPerConsumerGroup = new HashMap<>();
checkpointsPerConsumerGroup.put("group1", checkpoints); checkpointsPerConsumerGroup.put("group1", checkpoints);
MirrorCheckpointTask mirrorCheckpointTask2 = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask mirrorCheckpointTask2 = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), offsetSyncStore2, Collections.emptySet(), Collections.emptyMap(), new DefaultReplicationPolicy(), offsetSyncStore2, Set.of(), Map.of(),
new CheckpointStore(checkpointsPerConsumerGroup)); new CheckpointStore(checkpointsPerConsumerGroup));
// Upstream offsets 250 and 370 now have the closest downstream value of 176, but this is // Upstream offsets 250 and 370 now have the closest downstream value of 176, but this is
@ -354,14 +354,14 @@ public class MirrorCheckpointTaskTest {
MirrorCheckpointTask task = new MirrorCheckpointTask("source1", "target2", MirrorCheckpointTask task = new MirrorCheckpointTask("source1", "target2",
new DefaultReplicationPolicy(), new DefaultReplicationPolicy(),
new OffsetSyncStoreTest.FakeOffsetSyncStore(), new OffsetSyncStoreTest.FakeOffsetSyncStore(),
Collections.singleton("group"), Set.of("group"),
Collections.emptyMap(), Map.of(),
checkpointStore) { checkpointStore) {
@Override @Override
List<SourceRecord> sourceRecordsForGroup(String group) { List<SourceRecord> sourceRecordsForGroup(String group) {
SourceRecord sr = new SourceRecord(Collections.emptyMap(), Collections.emptyMap(), "", 0, null, null); SourceRecord sr = new SourceRecord(Map.of(), Map.of(), "", 0, null, null);
return Collections.singletonList(sr); return List.of(sr);
} }
}; };
@ -20,7 +20,6 @@ import org.apache.kafka.connect.errors.ConnectException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
@ -67,8 +66,8 @@ public class MirrorHeartBeatConnectorTest {
@Test @Test
public void testAlterOffsetsIncorrectPartitionKey() { public void testAlterOffsetsIncorrectPartitionKey() {
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Map.of(
Collections.singletonMap("unused_partition_key", "unused_partition_value"), Map.of("unused_partition_key", "unused_partition_value"),
SOURCE_OFFSET SOURCE_OFFSET
))); )));
@ -83,7 +82,7 @@ public class MirrorHeartBeatConnectorTest {
public void testAlterOffsetsMissingPartitionKey() { public void testAlterOffsetsMissingPartitionKey() {
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Collections.singletonMap( Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Map.of(
partition, partition,
SOURCE_OFFSET SOURCE_OFFSET
)); ));
@ -92,7 +91,7 @@ public class MirrorHeartBeatConnectorTest {
// Sanity check to make sure our valid partition is actually valid // Sanity check to make sure our valid partition is actually valid
assertTrue(alterOffsets.apply(validPartition)); assertTrue(alterOffsets.apply(validPartition));
for (String key : Arrays.asList(SOURCE_CLUSTER_ALIAS_KEY, TARGET_CLUSTER_ALIAS_KEY)) { for (String key : List.of(SOURCE_CLUSTER_ALIAS_KEY, TARGET_CLUSTER_ALIAS_KEY)) {
Map<String, ?> invalidPartition = new HashMap<>(validPartition); Map<String, ?> invalidPartition = new HashMap<>(validPartition);
invalidPartition.remove(key); invalidPartition.remove(key);
assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition)); assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition));
@ -117,9 +116,9 @@ public class MirrorHeartBeatConnectorTest {
public void testAlterOffsetsIncorrectOffsetKey() { public void testAlterOffsetsIncorrectOffsetKey() {
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("primary", "backup"), sourcePartition("primary", "backup"),
Collections.singletonMap("unused_offset_key", 0) Map.of("unused_offset_key", 0)
); );
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets)); assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets));
} }
@ -128,7 +127,7 @@ public class MirrorHeartBeatConnectorTest {
public void testAlterOffsetsOffsetValues() { public void testAlterOffsetsOffsetValues() {
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Collections.singletonMap( Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Map.of(
sourcePartition("primary", "backup"), sourcePartition("primary", "backup"),
Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset) Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset)
)); ));
@ -149,7 +148,7 @@ public class MirrorHeartBeatConnectorTest {
public void testSuccessfulAlterOffsets() { public void testSuccessfulAlterOffsets() {
MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector(); MirrorHeartbeatConnector connector = new MirrorHeartbeatConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("primary", "backup"), sourcePartition("primary", "backup"),
SOURCE_OFFSET SOURCE_OFFSET
); );
@ -158,7 +157,7 @@ public class MirrorHeartBeatConnectorTest {
// since it could indicate that the offsets were reset previously or that no offsets have been committed yet // since it could indicate that the offsets were reset previously or that no offsets have been committed yet
// (for a reset operation) // (for a reset operation)
assertTrue(connector.alterOffsets(null, offsets)); assertTrue(connector.alterOffsets(null, offsets));
assertTrue(connector.alterOffsets(null, Collections.emptyMap())); assertTrue(connector.alterOffsets(null, Map.of()));
} }
@Test @Test
@ -178,8 +177,8 @@ public class MirrorHeartBeatConnectorTest {
assertTrue(() -> alterOffsets.apply(partition)); assertTrue(() -> alterOffsets.apply(partition));
assertTrue(() -> alterOffsets.apply(null)); assertTrue(() -> alterOffsets.apply(null));
assertTrue(() -> alterOffsets.apply(Collections.emptyMap())); assertTrue(() -> alterOffsets.apply(Map.of()));
assertTrue(() -> alterOffsets.apply(Collections.singletonMap("unused_partition_key", "unused_partition_value"))); assertTrue(() -> alterOffsets.apply(Map.of("unused_partition_key", "unused_partition_value")));
} }
private static Map<String, Object> sourcePartition(String sourceClusterAlias, String targetClusterAlias) { private static Map<String, Object> sourcePartition(String sourceClusterAlias, String targetClusterAlias) {
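Note: in testAlterOffsetsOffsetValues above, the inner offset map stays Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset) rather than switching to Map.of, presumably because the offsets passed through that lambda can be null and the Map.of/List.of/Set.of factories reject null keys, values, and elements. A minimal sketch of that difference (class and variable names here are illustrative only, not from the patch):

    import java.util.Collections;
    import java.util.Map;

    public class NullValueSketch {
        public static void main(String[] args) {
            // Collections.singletonMap tolerates a null value.
            Map<String, Object> legacy = Collections.singletonMap("offset", null);
            System.out.println(legacy); // {offset=null}

            // Map.of rejects null keys and values at construction time.
            Map<String, Object> modern = Map.of("offset", null); // throws NullPointerException
            System.out.println(modern); // never reached
        }
    }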
@ -35,9 +35,9 @@ public class MirrorHeartbeatTaskTest {
List<SourceRecord> records = heartbeatTask.poll(); List<SourceRecord> records = heartbeatTask.poll();
assertEquals(1, records.size()); assertEquals(1, records.size());
Map<String, ?> sourcePartition = records.iterator().next().sourcePartition(); Map<String, ?> sourcePartition = records.iterator().next().sourcePartition();
assertEquals(sourcePartition.get(Heartbeat.SOURCE_CLUSTER_ALIAS_KEY), "testSource", assertEquals("testSource", sourcePartition.get(Heartbeat.SOURCE_CLUSTER_ALIAS_KEY),
"sourcePartition's " + Heartbeat.SOURCE_CLUSTER_ALIAS_KEY + " record was not created"); "sourcePartition's " + Heartbeat.SOURCE_CLUSTER_ALIAS_KEY + " record was not created");
assertEquals(sourcePartition.get(Heartbeat.TARGET_CLUSTER_ALIAS_KEY), "testTarget", assertEquals("testTarget", sourcePartition.get(Heartbeat.TARGET_CLUSTER_ALIAS_KEY),
"sourcePartition's " + Heartbeat.TARGET_CLUSTER_ALIAS_KEY + " record was not created"); "sourcePartition's " + Heartbeat.TARGET_CLUSTER_ALIAS_KEY + " record was not created");
} }
} }
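Note: besides the collection-factory changes, several assertions in this patch (including the two assertEquals calls just above) also swap their arguments so that the expected value comes first. JUnit 5's assertEquals(expected, actual, message) passes either way when the values match, but the argument order drives the failure message, so a reversed call reports a misleading "expected X but was Y". A small sketch, assuming JUnit 5 on the classpath:

    import static org.junit.jupiter.api.Assertions.assertEquals;

    class AssertOrderSketch {
        void passes() {
            String actual = "testSource";
            // Expected value first, actual value second.
            assertEquals("testSource", actual);
        }

        void failsWithClearMessage() {
            String actual = "testTarget";
            // With the expected value first, the failure message reads
            // "expected: <testSource> but was: <testTarget>" rather than the reverse.
            assertEquals("testSource", actual);
        }
    }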
@ -28,8 +28,6 @@ import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Locale; import java.util.Locale;
@ -152,11 +150,11 @@ public class MirrorMakerConfigTest {
MirrorSourceConfig sourceConfig = new MirrorSourceConfig(connectorProps); MirrorSourceConfig sourceConfig = new MirrorSourceConfig(connectorProps);
assertEquals(100, (int) sourceConfig.getInt("tasks.max"), assertEquals(100, (int) sourceConfig.getInt("tasks.max"),
"Connector properties like tasks.max should be passed through to underlying Connectors."); "Connector properties like tasks.max should be passed through to underlying Connectors.");
assertEquals(Collections.singletonList("topic-1"), sourceConfig.getList("topics"), assertEquals(List.of("topic-1"), sourceConfig.getList("topics"),
"Topics include should be passed through to underlying Connectors."); "Topics include should be passed through to underlying Connectors.");
assertEquals(Collections.singletonList("property-3"), sourceConfig.getList("config.properties.exclude"), assertEquals(List.of("property-3"), sourceConfig.getList("config.properties.exclude"),
"Config properties exclude should be passed through to underlying Connectors."); "Config properties exclude should be passed through to underlying Connectors.");
assertEquals(Collections.singletonList("FakeMetricsReporter"), sourceConfig.getList("metric.reporters"), assertEquals(List.of("FakeMetricsReporter"), sourceConfig.getList("metric.reporters"),
"Metrics reporters should be passed through to underlying Connectors."); "Metrics reporters should be passed through to underlying Connectors.");
assertEquals("DefaultTopicFilter", sourceConfig.getClass("topic.filter.class").getSimpleName(), assertEquals("DefaultTopicFilter", sourceConfig.getClass("topic.filter.class").getSimpleName(),
"Filters should be passed through to underlying Connectors."); "Filters should be passed through to underlying Connectors.");
@ -166,7 +164,7 @@ public class MirrorMakerConfigTest {
"Unknown properties should not be passed through to Connectors."); "Unknown properties should not be passed through to Connectors.");
MirrorCheckpointConfig checkpointConfig = new MirrorCheckpointConfig(connectorProps); MirrorCheckpointConfig checkpointConfig = new MirrorCheckpointConfig(connectorProps);
assertEquals(Collections.singletonList("group-2"), checkpointConfig.getList("groups"), assertEquals(List.of("group-2"), checkpointConfig.getList("groups"),
"Groups include should be passed through to underlying Connectors."); "Groups include should be passed through to underlying Connectors.");
} }
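Note: the assertions above keep comparing a List.of(...) literal against whatever List implementation the config's getList method returns; this is safe because java.util.List equality is defined by element order and contents, not by implementation class. A quick sketch:

    import java.util.ArrayList;
    import java.util.List;

    public class ListEqualitySketch {
        public static void main(String[] args) {
            List<String> expected = List.of("group-2");
            List<String> actual = new ArrayList<>();
            actual.add("group-2");
            // List.equals compares sizes and elements in order, so the two lists
            // are equal even though their implementation classes differ.
            System.out.println(expected.equals(actual)); // true
        }
    }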
@ -182,9 +180,9 @@ public class MirrorMakerConfigTest {
MirrorSourceConnector.class); MirrorSourceConnector.class);
DefaultTopicFilter.TopicFilterConfig filterConfig = DefaultTopicFilter.TopicFilterConfig filterConfig =
new DefaultTopicFilter.TopicFilterConfig(connectorProps); new DefaultTopicFilter.TopicFilterConfig(connectorProps);
assertEquals(Arrays.asList("topic1", "topic2"), filterConfig.getList("topics"), assertEquals(List.of("topic1", "topic2"), filterConfig.getList("topics"),
"source->target.topics should be passed through to TopicFilters."); "source->target.topics should be passed through to TopicFilters.");
assertEquals(Collections.singletonList("topic3"), filterConfig.getList("topics.exclude"), assertEquals(List.of("topic3"), filterConfig.getList("topics.exclude"),
"source->target.topics.exclude should be passed through to TopicFilters."); "source->target.topics.exclude should be passed through to TopicFilters.");
} }
@ -367,7 +365,7 @@ public class MirrorMakerConfigTest {
public static class FakeConfigProvider implements ConfigProvider { public static class FakeConfigProvider implements ConfigProvider {
Map<String, String> secrets = Collections.singletonMap("password", "secret2"); Map<String, String> secrets = Map.of("password", "secret2");
@Override @Override
public void configure(Map<String, ?> props) { public void configure(Map<String, ?> props) {
@ -22,7 +22,6 @@ import org.apache.kafka.common.config.ConfigException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -38,7 +37,7 @@ public class MirrorSourceConfigTest {
@Test @Test
public void testTaskConfigTopicPartitions() { public void testTaskConfigTopicPartitions() {
List<TopicPartition> topicPartitions = Arrays.asList(new TopicPartition("topic-1", 2), List<TopicPartition> topicPartitions = List.of(new TopicPartition("topic-1", 2),
new TopicPartition("topic-3", 4), new TopicPartition("topic-5", 6)); new TopicPartition("topic-3", 4), new TopicPartition("topic-5", 6));
MirrorSourceConfig config = new MirrorSourceConfig(makeProps()); MirrorSourceConfig config = new MirrorSourceConfig(makeProps());
Map<String, String> props = config.taskConfigForTopicPartitions(topicPartitions, 1); Map<String, String> props = config.taskConfigForTopicPartitions(topicPartitions, 1);
@ -44,7 +44,6 @@ import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -55,7 +54,6 @@ import java.util.Optional;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
import static org.apache.kafka.clients.consumer.ConsumerConfig.ISOLATION_LEVEL_CONFIG; import static org.apache.kafka.clients.consumer.ConsumerConfig.ISOLATION_LEVEL_CONFIG;
import static org.apache.kafka.connect.mirror.MirrorConnectorConfig.CONSUMER_CLIENT_PREFIX; import static org.apache.kafka.connect.mirror.MirrorConnectorConfig.CONSUMER_CLIENT_PREFIX;
@ -106,7 +104,7 @@ public class MirrorSourceConnectorTest {
assertTrue(connector.shouldReplicateTopic("heartbeats"), "should replicate heartbeats"); assertTrue(connector.shouldReplicateTopic("heartbeats"), "should replicate heartbeats");
assertTrue(connector.shouldReplicateTopic("us-west.heartbeats"), "should replicate upstream heartbeats"); assertTrue(connector.shouldReplicateTopic("us-west.heartbeats"), "should replicate upstream heartbeats");
Map<String, ?> configs = Collections.singletonMap(DefaultReplicationPolicy.SEPARATOR_CONFIG, "_"); Map<String, ?> configs = Map.of(DefaultReplicationPolicy.SEPARATOR_CONFIG, "_");
defaultReplicationPolicy.configure(configs); defaultReplicationPolicy.configure(configs);
assertTrue(connector.shouldReplicateTopic("heartbeats"), "should replicate heartbeats"); assertTrue(connector.shouldReplicateTopic("heartbeats"), "should replicate heartbeats");
assertFalse(connector.shouldReplicateTopic("us-west.heartbeats"), "should not consider this topic as a heartbeats topic"); assertFalse(connector.shouldReplicateTopic("us-west.heartbeats"), "should not consider this topic as a heartbeats topic");
@ -184,15 +182,15 @@ public class MirrorSourceConnectorTest {
String expectedRemoteTopicName = "source" + DefaultReplicationPolicy.SEPARATOR_DEFAULT String expectedRemoteTopicName = "source" + DefaultReplicationPolicy.SEPARATOR_DEFAULT
+ allowAllAclBinding.pattern().name(); + allowAllAclBinding.pattern().name();
assertEquals(expectedRemoteTopicName, processedAllowAllAclBinding.pattern().name(), "should change topic name"); assertEquals(expectedRemoteTopicName, processedAllowAllAclBinding.pattern().name(), "should change topic name");
assertEquals(processedAllowAllAclBinding.entry().operation(), AclOperation.READ, "should change ALL to READ"); assertEquals(AclOperation.READ, processedAllowAllAclBinding.entry().operation(), "should change ALL to READ");
assertEquals(processedAllowAllAclBinding.entry().permissionType(), AclPermissionType.ALLOW, "should not change ALLOW"); assertEquals(AclPermissionType.ALLOW, processedAllowAllAclBinding.entry().permissionType(), "should not change ALLOW");
AclBinding denyAllAclBinding = new AclBinding( AclBinding denyAllAclBinding = new AclBinding(
new ResourcePattern(ResourceType.TOPIC, "test_topic", PatternType.LITERAL), new ResourcePattern(ResourceType.TOPIC, "test_topic", PatternType.LITERAL),
new AccessControlEntry("kafka", "", AclOperation.ALL, AclPermissionType.DENY)); new AccessControlEntry("kafka", "", AclOperation.ALL, AclPermissionType.DENY));
AclBinding processedDenyAllAclBinding = connector.targetAclBinding(denyAllAclBinding); AclBinding processedDenyAllAclBinding = connector.targetAclBinding(denyAllAclBinding);
assertEquals(processedDenyAllAclBinding.entry().operation(), AclOperation.ALL, "should not change ALL"); assertEquals(AclOperation.ALL, processedDenyAllAclBinding.entry().operation(), "should not change ALL");
assertEquals(processedDenyAllAclBinding.entry().permissionType(), AclPermissionType.DENY, "should not change DENY"); assertEquals(AclPermissionType.DENY, processedDenyAllAclBinding.entry().permissionType(), "should not change DENY");
} }
@Test @Test
@ -280,7 +278,7 @@ public class MirrorSourceConnectorTest {
new DefaultReplicationPolicy(), x -> true, new DefaultConfigPropertyFilter()); new DefaultReplicationPolicy(), x -> true, new DefaultConfigPropertyFilter());
ArrayList<ConfigEntry> entries = new ArrayList<>(); ArrayList<ConfigEntry> entries = new ArrayList<>();
entries.add(new ConfigEntry("name-1", "value-1")); entries.add(new ConfigEntry("name-1", "value-1"));
entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, Collections.emptyList(), ConfigEntry.ConfigType.STRING, "")); entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, List.of(), ConfigEntry.ConfigType.STRING, ""));
entries.add(new ConfigEntry("min.insync.replicas", "2")); entries.add(new ConfigEntry("min.insync.replicas", "2"));
Config config = new Config(entries); Config config = new Config(entries);
Config targetConfig = connector.targetConfig(config, true); Config targetConfig = connector.targetConfig(config, true);
@ -300,7 +298,7 @@ public class MirrorSourceConnectorTest {
List<ConfigEntry> entries = new ArrayList<>(); List<ConfigEntry> entries = new ArrayList<>();
entries.add(new ConfigEntry("name-1", "value-1")); entries.add(new ConfigEntry("name-1", "value-1"));
// When "use.defaults.from" set to "target" by default, the config with default value should be excluded // When "use.defaults.from" set to "target" by default, the config with default value should be excluded
entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, Collections.emptyList(), ConfigEntry.ConfigType.STRING, "")); entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, List.of(), ConfigEntry.ConfigType.STRING, ""));
entries.add(new ConfigEntry("min.insync.replicas", "2")); entries.add(new ConfigEntry("min.insync.replicas", "2"));
Config config = new Config(entries); Config config = new Config(entries);
Config targetConfig = connector.targetConfig(config, false); Config targetConfig = connector.targetConfig(config, false);
@ -315,7 +313,7 @@ public class MirrorSourceConnectorTest {
@Test @Test
@Deprecated @Deprecated
public void testConfigPropertyFilteringWithAlterConfigsAndSourceDefault() { public void testConfigPropertyFilteringWithAlterConfigsAndSourceDefault() {
Map<String, Object> filterConfig = Collections.singletonMap(DefaultConfigPropertyFilter.USE_DEFAULTS_FROM, "source"); Map<String, Object> filterConfig = Map.of(DefaultConfigPropertyFilter.USE_DEFAULTS_FROM, "source");
DefaultConfigPropertyFilter filter = new DefaultConfigPropertyFilter(); DefaultConfigPropertyFilter filter = new DefaultConfigPropertyFilter();
filter.configure(filterConfig); filter.configure(filterConfig);
@ -324,7 +322,7 @@ public class MirrorSourceConnectorTest {
List<ConfigEntry> entries = new ArrayList<>(); List<ConfigEntry> entries = new ArrayList<>();
entries.add(new ConfigEntry("name-1", "value-1")); entries.add(new ConfigEntry("name-1", "value-1"));
// When "use.defaults.from" explicitly set to "source", the config with default value should be replicated // When "use.defaults.from" explicitly set to "source", the config with default value should be replicated
entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, Collections.emptyList(), ConfigEntry.ConfigType.STRING, "")); entries.add(new ConfigEntry("name-2", "value-2", ConfigEntry.ConfigSource.DEFAULT_CONFIG, false, false, List.of(), ConfigEntry.ConfigType.STRING, ""));
entries.add(new ConfigEntry("min.insync.replicas", "2")); entries.add(new ConfigEntry("min.insync.replicas", "2"));
Config config = new Config(entries); Config config = new Config(entries);
Config targetConfig = connector.targetConfig(config, false); Config targetConfig = connector.targetConfig(config, false);
@ -358,7 +356,7 @@ public class MirrorSourceConnectorTest {
entries.add(new ConfigEntry("exclude_param.param1", "value-param1")); entries.add(new ConfigEntry("exclude_param.param1", "value-param1"));
entries.add(new ConfigEntry("min.insync.replicas", "2")); entries.add(new ConfigEntry("min.insync.replicas", "2"));
Config config = new Config(entries); Config config = new Config(entries);
doReturn(Collections.singletonMap(topic, config)).when(connector).describeTopicConfigs(any()); doReturn(Map.of(topic, config)).when(connector).describeTopicConfigs(any());
doAnswer(invocation -> { doAnswer(invocation -> {
Map<String, NewTopic> newTopics = invocation.getArgument(0); Map<String, NewTopic> newTopics = invocation.getArgument(0);
assertNotNull(newTopics.get("source." + topic)); assertNotNull(newTopics.get("source." + topic));
@ -375,7 +373,7 @@ public class MirrorSourceConnectorTest {
assertNull(targetConfig.get(prop2), "should not replicate excluded properties " + prop2); assertNull(targetConfig.get(prop2), "should not replicate excluded properties " + prop2);
return null; return null;
}).when(connector).createNewTopics(any()); }).when(connector).createNewTopics(any());
connector.createNewTopics(Collections.singleton(topic), Collections.singletonMap(topic, 1L)); connector.createNewTopics(Set.of(topic), Map.of(topic, 1L));
verify(connector).createNewTopics(any(), any()); verify(connector).createNewTopics(any(), any());
} }
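Note: several stubs in these tests pass literal Set.of(...) and Map.of(...) arguments to doReturn(...).when(...), for example describeTopicConfigs(Set.of("topic")) in the refresh-topic-partitions tests below. Those stubs still match because Mockito compares plain, non-matcher arguments with equals(), and the Set and Map equality contracts are content-based, so immutable factory instances interchange freely with mutable collections. A minimal sketch under that assumption; the TopicAdmin interface below is made up purely for illustration:

    import static org.mockito.Mockito.doReturn;
    import static org.mockito.Mockito.mock;

    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    public class MockitoEqualitySketch {
        // Hypothetical collaborator, not part of the patch.
        interface TopicAdmin {
            Map<String, Long> describeTopicConfigs(Set<String> topics);
        }

        public static void main(String[] args) {
            TopicAdmin admin = mock(TopicAdmin.class);
            doReturn(Map.of("topic", 1L)).when(admin).describeTopicConfigs(Set.of("topic"));

            // A call made with a mutable HashSet still hits the stub, because the
            // argument is compared with equals() and the contents are identical.
            Map<String, Long> configs = admin.describeTopicConfigs(new HashSet<>(Set.of("topic")));
            System.out.println(configs); // {topic=1}
        }
    }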
@ -433,15 +431,15 @@ public class MirrorSourceConnectorTest {
connector.initialize(mock(ConnectorContext.class)); connector.initialize(mock(ConnectorContext.class));
connector = spy(connector); connector = spy(connector);
Config topicConfig = new Config(Arrays.asList( Config topicConfig = new Config(List.of(
new ConfigEntry("cleanup.policy", "compact"), new ConfigEntry("cleanup.policy", "compact"),
new ConfigEntry("segment.bytes", "100"))); new ConfigEntry("segment.bytes", "100")));
Map<String, Config> configs = Collections.singletonMap("topic", topicConfig); Map<String, Config> configs = Map.of("topic", topicConfig);
List<TopicPartition> sourceTopicPartitions = Collections.singletonList(new TopicPartition("topic", 0)); List<TopicPartition> sourceTopicPartitions = List.of(new TopicPartition("topic", 0));
doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions(); doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions();
doReturn(Collections.emptyList()).when(connector).findTargetTopicPartitions(); doReturn(List.of()).when(connector).findTargetTopicPartitions();
doReturn(configs).when(connector).describeTopicConfigs(Collections.singleton("topic")); doReturn(configs).when(connector).describeTopicConfigs(Set.of("topic"));
doNothing().when(connector).createNewTopics(any()); doNothing().when(connector).createNewTopics(any());
connector.refreshTopicPartitions(); connector.refreshTopicPartitions();
@ -460,7 +458,7 @@ public class MirrorSourceConnectorTest {
verify(connector, times(2)).createNewTopics(eq(expectedNewTopics)); verify(connector, times(2)).createNewTopics(eq(expectedNewTopics));
verify(connector, times(0)).createNewPartitions(any()); verify(connector, times(0)).createNewPartitions(any());
List<TopicPartition> targetTopicPartitions = Collections.singletonList(new TopicPartition("source.topic", 0)); List<TopicPartition> targetTopicPartitions = List.of(new TopicPartition("source.topic", 0));
doReturn(targetTopicPartitions).when(connector).findTargetTopicPartitions(); doReturn(targetTopicPartitions).when(connector).findTargetTopicPartitions();
connector.refreshTopicPartitions(); connector.refreshTopicPartitions();
@ -475,17 +473,17 @@ public class MirrorSourceConnectorTest {
connector.initialize(mock(ConnectorContext.class)); connector.initialize(mock(ConnectorContext.class));
connector = spy(connector); connector = spy(connector);
Config topicConfig = new Config(Arrays.asList( Config topicConfig = new Config(List.of(
new ConfigEntry("cleanup.policy", "compact"), new ConfigEntry("cleanup.policy", "compact"),
new ConfigEntry("segment.bytes", "100"))); new ConfigEntry("segment.bytes", "100")));
Map<String, Config> configs = Collections.singletonMap("source.topic", topicConfig); Map<String, Config> configs = Map.of("source.topic", topicConfig);
List<TopicPartition> sourceTopicPartitions = Collections.emptyList(); List<TopicPartition> sourceTopicPartitions = List.of();
List<TopicPartition> targetTopicPartitions = Collections.singletonList(new TopicPartition("source.topic", 0)); List<TopicPartition> targetTopicPartitions = List.of(new TopicPartition("source.topic", 0));
doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions(); doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions();
doReturn(targetTopicPartitions).when(connector).findTargetTopicPartitions(); doReturn(targetTopicPartitions).when(connector).findTargetTopicPartitions();
doReturn(configs).when(connector).describeTopicConfigs(Collections.singleton("source.topic")); doReturn(configs).when(connector).describeTopicConfigs(Set.of("source.topic"));
doReturn(Collections.emptyMap()).when(connector).describeTopicConfigs(Collections.emptySet()); doReturn(Map.of()).when(connector).describeTopicConfigs(Set.of());
doNothing().when(connector).createNewTopics(any()); doNothing().when(connector).createNewTopics(any());
doNothing().when(connector).createNewPartitions(any()); doNothing().when(connector).createNewPartitions(any());
@ -494,7 +492,7 @@ public class MirrorSourceConnectorTest {
connector.refreshTopicPartitions(); connector.refreshTopicPartitions();
verify(connector, times(0)).computeAndCreateTopicPartitions(); verify(connector, times(0)).computeAndCreateTopicPartitions();
sourceTopicPartitions = Collections.singletonList(new TopicPartition("topic", 0)); sourceTopicPartitions = List.of(new TopicPartition("topic", 0));
doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions(); doReturn(sourceTopicPartitions).when(connector).findSourceTopicPartitions();
// when partitions are added to the source cluster, reconfiguration is triggered // when partitions are added to the source cluster, reconfiguration is triggered
@ -620,7 +618,7 @@ public class MirrorSourceConnectorTest {
List<ConfigValue> results = new MirrorSourceConnector().validate(props) List<ConfigValue> results = new MirrorSourceConnector().validate(props)
.configValues().stream() .configValues().stream()
.filter(cv -> name.equals(cv.name())) .filter(cv -> name.equals(cv.name()))
.collect(Collectors.toList()); .toList();
assertTrue(results.size() <= 1, "Connector produced multiple config values for '" + name + "' property"); assertTrue(results.size() <= 1, "Connector produced multiple config values for '" + name + "' property");
@ -635,8 +633,8 @@ public class MirrorSourceConnectorTest {
@Test @Test
public void testAlterOffsetsIncorrectPartitionKey() { public void testAlterOffsetsIncorrectPartitionKey() {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Map.of(
Collections.singletonMap("unused_partition_key", "unused_partition_value"), Map.of("unused_partition_key", "unused_partition_value"),
MirrorUtils.wrapOffset(10) MirrorUtils.wrapOffset(10)
))); )));
@ -651,7 +649,7 @@ public class MirrorSourceConnectorTest {
public void testAlterOffsetsMissingPartitionKey() { public void testAlterOffsetsMissingPartitionKey() {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Collections.singletonMap( Function<Map<String, ?>, Boolean> alterOffsets = partition -> connector.alterOffsets(null, Map.of(
partition, partition,
MirrorUtils.wrapOffset(64) MirrorUtils.wrapOffset(64)
)); ));
@ -660,7 +658,7 @@ public class MirrorSourceConnectorTest {
// Sanity check to make sure our valid partition is actually valid // Sanity check to make sure our valid partition is actually valid
assertTrue(alterOffsets.apply(validPartition)); assertTrue(alterOffsets.apply(validPartition));
for (String key : Arrays.asList(SOURCE_CLUSTER_KEY, TOPIC_KEY, PARTITION_KEY)) { for (String key : List.of(SOURCE_CLUSTER_KEY, TOPIC_KEY, PARTITION_KEY)) {
Map<String, ?> invalidPartition = new HashMap<>(validPartition); Map<String, ?> invalidPartition = new HashMap<>(validPartition);
invalidPartition.remove(key); invalidPartition.remove(key);
assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition)); assertThrows(ConnectException.class, () -> alterOffsets.apply(invalidPartition));
@ -672,7 +670,7 @@ public class MirrorSourceConnectorTest {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
Map<String, Object> partition = sourcePartition("t", 3, "us-west-2"); Map<String, Object> partition = sourcePartition("t", 3, "us-west-2");
partition.put(PARTITION_KEY, "a string"); partition.put(PARTITION_KEY, "a string");
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Collections.singletonMap( assertThrows(ConnectException.class, () -> connector.alterOffsets(null, Map.of(
partition, partition,
MirrorUtils.wrapOffset(49) MirrorUtils.wrapOffset(49)
))); )));
@ -696,9 +694,9 @@ public class MirrorSourceConnectorTest {
public void testAlterOffsetsIncorrectOffsetKey() { public void testAlterOffsetsIncorrectOffsetKey() {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("t1", 2, "backup"), sourcePartition("t1", 2, "backup"),
Collections.singletonMap("unused_offset_key", 0) Map.of("unused_offset_key", 0)
); );
assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets)); assertThrows(ConnectException.class, () -> connector.alterOffsets(null, offsets));
} }
@ -707,7 +705,7 @@ public class MirrorSourceConnectorTest {
public void testAlterOffsetsOffsetValues() { public void testAlterOffsetsOffsetValues() {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Collections.singletonMap( Function<Object, Boolean> alterOffsets = offset -> connector.alterOffsets(null, Map.of(
sourcePartition("t", 5, "backup"), sourcePartition("t", 5, "backup"),
Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset) Collections.singletonMap(MirrorUtils.OFFSET_KEY, offset)
)); ));
@ -728,7 +726,7 @@ public class MirrorSourceConnectorTest {
public void testSuccessfulAlterOffsets() { public void testSuccessfulAlterOffsets() {
MirrorSourceConnector connector = new MirrorSourceConnector(); MirrorSourceConnector connector = new MirrorSourceConnector();
Map<Map<String, ?>, Map<String, ?>> offsets = Collections.singletonMap( Map<Map<String, ?>, Map<String, ?>> offsets = Map.of(
sourcePartition("t2", 0, "backup"), sourcePartition("t2", 0, "backup"),
MirrorUtils.wrapOffset(5) MirrorUtils.wrapOffset(5)
); );
@ -737,7 +735,7 @@ public class MirrorSourceConnectorTest {
// since it could indicate that the offsets were reset previously or that no offsets have been committed yet // since it could indicate that the offsets were reset previously or that no offsets have been committed yet
// (for a reset operation) // (for a reset operation)
assertTrue(connector.alterOffsets(null, offsets)); assertTrue(connector.alterOffsets(null, offsets));
assertTrue(connector.alterOffsets(null, Collections.emptyMap())); assertTrue(connector.alterOffsets(null, Map.of()));
} }
@Test @Test
@ -757,8 +755,8 @@ public class MirrorSourceConnectorTest {
assertTrue(() -> alterOffsets.apply(partition)); assertTrue(() -> alterOffsets.apply(partition));
assertTrue(() -> alterOffsets.apply(null)); assertTrue(() -> alterOffsets.apply(null));
assertTrue(() -> alterOffsets.apply(Collections.emptyMap())); assertTrue(() -> alterOffsets.apply(Map.of()));
assertTrue(() -> alterOffsets.apply(Collections.singletonMap("unused_partition_key", "unused_partition_value"))); assertTrue(() -> alterOffsets.apply(Map.of("unused_partition_key", "unused_partition_value")));
} }
private static Map<String, Object> sourcePartition(String topic, int partition, String sourceClusterAlias) { private static Map<String, Object> sourcePartition(String topic, int partition, String sourceClusterAlias) {
@ -29,7 +29,6 @@ import org.apache.kafka.connect.errors.ConnectException;
import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Test;
import java.util.Collections;
import java.util.Map; import java.util.Map;
import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutionException;
@ -51,7 +50,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopic() throws Exception { public void testCreateCompactedTopic() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenReturn(null); when(future.get()).thenReturn(null);
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -64,7 +63,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicAlreadyExists() throws Exception { public void testCreateCompactedTopicAlreadyExists() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new TopicExistsException("topic exists"))); when(future.get()).thenThrow(new ExecutionException(new TopicExistsException("topic exists")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -77,7 +76,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithUnsupportedVersionException() throws Exception { public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithUnsupportedVersionException() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new UnsupportedVersionException("unsupported"))); when(future.get()).thenThrow(new ExecutionException(new UnsupportedVersionException("unsupported")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -90,7 +89,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithClusterAuthorizationException() throws Exception { public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithClusterAuthorizationException() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new ClusterAuthorizationException("not authorised"))); when(future.get()).thenThrow(new ExecutionException(new ClusterAuthorizationException("not authorised")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -103,7 +102,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithTopicAuthorizationException() throws Exception { public void testCreateCompactedTopicAssumeTopicAlreadyExistsWithTopicAuthorizationException() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new TopicAuthorizationException("not authorised"))); when(future.get()).thenThrow(new ExecutionException(new TopicAuthorizationException("not authorised")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -116,7 +115,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicFailsWithInvalidConfigurationException() throws Exception { public void testCreateCompactedTopicFailsWithInvalidConfigurationException() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new InvalidConfigurationException("wrong config"))); when(future.get()).thenThrow(new ExecutionException(new InvalidConfigurationException("wrong config")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -130,7 +129,7 @@ public class MirrorUtilsTest {
@Test @Test
public void testCreateCompactedTopicFailsWithTimeoutException() throws Exception { public void testCreateCompactedTopicFailsWithTimeoutException() throws Exception {
Map<String, KafkaFuture<Void>> values = Collections.singletonMap(TOPIC, future); Map<String, KafkaFuture<Void>> values = Map.of(TOPIC, future);
when(future.get()).thenThrow(new ExecutionException(new TimeoutException("Timeout"))); when(future.get()).thenThrow(new ExecutionException(new TimeoutException("Timeout")));
when(ctr.values()).thenReturn(values); when(ctr.values()).thenReturn(values);
when(admin.createTopics(any(), any())).thenReturn(ctr); when(admin.createTopics(any(), any())).thenReturn(ctr);
@ -55,11 +55,11 @@ public class OffsetSyncWriterTest {
offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 0, 1); offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 0, 1);
assertFalse(offsetSyncWriter.getDelayedOffsetSyncs().containsKey(topicPartition)); assertFalse(offsetSyncWriter.getDelayedOffsetSyncs().containsKey(topicPartition));
assertTrue(offsetSyncWriter.getPendingOffsetSyncs().containsKey(topicPartition)); assertTrue(offsetSyncWriter.getPendingOffsetSyncs().containsKey(topicPartition));
assertEquals(offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset, 1); assertEquals(1, offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset);
offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 1, 2); offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 1, 2);
assertTrue(offsetSyncWriter.getDelayedOffsetSyncs().containsKey(topicPartition)); assertTrue(offsetSyncWriter.getDelayedOffsetSyncs().containsKey(topicPartition));
assertEquals(offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset, 1); assertEquals(1, offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset);
} }
@Test @Test
@ -71,7 +71,7 @@ public class OffsetSyncWriterTest {
OffsetSyncWriter offsetSyncWriter = new OffsetSyncWriter(producer, topicName, outstandingOffsetSyncs, maxOffsetLag); OffsetSyncWriter offsetSyncWriter = new OffsetSyncWriter(producer, topicName, outstandingOffsetSyncs, maxOffsetLag);
offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 0, 100); offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 0, 100);
assertEquals(offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset, 100); assertEquals(100, offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset);
offsetSyncWriter.firePendingOffsetSyncs(); offsetSyncWriter.firePendingOffsetSyncs();
@ -85,7 +85,7 @@ public class OffsetSyncWriterTest {
verify(producer, times(1)).send(any(), any()); verify(producer, times(1)).send(any(), any());
offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 2, 102); offsetSyncWriter.maybeQueueOffsetSyncs(topicPartition, 2, 102);
assertEquals(offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset, 102); assertEquals(102, offsetSyncWriter.partitionStates().get(topicPartition).lastSyncDownstreamOffset);
offsetSyncWriter.firePendingOffsetSyncs(); offsetSyncWriter.firePendingOffsetSyncs();
// in-flight offset syncs; will not try to send remaining offset syncs immediately // in-flight offset syncs; will not try to send remaining offset syncs immediately
@ -45,8 +45,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import java.time.Duration; import java.time.Duration;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -58,7 +56,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Predicate; import java.util.function.Predicate;
import java.util.stream.Collectors;
import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG; import static org.apache.kafka.clients.consumer.ConsumerConfig.AUTO_OFFSET_RESET_CONFIG;
import static org.apache.kafka.connect.mirror.MirrorMaker.CONNECTOR_CLASSES; import static org.apache.kafka.connect.mirror.MirrorMaker.CONNECTOR_CLASSES;
@ -230,7 +227,7 @@ public class DedicatedMirrorIntegrationTest {
// Bring up a single-node cluster // Bring up a single-node cluster
final MirrorMaker mm = startMirrorMaker("no-offset-syncing", mmProps); final MirrorMaker mm = startMirrorMaker("no-offset-syncing", mmProps);
final SourceAndTarget sourceAndTarget = new SourceAndTarget(a, b); final SourceAndTarget sourceAndTarget = new SourceAndTarget(a, b);
awaitMirrorMakerStart(mm, sourceAndTarget, Arrays.asList(MirrorSourceConnector.class, MirrorHeartbeatConnector.class)); awaitMirrorMakerStart(mm, sourceAndTarget, List.of(MirrorSourceConnector.class, MirrorHeartbeatConnector.class));
// wait for mirror source and heartbeat connectors to start a task // wait for mirror source and heartbeat connectors to start a task
awaitConnectorTasksStart(mm, MirrorHeartbeatConnector.class, sourceAndTarget); awaitConnectorTasksStart(mm, MirrorHeartbeatConnector.class, sourceAndTarget);
@ -256,7 +253,7 @@ public class DedicatedMirrorIntegrationTest {
.stream() .stream()
.filter(Optional::isPresent) .filter(Optional::isPresent)
.map(Optional::get) .map(Optional::get)
.collect(Collectors.toList()); .toList();
assertTrue(offsetSyncTopic.isEmpty()); assertTrue(offsetSyncTopic.isEmpty());
} }
@ -451,8 +448,8 @@ public class DedicatedMirrorIntegrationTest {
} }
private void awaitTopicContent(EmbeddedKafkaCluster cluster, String clusterName, String topic, int numMessages) throws Exception { private void awaitTopicContent(EmbeddedKafkaCluster cluster, String clusterName, String topic, int numMessages) throws Exception {
try (Consumer<?, ?> consumer = cluster.createConsumer(Collections.singletonMap(AUTO_OFFSET_RESET_CONFIG, "earliest"))) { try (Consumer<?, ?> consumer = cluster.createConsumer(Map.of(AUTO_OFFSET_RESET_CONFIG, "earliest"))) {
consumer.subscribe(Collections.singleton(topic)); consumer.subscribe(Set.of(topic));
AtomicInteger messagesRead = new AtomicInteger(0); AtomicInteger messagesRead = new AtomicInteger(0);
waitForCondition( waitForCondition(
() -> { () -> {
@ -67,9 +67,7 @@ import org.slf4j.LoggerFactory;
import java.time.Duration; import java.time.Duration;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -127,7 +125,7 @@ public class MirrorConnectorsIntegrationBaseTest {
protected static final Duration CONSUMER_POLL_TIMEOUT = Duration.ofMillis(500L); protected static final Duration CONSUMER_POLL_TIMEOUT = Duration.ofMillis(500L);
protected static final String PRIMARY_CLUSTER_ALIAS = "primary"; protected static final String PRIMARY_CLUSTER_ALIAS = "primary";
protected static final String BACKUP_CLUSTER_ALIAS = "backup"; protected static final String BACKUP_CLUSTER_ALIAS = "backup";
protected static final List<Class<? extends Connector>> CONNECTOR_LIST = Arrays.asList( protected static final List<Class<? extends Connector>> CONNECTOR_LIST = List.of(
MirrorSourceConnector.class, MirrorSourceConnector.class,
MirrorCheckpointConnector.class, MirrorCheckpointConnector.class,
MirrorHeartbeatConnector.class); MirrorHeartbeatConnector.class);
@ -243,7 +241,7 @@ public class MirrorConnectorsIntegrationBaseTest {
waitForTopicCreated(backup, "mm2-configs.primary.internal"); waitForTopicCreated(backup, "mm2-configs.primary.internal");
waitForTopicCreated(backup, "test-topic-1"); waitForTopicCreated(backup, "test-topic-1");
waitForTopicCreated(primary, "test-topic-1"); waitForTopicCreated(primary, "test-topic-1");
warmUpConsumer(Collections.singletonMap("group.id", "consumer-group-dummy")); warmUpConsumer(Map.of("group.id", "consumer-group-dummy"));
log.info(PRIMARY_CLUSTER_ALIAS + " REST service: {}", primary.endpointForResource("connectors")); log.info(PRIMARY_CLUSTER_ALIAS + " REST service: {}", primary.endpointForResource("connectors"));
log.info(BACKUP_CLUSTER_ALIAS + " REST service: {}", backup.endpointForResource("connectors")); log.info(BACKUP_CLUSTER_ALIAS + " REST service: {}", backup.endpointForResource("connectors"));
@ -290,14 +288,14 @@ public class MirrorConnectorsIntegrationBaseTest {
} }
String reverseTopic1 = remoteTopicName("test-topic-1", BACKUP_CLUSTER_ALIAS); String reverseTopic1 = remoteTopicName("test-topic-1", BACKUP_CLUSTER_ALIAS);
String consumerGroupName = "consumer-group-testReplication"; String consumerGroupName = "consumer-group-testReplication";
Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName); Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
// warm up consumers before starting the connectors, so we don't need to wait for discovery // warm up consumers before starting the connectors, so we don't need to wait for discovery
warmUpConsumer(consumerProps); warmUpConsumer(consumerProps);
mm2Config = new MirrorMakerConfig(mm2Props); mm2Config = new MirrorMakerConfig(mm2Props);
waitUntilMirrorMakerIsRunning(backup, CONNECTOR_LIST, mm2Config, PRIMARY_CLUSTER_ALIAS, BACKUP_CLUSTER_ALIAS); waitUntilMirrorMakerIsRunning(backup, CONNECTOR_LIST, mm2Config, PRIMARY_CLUSTER_ALIAS, BACKUP_CLUSTER_ALIAS);
List<Class<? extends Connector>> primaryConnectors = replicateBackupToPrimary ? CONNECTOR_LIST : Collections.singletonList(MirrorHeartbeatConnector.class); List<Class<? extends Connector>> primaryConnectors = replicateBackupToPrimary ? CONNECTOR_LIST : List.of(MirrorHeartbeatConnector.class);
waitUntilMirrorMakerIsRunning(primary, primaryConnectors, mm2Config, BACKUP_CLUSTER_ALIAS, PRIMARY_CLUSTER_ALIAS); waitUntilMirrorMakerIsRunning(primary, primaryConnectors, mm2Config, BACKUP_CLUSTER_ALIAS, PRIMARY_CLUSTER_ALIAS);
MirrorClient primaryClient = new MirrorClient(mm2Config.clientConfig(PRIMARY_CLUSTER_ALIAS)); MirrorClient primaryClient = new MirrorClient(mm2Config.clientConfig(PRIMARY_CLUSTER_ALIAS));
@ -370,7 +368,7 @@ public class MirrorConnectorsIntegrationBaseTest {
backupClient, consumerGroupName, PRIMARY_CLUSTER_ALIAS, backupTopic1); backupClient, consumerGroupName, PRIMARY_CLUSTER_ALIAS, backupTopic1);
// Failover consumer group to backup cluster. // Failover consumer group to backup cluster.
try (Consumer<byte[], byte[]> primaryConsumer = backup.kafka().createConsumer(Collections.singletonMap("group.id", consumerGroupName))) { try (Consumer<byte[], byte[]> primaryConsumer = backup.kafka().createConsumer(Map.of("group.id", consumerGroupName))) {
primaryConsumer.assign(backupOffsets.keySet()); primaryConsumer.assign(backupOffsets.keySet());
backupOffsets.forEach(primaryConsumer::seek); backupOffsets.forEach(primaryConsumer::seek);
primaryConsumer.poll(CONSUMER_POLL_TIMEOUT); primaryConsumer.poll(CONSUMER_POLL_TIMEOUT);
@ -391,7 +389,7 @@ public class MirrorConnectorsIntegrationBaseTest {
primaryClient, consumerGroupName, BACKUP_CLUSTER_ALIAS, reverseTopic1); primaryClient, consumerGroupName, BACKUP_CLUSTER_ALIAS, reverseTopic1);
// Failback consumer group to primary cluster // Failback consumer group to primary cluster
try (Consumer<byte[], byte[]> primaryConsumer = primary.kafka().createConsumer(Collections.singletonMap("group.id", consumerGroupName))) { try (Consumer<byte[], byte[]> primaryConsumer = primary.kafka().createConsumer(Map.of("group.id", consumerGroupName))) {
primaryConsumer.assign(primaryOffsets.keySet()); primaryConsumer.assign(primaryOffsets.keySet());
primaryOffsets.forEach(primaryConsumer::seek); primaryOffsets.forEach(primaryConsumer::seek);
primaryConsumer.poll(CONSUMER_POLL_TIMEOUT); primaryConsumer.poll(CONSUMER_POLL_TIMEOUT);
@ -435,7 +433,7 @@ public class MirrorConnectorsIntegrationBaseTest {
@Test @Test
public void testReplicationWithEmptyPartition() throws Exception { public void testReplicationWithEmptyPartition() throws Exception {
String consumerGroupName = "consumer-group-testReplicationWithEmptyPartition"; String consumerGroupName = "consumer-group-testReplicationWithEmptyPartition";
Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName); Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
// create topic // create topic
String topic = "test-topic-with-empty-partition"; String topic = "test-topic-with-empty-partition";
@ -526,7 +524,7 @@ public class MirrorConnectorsIntegrationBaseTest {
try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo( try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo(
consumerProps, backupTopic1)) { consumerProps, backupTopic1)) {
waitForConsumerGroupFullSync(backup, Collections.singletonList(backupTopic1), waitForConsumerGroupFullSync(backup, List.of(backupTopic1),
consumerGroupName, NUM_RECORDS_PRODUCED, offsetLagMax); consumerGroupName, NUM_RECORDS_PRODUCED, offsetLagMax);
assertDownstreamRedeliveriesBoundedByMaxLag(backupConsumer, offsetLagMax); assertDownstreamRedeliveriesBoundedByMaxLag(backupConsumer, offsetLagMax);
} }
@ -541,17 +539,17 @@ public class MirrorConnectorsIntegrationBaseTest {
produceMessages(primaryProducer, "test-topic-2"); produceMessages(primaryProducer, "test-topic-2");
// create a consumer at primary cluster to consume the new topic // create a consumer at primary cluster to consume the new topic
try (Consumer<byte[], byte[]> consumer1 = primary.kafka().createConsumerAndSubscribeTo(Collections.singletonMap( try (Consumer<byte[], byte[]> consumer1 = primary.kafka().createConsumerAndSubscribeTo(Map.of(
"group.id", consumerGroupName), "test-topic-2")) { "group.id", consumerGroupName), "test-topic-2")) {
// we need to wait for consuming all the records for MM2 replicating the expected offsets // we need to wait for consuming all the records for MM2 replicating the expected offsets
waitForConsumingAllRecords(consumer1, NUM_RECORDS_PRODUCED); waitForConsumingAllRecords(consumer1, NUM_RECORDS_PRODUCED);
} }
// create a consumer at backup cluster with same consumer group ID to consume old and new topic // create a consumer at backup cluster with same consumer group ID to consume old and new topic
try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo(Collections.singletonMap( try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo(Map.of(
"group.id", consumerGroupName), backupTopic1, remoteTopic2)) { "group.id", consumerGroupName), backupTopic1, remoteTopic2)) {
waitForConsumerGroupFullSync(backup, Arrays.asList(backupTopic1, remoteTopic2), waitForConsumerGroupFullSync(backup, List.of(backupTopic1, remoteTopic2),
consumerGroupName, NUM_RECORDS_PRODUCED, offsetLagMax); consumerGroupName, NUM_RECORDS_PRODUCED, offsetLagMax);
assertDownstreamRedeliveriesBoundedByMaxLag(backupConsumer, offsetLagMax); assertDownstreamRedeliveriesBoundedByMaxLag(backupConsumer, offsetLagMax);
} }
@ -567,7 +565,7 @@ public class MirrorConnectorsIntegrationBaseTest {
produceMessages(backupProducer, "test-topic-1"); produceMessages(backupProducer, "test-topic-1");
} }
String consumerGroupName = "consumer-group-testReplication"; String consumerGroupName = "consumer-group-testReplication";
Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName); Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
// warm up consumers before starting the connectors, so we don't need to wait for discovery // warm up consumers before starting the connectors, so we don't need to wait for discovery
warmUpConsumer(consumerProps); warmUpConsumer(consumerProps);
@ -576,7 +574,7 @@ public class MirrorConnectorsIntegrationBaseTest {
mm2Config = new MirrorMakerConfig(mm2Props); mm2Config = new MirrorMakerConfig(mm2Props);
waitUntilMirrorMakerIsRunning(backup, Arrays.asList(MirrorSourceConnector.class, MirrorHeartbeatConnector.class), mm2Config, PRIMARY_CLUSTER_ALIAS, BACKUP_CLUSTER_ALIAS); waitUntilMirrorMakerIsRunning(backup, List.of(MirrorSourceConnector.class, MirrorHeartbeatConnector.class), mm2Config, PRIMARY_CLUSTER_ALIAS, BACKUP_CLUSTER_ALIAS);
MirrorClient primaryClient = new MirrorClient(mm2Config.clientConfig(PRIMARY_CLUSTER_ALIAS)); MirrorClient primaryClient = new MirrorClient(mm2Config.clientConfig(PRIMARY_CLUSTER_ALIAS));
MirrorClient backupClient = new MirrorClient(mm2Config.clientConfig(BACKUP_CLUSTER_ALIAS)); MirrorClient backupClient = new MirrorClient(mm2Config.clientConfig(BACKUP_CLUSTER_ALIAS));
@ -595,7 +593,7 @@ public class MirrorConnectorsIntegrationBaseTest {
.stream() .stream()
.filter(Optional::isPresent) .filter(Optional::isPresent)
.map(Optional::get) .map(Optional::get)
.collect(Collectors.toList()); .toList();
assertTrue(offsetSyncTopic.isEmpty()); assertTrue(offsetSyncTopic.isEmpty());
primaryClient.close(); primaryClient.close();
@ -617,7 +615,7 @@ public class MirrorConnectorsIntegrationBaseTest {
waitForTopicCreated(backup, "mm2-offset-syncs." + PRIMARY_CLUSTER_ALIAS + ".internal"); waitForTopicCreated(backup, "mm2-offset-syncs." + PRIMARY_CLUSTER_ALIAS + ".internal");
String consumerGroupName = "consumer-group-syncs-on-target"; String consumerGroupName = "consumer-group-syncs-on-target";
Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName); Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
produceMessages(primaryProducer, "test-topic-1"); produceMessages(primaryProducer, "test-topic-1");
@ -626,7 +624,7 @@ public class MirrorConnectorsIntegrationBaseTest {
String remoteTopic = remoteTopicName("test-topic-1", PRIMARY_CLUSTER_ALIAS); String remoteTopic = remoteTopicName("test-topic-1", PRIMARY_CLUSTER_ALIAS);
// Check offsets are pushed to the checkpoint topic // Check offsets are pushed to the checkpoint topic
try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo(Collections.singletonMap( try (Consumer<byte[], byte[]> backupConsumer = backup.kafka().createConsumerAndSubscribeTo(Map.of(
"auto.offset.reset", "earliest"), PRIMARY_CLUSTER_ALIAS + ".checkpoints.internal")) { "auto.offset.reset", "earliest"), PRIMARY_CLUSTER_ALIAS + ".checkpoints.internal")) {
waitForCondition(() -> { waitForCondition(() -> {
ConsumerRecords<byte[], byte[]> records = backupConsumer.poll(Duration.ofSeconds(1L)); ConsumerRecords<byte[], byte[]> records = backupConsumer.poll(Duration.ofSeconds(1L));
@ -655,7 +653,7 @@ public class MirrorConnectorsIntegrationBaseTest {
@Test @Test
public void testNoCheckpointsIfNoRecordsAreMirrored() throws InterruptedException { public void testNoCheckpointsIfNoRecordsAreMirrored() throws InterruptedException {
String consumerGroupName = "consumer-group-no-checkpoints"; String consumerGroupName = "consumer-group-no-checkpoints";
Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName); Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
// ensure there are some records in the topic on the source cluster // ensure there are some records in the topic on the source cluster
produceMessages(primaryProducer, "test-topic-1"); produceMessages(primaryProducer, "test-topic-1");
@ -676,7 +674,7 @@ public class MirrorConnectorsIntegrationBaseTest {
TopicPartition tp1 = new TopicPartition("test-topic-1", 0); TopicPartition tp1 = new TopicPartition("test-topic-1", 0);
TopicPartition tp2 = new TopicPartition("test-topic-no-checkpoints", 0); TopicPartition tp2 = new TopicPartition("test-topic-no-checkpoints", 0);
try (Consumer<byte[], byte[]> consumer = primary.kafka().createConsumer(consumerProps)) { try (Consumer<byte[], byte[]> consumer = primary.kafka().createConsumer(consumerProps)) {
-Collection<TopicPartition> tps = Arrays.asList(tp1, tp2);
+Collection<TopicPartition> tps = List.of(tp1, tp2);
 Map<TopicPartition, Long> endOffsets = consumer.endOffsets(tps);
 Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = endOffsets.entrySet().stream()
 .collect(Collectors.toMap(
@@ -699,7 +697,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 produceMessages(primaryProducer, "test-topic-no-checkpoints");
 try (Consumer<byte[], byte[]> consumer = primary.kafka().createConsumer(consumerProps)) {
-Collection<TopicPartition> tps = Arrays.asList(tp1, tp2);
+Collection<TopicPartition> tps = List.of(tp1, tp2);
 Map<TopicPartition, Long> endOffsets = consumer.endOffsets(tps);
 Map<TopicPartition, OffsetAndMetadata> offsetsToCommit = endOffsets.entrySet().stream()
 .collect(Collectors.toMap(
@@ -722,7 +720,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 @Test
 public void testRestartReplication() throws InterruptedException {
 String consumerGroupName = "consumer-group-restart";
-Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName);
+Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
 String remoteTopic = remoteTopicName("test-topic-1", PRIMARY_CLUSTER_ALIAS);
 warmUpConsumer(consumerProps);
 mm2Props.put("sync.group.offsets.enabled", "true");
@@ -734,7 +732,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 try (Consumer<byte[], byte[]> primaryConsumer = primary.kafka().createConsumerAndSubscribeTo(consumerProps, "test-topic-1")) {
 waitForConsumingAllRecords(primaryConsumer, NUM_RECORDS_PRODUCED);
 }
-waitForConsumerGroupFullSync(backup, Collections.singletonList(remoteTopic), consumerGroupName, NUM_RECORDS_PRODUCED, OFFSET_LAG_MAX);
+waitForConsumerGroupFullSync(backup, List.of(remoteTopic), consumerGroupName, NUM_RECORDS_PRODUCED, OFFSET_LAG_MAX);
 restartMirrorMakerConnectors(backup, CONNECTOR_LIST);
 assertMonotonicCheckpoints(backup, "primary.checkpoints.internal");
 Thread.sleep(5000);
@@ -742,14 +740,14 @@ public class MirrorConnectorsIntegrationBaseTest {
 try (Consumer<byte[], byte[]> primaryConsumer = primary.kafka().createConsumerAndSubscribeTo(consumerProps, "test-topic-1")) {
 waitForConsumingAllRecords(primaryConsumer, NUM_RECORDS_PRODUCED);
 }
-waitForConsumerGroupFullSync(backup, Collections.singletonList(remoteTopic), consumerGroupName, 2 * NUM_RECORDS_PRODUCED, OFFSET_LAG_MAX);
+waitForConsumerGroupFullSync(backup, List.of(remoteTopic), consumerGroupName, 2 * NUM_RECORDS_PRODUCED, OFFSET_LAG_MAX);
 assertMonotonicCheckpoints(backup, "primary.checkpoints.internal");
 }
 @Test
 public void testOffsetTranslationBehindReplicationFlow() throws InterruptedException {
 String consumerGroupName = "consumer-group-lagging-behind";
-Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName);
+Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
 String remoteTopic = remoteTopicName("test-topic-1", PRIMARY_CLUSTER_ALIAS);
 warmUpConsumer(consumerProps);
 mm2Props.put("sync.group.offsets.enabled", "true");
@@ -839,7 +837,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 Collection<AlterConfigOp> ops = new ArrayList<>();
 ops.add(new AlterConfigOp(new ConfigEntry("delete.retention.ms", "2000"), AlterConfigOp.OpType.SET));
 ops.add(new AlterConfigOp(new ConfigEntry("retention.bytes", "2000"), AlterConfigOp.OpType.SET));
-Map<ConfigResource, Collection<AlterConfigOp>> configOps = Collections.singletonMap(configResource, ops);
+Map<ConfigResource, Collection<AlterConfigOp>> configOps = Map.of(configResource, ops);
 // alter configs on target cluster
 backup.kafka().incrementalAlterConfigs(configOps);
@@ -879,7 +877,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 Collection<AlterConfigOp> ops = new ArrayList<>();
 ops.add(new AlterConfigOp(new ConfigEntry("delete.retention.ms", "2000"), AlterConfigOp.OpType.SET));
 ops.add(new AlterConfigOp(new ConfigEntry("retention.bytes", "2000"), AlterConfigOp.OpType.SET));
-Map<ConfigResource, Collection<AlterConfigOp>> configOps = Collections.singletonMap(configResource, ops);
+Map<ConfigResource, Collection<AlterConfigOp>> configOps = Map.of(configResource, ops);
 backup.kafka().incrementalAlterConfigs(configOps);
 waitForCondition(() -> {
@@ -933,7 +931,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 ConfigResource configResource = new ConfigResource(ConfigResource.Type.TOPIC, topic);
 Collection<AlterConfigOp> ops = new ArrayList<>();
 ops.add(new AlterConfigOp(new ConfigEntry("retention.bytes", "1000"), AlterConfigOp.OpType.DELETE));
-Map<ConfigResource, Collection<AlterConfigOp>> configOps = Collections.singletonMap(configResource, ops);
+Map<ConfigResource, Collection<AlterConfigOp>> configOps = Map.of(configResource, ops);
 primary.kafka().incrementalAlterConfigs(configOps);
 waitForCondition(() -> {
@@ -1155,7 +1153,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 String connectorName = connectorClass.getSimpleName();
 connectCluster.resetConnectorOffsets(connectorName);
 assertEquals(
-Collections.emptyList(),
+List.of(),
 connectCluster.connectorOffsets(connectorName).offsets(),
 "Offsets for connector should be completely empty after full reset"
 );
@@ -1181,7 +1179,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 */
 protected static String getTopicConfig(EmbeddedKafkaCluster cluster, String topic, String configName) throws Exception {
 try (Admin client = cluster.createAdminClient()) {
-Collection<ConfigResource> cr = Collections.singleton(
+Collection<ConfigResource> cr = Set.of(
 new ConfigResource(ConfigResource.Type.TOPIC, topic));
 DescribeConfigsResult configsResult = client.describeConfigs(cr);
@@ -1200,7 +1198,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 protected Producer<byte[], byte[]> initializeProducer(EmbeddedConnectCluster cluster) {
-return cluster.kafka().createProducer(Collections.emptyMap());
+return cluster.kafka().createProducer(Map.of());
 }
 /**
@@ -1224,7 +1222,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 private static Map<TopicPartition, OffsetAndMetadata> waitForCheckpointOnAllPartitions(
 MirrorClient client, String consumerGroupName, String remoteClusterAlias, String topicName
 ) throws InterruptedException {
-return waitForNewCheckpointOnAllPartitions(client, consumerGroupName, remoteClusterAlias, topicName, Collections.emptyMap());
+return waitForNewCheckpointOnAllPartitions(client, consumerGroupName, remoteClusterAlias, topicName, Map.of());
 }
 protected static Map<TopicPartition, OffsetAndMetadata> waitForNewCheckpointOnAllPartitions(
@@ -1318,7 +1316,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 private static void assertMonotonicCheckpoints(EmbeddedConnectCluster cluster, String checkpointTopic) {
 TopicPartition checkpointTopicPartition = new TopicPartition(checkpointTopic, 0);
-try (Consumer<byte[], byte[]> backupConsumer = cluster.kafka().createConsumerAndSubscribeTo(Collections.singletonMap(
+try (Consumer<byte[], byte[]> backupConsumer = cluster.kafka().createConsumerAndSubscribeTo(Map.of(
 "auto.offset.reset", "earliest"), checkpointTopic)) {
 Map<String, Map<TopicPartition, Checkpoint>> checkpointsByGroup = new HashMap<>();
 long deadline = System.currentTimeMillis() + CHECKPOINT_DURATION_MS;
@@ -1390,11 +1388,11 @@ public class MirrorConnectorsIntegrationBaseTest {
 private void createTopics() {
 // to verify topic config will be sync-ed across clusters
-Map<String, String> topicConfig = Collections.singletonMap(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
-Map<String, String> emptyMap = Collections.emptyMap();
+Map<String, String> topicConfig = Map.of(TopicConfig.CLEANUP_POLICY_CONFIG, TopicConfig.CLEANUP_POLICY_COMPACT);
+Map<String, String> emptyMap = Map.of();
 // increase admin client request timeout value to make the tests reliable.
-Map<String, Object> adminClientConfig = Collections.singletonMap(
+Map<String, Object> adminClientConfig = Map.of(
 AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, REQUEST_TIMEOUT_DURATION_MS);
 // create these topics before starting the connectors, so we don't need to wait for discovery
@@ -1465,7 +1463,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 topicPartitionsPendingPosition.removeAll(topicPartitionsWithPosition);
 }
 assertEquals(
-Collections.emptySet(),
+Set.of(),
 topicPartitionsPendingPosition,
 "Failed to calculate consumer position for one or more partitions on cluster " + clusterName + " in time"
 );
@@ -1494,7 +1492,7 @@ public class MirrorConnectorsIntegrationBaseTest {
 */
 protected static void waitForTopicPartitionCreated(EmbeddedConnectCluster cluster, String topicName, int totalNumPartitions) throws InterruptedException {
 try (final Admin adminClient = cluster.kafka().createAdminClient()) {
-waitForCondition(() -> adminClient.describeTopics(Collections.singleton(topicName)).allTopicNames().get()
+waitForCondition(() -> adminClient.describeTopics(Set.of(topicName)).allTopicNames().get()
 .get(topicName).partitions().size() == totalNumPartitions, TOPIC_SYNC_DURATION_MS,
 "Topic: " + topicName + "'s partitions didn't get created on cluster: " + cluster.getName()
 );
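Note: the Arrays.asList/Collections.singleton*/Collections.empty* to List.of/Map.of/Set.of swaps above are drop-in for these tests, but the JDK factory methods are stricter: they reject null elements, keys and values with a NullPointerException, the returned collections are fully immutable, and Map.of gives no iteration-order guarantee. A minimal sketch of that difference, using only plain JDK types (the class name FactoryMethodSemantics is illustrative, not part of this change):

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class FactoryMethodSemantics {
    public static void main(String[] args) {
        // Arrays.asList is fixed-size but mutable via set(), and it tolerates nulls.
        List<String> legacy = Arrays.asList("a", null);
        legacy.set(1, "b");

        // List.of rejects nulls eagerly and is fully immutable:
        //   List.of("a", null)  -> NullPointerException
        //   modern.set(1, "c")  -> UnsupportedOperationException
        List<String> modern = List.of("a", "b");

        // Map.of likewise rejects null keys/values and duplicate keys,
        // and unlike Collections.singletonMap it makes no ordering promise.
        Map<String, String> config = Map.of("cleanup.policy", "compact");

        System.out.println(legacy + " " + modern + " " + config);
    }
}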
MirrorConnectorsIntegrationExactlyOnceTest.java
@@ -24,7 +24,7 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
-import java.util.Arrays;
+import java.util.List;
 import java.util.Properties;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -45,7 +45,7 @@ public class MirrorConnectorsIntegrationExactlyOnceTest extends MirrorConnectors
 BACKUP_CLUSTER_ALIAS + "." + DistributedConfig.EXACTLY_ONCE_SOURCE_SUPPORT_CONFIG,
 DistributedConfig.ExactlyOnceSourceSupport.ENABLED.toString()
 );
-for (Properties brokerProps : Arrays.asList(primaryBrokerProps, backupBrokerProps)) {
+for (Properties brokerProps : List.of(primaryBrokerProps, backupBrokerProps)) {
 brokerProps.put("transaction.state.log.replication.factor", "1");
 brokerProps.put("transaction.state.log.min.isr", "1");
 }
MirrorConnectorsWithCustomForwardingAdminIntegrationTest.java
@@ -43,9 +43,7 @@ import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Tag;
 import org.junit.jupiter.api.Test;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -172,7 +170,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 startClusters(additionalConfig);
 try (Admin adminClient = primary.kafka().createAdminClient()) {
-adminClient.createAcls(Collections.singletonList(
+adminClient.createAcls(List.of(
 new AclBinding(
 new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
 new AccessControlEntry("User:connector", "*", AclOperation.ALL, AclPermissionType.ALLOW)
@@ -180,7 +178,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 )).all().get();
 }
 try (Admin adminClient = backup.kafka().createAdminClient()) {
-adminClient.createAcls(Collections.singletonList(
+adminClient.createAcls(List.of(
 new AclBinding(
 new ResourcePattern(ResourceType.TOPIC, "*", PatternType.LITERAL),
 new AccessControlEntry("User:connector", "*", AclOperation.ALL, AclPermissionType.ALLOW)
@@ -202,7 +200,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 produceMessages(primaryProducer, "test-topic-1");
 produceMessages(backupProducer, "test-topic-1");
 String consumerGroupName = "consumer-group-testReplication";
-Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName);
+Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
 // warm up consumers before starting the connectors so we don't need to wait for discovery
 warmUpConsumer(consumerProps);
@@ -239,7 +237,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 produceMessages(backupProducer, "test-topic-1");
 produceMessages(primaryProducer, "test-topic-1");
 String consumerGroupName = "consumer-group-testReplication";
-Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName);
+Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
 // warm up consumers before starting the connectors so we don't need to wait for discovery
 warmUpConsumer(consumerProps);
@@ -255,7 +253,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 waitForTopicToPersistInFakeLocalMetadataStore("primary.test-topic-1");
 // increase number of partitions
-Map<String, NewPartitions> newPartitions = Collections.singletonMap("test-topic-1", NewPartitions.increaseTo(NUM_PARTITIONS + 1));
+Map<String, NewPartitions> newPartitions = Map.of("test-topic-1", NewPartitions.increaseTo(NUM_PARTITIONS + 1));
 try (Admin adminClient = primary.kafka().createAdminClient()) {
 adminClient.createPartitions(newPartitions).all().get();
 }
@@ -274,7 +272,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 produceMessages(backupProducer, "test-topic-1");
 produceMessages(primaryProducer, "test-topic-1");
 String consumerGroupName = "consumer-group-testReplication";
-Map<String, Object> consumerProps = Collections.singletonMap("group.id", consumerGroupName);
+Map<String, Object> consumerProps = Map.of("group.id", consumerGroupName);
 // warm up consumers before starting the connectors so we don't need to wait for discovery
 warmUpConsumer(consumerProps);
@@ -302,7 +300,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 mm2Props.put("sync.topic.acls.enabled", "true");
 mm2Props.put("sync.topic.acls.interval.seconds", "1");
 mm2Config = new MirrorMakerConfig(mm2Props);
-List<AclBinding> aclBindings = Collections.singletonList(
+List<AclBinding> aclBindings = List.of(
 new AclBinding(
 new ResourcePattern(ResourceType.TOPIC, "test-topic-1", PatternType.LITERAL),
 new AccessControlEntry("User:dummy", "*", AclOperation.DESCRIBE, AclPermissionType.ALLOW)
@@ -344,7 +342,7 @@ public class MirrorConnectorsWithCustomForwardingAdminIntegrationTest extends Mi
 );
 // expect to use FakeForwardingAdminWithLocalMetadata to update topic ACLs in FakeLocalMetadataStore.allAcls
-assertTrue(FakeLocalMetadataStore.aclBindings("dummy").containsAll(Arrays.asList(expectedACLOnBackupCluster, expectedACLOnPrimaryCluster)));
+assertTrue(FakeLocalMetadataStore.aclBindings("dummy").containsAll(List.of(expectedACLOnBackupCluster, expectedACLOnPrimaryCluster)));
 }
 void waitForTopicToPersistInFakeLocalMetadataStore(String topicName) throws InterruptedException {