MINOR: Update JUnit to 4.13 and annotate log cleaner integration test (#6248)

JUnit 4.13 fixes the issue where `Category` and `Parameterized` annotations
could not be used together. It also deprecates `ExpectedException` and
`assertThat`. Given this, we:

- Replace `ExpectedException` with the newly introduced `assertThrows`.
- Replace `Assert.assertThat` with `MatcherAssert.assertThat`.
- Annotate `AbstractLogCleanerIntegrationTest` with `IntegrationTest` category.
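
For reference, the shape of the first two replacements is sketched below: a minimal, illustrative JUnit 4.13 test (hypothetical class name and inputs, not code from this change):

```java
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertThrows;

import org.junit.Test;

public class MigrationPatternTest {

    @Test
    public void shouldThrowWithMessage() {
        // Before (JUnit 4.12), with an ExpectedException rule:
        //     thrown.expect(NumberFormatException.class);
        //     thrown.expectMessage("not-a-number");
        //     Integer.parseInt("not-a-number");
        // After (JUnit 4.13): wrap the failing call and inspect the returned exception.
        NumberFormatException e = assertThrows(NumberFormatException.class,
                () -> Integer.parseInt("not-a-number"));
        // MatcherAssert.assertThat replaces the deprecated Assert.assertThat.
        assertThat(e.getMessage(), containsString("not-a-number"));
    }
}
```

One advantage of `assertThrows` over the rule is visible throughout the diff below: the expected exception and the statement that throws it sit together, so assertions can follow the throwing call instead of being dead code after it.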

Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>, David Arthur <mumrah@gmail.com>
Ismael Juma committed on 2019-02-11 22:06:14 -08:00 (via GitHub)
parent 08036fa4b1
commit c7f99bc2bd
42 changed files with 252 additions and 277 deletions

View File

@@ -78,9 +78,7 @@ import org.apache.kafka.test.MockMetricsReporter;
 import org.apache.kafka.test.TestCondition;
 import org.apache.kafka.test.TestUtils;
 import org.junit.Assert;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;

 import java.nio.ByteBuffer;
 import java.time.Duration;
@@ -115,6 +113,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 import static org.mockito.ArgumentMatchers.any;
@@ -143,9 +142,6 @@ public class KafkaConsumerTest {
     private final String groupId = "mock-group";

-    @Rule
-    public ExpectedException expectedException = ExpectedException.none();
-
     @Test
     public void testMetricsReporterAutoGeneratedClientId() {
         Properties props = new Properties();
@@ -840,13 +836,12 @@ public class KafkaConsumerTest {
         // interrupt the thread and call poll
         try {
             Thread.currentThread().interrupt();
-            expectedException.expect(InterruptException.class);
-            consumer.poll(Duration.ZERO);
+            assertThrows(InterruptException.class, () -> consumer.poll(Duration.ZERO));
         } finally {
             // clear interrupted state again since this thread may be reused by JUnit
             Thread.interrupted();
+            consumer.close(Duration.ofMillis(0));
         }
-        consumer.close(Duration.ofMillis(0));
     }

     @Test

View File

@@ -27,9 +27,9 @@ import java.util.HashMap;
 import java.util.Iterator;

 import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertThat;

 public class MockConsumerTest {

View File

@@ -33,8 +33,8 @@ import java.lang.management.ManagementFactory;
 import java.util.List;

 import static org.hamcrest.core.IsInstanceOf.instanceOf;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;

 public class KafkaMbeanTest {

View File

@@ -19,9 +19,7 @@ package org.apache.kafka.common.record;
 import net.jpountz.xxhash.XXHashFactory;
 import org.hamcrest.CoreMatchers;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
@@ -37,9 +35,11 @@ import java.util.List;
 import java.util.Random;

 import static org.apache.kafka.common.record.KafkaLZ4BlockOutputStream.LZ4_FRAME_INCOMPRESSIBLE_MASK;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;

 @RunWith(value = Parameterized.class)
@@ -71,9 +71,6 @@ public class KafkaLZ4Test {
         }
     }

-    @Rule
-    public ExpectedException thrown = ExpectedException.none();
-
     @Parameters(name = "{index} useBrokenFlagDescriptorChecksum={0}, ignoreFlagDescriptorChecksum={1}, blockChecksum={2}, close={3}, payload={4}")
     public static Collection<Object[]> data() {
         List<Payload> payloads = new ArrayList<>();
@@ -111,12 +108,10 @@ public class KafkaLZ4Test {
     }

     @Test
-    public void testHeaderPrematureEnd() throws Exception {
-        thrown.expect(IOException.class);
-        thrown.expectMessage(KafkaLZ4BlockInputStream.PREMATURE_EOS);
-
-        final ByteBuffer buffer = ByteBuffer.allocate(2);
-        makeInputStream(buffer);
+    public void testHeaderPrematureEnd() {
+        ByteBuffer buffer = ByteBuffer.allocate(2);
+        IOException e = assertThrows(IOException.class, () -> makeInputStream(buffer));
+        assertEquals(KafkaLZ4BlockInputStream.PREMATURE_EOS, e.getMessage());
     }

     private KafkaLZ4BlockInputStream makeInputStream(ByteBuffer buffer) throws IOException {
@@ -125,43 +120,41 @@ public class KafkaLZ4Test {
     @Test
     public void testNotSupported() throws Exception {
-        thrown.expect(IOException.class);
-        thrown.expectMessage(KafkaLZ4BlockInputStream.NOT_SUPPORTED);
-
         byte[] compressed = compressedBytes();
         compressed[0] = 0x00;

-        makeInputStream(ByteBuffer.wrap(compressed));
+        ByteBuffer buffer = ByteBuffer.wrap(compressed);
+        IOException e = assertThrows(IOException.class, () -> makeInputStream(buffer));
+        assertEquals(KafkaLZ4BlockInputStream.NOT_SUPPORTED, e.getMessage());
     }

     @Test
     public void testBadFrameChecksum() throws Exception {
-        if (!ignoreFlagDescriptorChecksum) {
-            thrown.expect(IOException.class);
-            thrown.expectMessage(KafkaLZ4BlockInputStream.DESCRIPTOR_HASH_MISMATCH);
-        }
-
         byte[] compressed = compressedBytes();
         compressed[6] = (byte) 0xFF;
+        ByteBuffer buffer = ByteBuffer.wrap(compressed);

-        makeInputStream(ByteBuffer.wrap(compressed));
+        if (ignoreFlagDescriptorChecksum) {
+            makeInputStream(buffer);
+        } else {
+            IOException e = assertThrows(IOException.class, () -> makeInputStream(buffer));
+            assertEquals(KafkaLZ4BlockInputStream.DESCRIPTOR_HASH_MISMATCH, e.getMessage());
+        }
     }

     @Test
     public void testBadBlockSize() throws Exception {
-        if (!close || (useBrokenFlagDescriptorChecksum && !ignoreFlagDescriptorChecksum)) return;
-
-        thrown.expect(IOException.class);
-        thrown.expectMessage(CoreMatchers.containsString("exceeded max"));
+        if (!close || (useBrokenFlagDescriptorChecksum && !ignoreFlagDescriptorChecksum))
+            return;

         byte[] compressed = compressedBytes();
-        final ByteBuffer buffer = ByteBuffer.wrap(compressed).order(ByteOrder.LITTLE_ENDIAN);
+        ByteBuffer buffer = ByteBuffer.wrap(compressed).order(ByteOrder.LITTLE_ENDIAN);

         int blockSize = buffer.getInt(7);
         blockSize = (blockSize & LZ4_FRAME_INCOMPRESSIBLE_MASK) | (1 << 24 & ~LZ4_FRAME_INCOMPRESSIBLE_MASK);
         buffer.putInt(7, blockSize);

-        testDecompression(buffer);
+        IOException e = assertThrows(IOException.class, () -> testDecompression(buffer));
+        assertThat(e.getMessage(), CoreMatchers.containsString("exceeded max"));
     }

View File

@@ -20,9 +20,7 @@ import org.apache.kafka.common.errors.UnsupportedCompressionTypeException;
 import org.apache.kafka.common.utils.Time;
 import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.test.TestUtils;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -30,6 +28,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.function.Supplier;
 import java.util.List;
 import java.util.Random;
@@ -38,14 +37,13 @@ import static org.apache.kafka.common.utils.Utils.utf8;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;

 @RunWith(value = Parameterized.class)
 public class MemoryRecordsBuilderTest {

-    @Rule
-    public ExpectedException exceptionRule = ExpectedException.none();
-
     private final CompressionType compressionType;
     private final int bufferOffset;
     private final Time time;
@@ -58,17 +56,25 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void testWriteEmptyRecordSet() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(128);
         buffer.position(bufferOffset);

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
-                TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
-                false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
-        MemoryRecords records = builder.build();
-        assertEquals(0, records.sizeInBytes());
-        assertEquals(bufferOffset, buffer.position());
+        Supplier<MemoryRecordsBuilder> builderSupplier = () -> new MemoryRecordsBuilder(buffer, magic,
+                compressionType, TimestampType.CREATE_TIME, 0L, 0L,
+                RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
+                false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
+
+        if (compressionType != CompressionType.ZSTD) {
+            MemoryRecords records = builderSupplier.get().build();
+            assertEquals(0, records.sizeInBytes());
+            assertEquals(bufferOffset, buffer.position());
+        } else {
+            Exception e = assertThrows(IllegalArgumentException.class, () -> builderSupplier.get().build());
+            assertEquals(e.getMessage(), "ZStandard compression is not supported for magic " + magic);
+        }
     }

     @Test
@@ -215,18 +221,19 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void testCompressionRateV0() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(1024);
         buffer.position(bufferOffset);

         LegacyRecord[] records = new LegacyRecord[] {
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V0, 0L, "a".getBytes(), "1".getBytes()),
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V0, 1L, "b".getBytes(), "2".getBytes()),
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V0, 2L, "c".getBytes(), "3".getBytes()),
+                LegacyRecord.create(magic, 0L, "a".getBytes(), "1".getBytes()),
+                LegacyRecord.create(magic, 1L, "b".getBytes(), "2".getBytes()),
+                LegacyRecord.create(magic, 2L, "c".getBytes(), "3".getBytes()),
         };

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
@@ -272,18 +279,19 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void testCompressionRateV1() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V1);
+        byte magic = RecordBatch.MAGIC_VALUE_V1;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(1024);
         buffer.position(bufferOffset);

         LegacyRecord[] records = new LegacyRecord[] {
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V1, 0L, "a".getBytes(), "1".getBytes()),
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V1, 1L, "b".getBytes(), "2".getBytes()),
-                LegacyRecord.create(RecordBatch.MAGIC_VALUE_V1, 2L, "c".getBytes(), "3".getBytes()),
+                LegacyRecord.create(magic, 0L, "a".getBytes(), "1".getBytes()),
+                LegacyRecord.create(magic, 1L, "b".getBytes(), "2".getBytes()),
+                LegacyRecord.create(magic, 2L, "c".getBytes(), "3".getBytes()),
         };

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V1, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
@@ -305,13 +313,14 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void buildUsingLogAppendTime() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V1);
+        byte magic = RecordBatch.MAGIC_VALUE_V1;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(1024);
         buffer.position(bufferOffset);

         long logAppendTime = System.currentTimeMillis();
-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V1, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.LOG_APPEND_TIME, 0L, logAppendTime, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH,
                 RecordBatch.NO_SEQUENCE, false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.append(0L, "a".getBytes(), "1".getBytes());
@@ -336,13 +345,14 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void buildUsingCreateTime() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V1);
+        byte magic = RecordBatch.MAGIC_VALUE_V1;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(1024);
         buffer.position(bufferOffset);

         long logAppendTime = System.currentTimeMillis();
-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V1, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, logAppendTime, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.append(0L, "a".getBytes(), "1".getBytes());
@@ -369,7 +379,8 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void testAppendedChecksumConsistency() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        assumeAtLeastV2OrNotZstd(RecordBatch.MAGIC_VALUE_V0);
+        assumeAtLeastV2OrNotZstd(RecordBatch.MAGIC_VALUE_V1);

         ByteBuffer buffer = ByteBuffer.allocate(512);
         for (byte magic : Arrays.asList(RecordBatch.MAGIC_VALUE_V0, RecordBatch.MAGIC_VALUE_V1, RecordBatch.MAGIC_VALUE_V2)) {
@@ -415,13 +426,14 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void writePastLimit() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V1);
+        byte magic = RecordBatch.MAGIC_VALUE_V1;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(64);
         buffer.position(bufferOffset);

         long logAppendTime = System.currentTimeMillis();
-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V1, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, logAppendTime, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.setEstimatedCompressionRatio(0.5f);
@@ -462,11 +474,6 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void convertV2ToV1UsingMixedCreateAndLogAppendTime() {
-        if (compressionType == CompressionType.ZSTD) {
-            exceptionRule.expect(UnsupportedCompressionTypeException.class);
-            exceptionRule.expectMessage("Down-conversion of zstandard-compressed batches is not supported");
-        }
-
         ByteBuffer buffer = ByteBuffer.allocate(512);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V2,
                 compressionType, TimestampType.LOG_APPEND_TIME, 0L);
@@ -493,36 +500,44 @@ public class MemoryRecordsBuilderTest {
         buffer.flip();

-        ConvertedRecords<MemoryRecords> convertedRecords = MemoryRecords.readableRecords(buffer)
-                .downConvert(RecordBatch.MAGIC_VALUE_V1, 0, time);
-        MemoryRecords records = convertedRecords.records();
-
-        // Transactional markers are skipped when down converting to V1, so exclude them from size
-        verifyRecordsProcessingStats(convertedRecords.recordConversionStats(),
-                3, 3, records.sizeInBytes(), sizeExcludingTxnMarkers);
-
-        List<? extends RecordBatch> batches = Utils.toList(records.batches().iterator());
-        if (compressionType != CompressionType.NONE) {
-            assertEquals(2, batches.size());
-            assertEquals(TimestampType.LOG_APPEND_TIME, batches.get(0).timestampType());
-            assertEquals(TimestampType.CREATE_TIME, batches.get(1).timestampType());
-        } else {
-            assertEquals(3, batches.size());
-            assertEquals(TimestampType.LOG_APPEND_TIME, batches.get(0).timestampType());
-            assertEquals(TimestampType.CREATE_TIME, batches.get(1).timestampType());
-            assertEquals(TimestampType.CREATE_TIME, batches.get(2).timestampType());
-        }
-
-        List<Record> logRecords = Utils.toList(records.records().iterator());
-        assertEquals(3, logRecords.size());
-        assertEquals(ByteBuffer.wrap("1".getBytes()), logRecords.get(0).key());
-        assertEquals(ByteBuffer.wrap("2".getBytes()), logRecords.get(1).key());
-        assertEquals(ByteBuffer.wrap("3".getBytes()), logRecords.get(2).key());
+        Supplier<ConvertedRecords<MemoryRecords>> convertedRecordsSupplier = () ->
+                MemoryRecords.readableRecords(buffer).downConvert(RecordBatch.MAGIC_VALUE_V1, 0, time);
+
+        if (compressionType != CompressionType.ZSTD) {
+            ConvertedRecords<MemoryRecords> convertedRecords = convertedRecordsSupplier.get();
+            MemoryRecords records = convertedRecords.records();
+
+            // Transactional markers are skipped when down converting to V1, so exclude them from size
+            verifyRecordsProcessingStats(convertedRecords.recordConversionStats(),
+                    3, 3, records.sizeInBytes(), sizeExcludingTxnMarkers);
+
+            List<? extends RecordBatch> batches = Utils.toList(records.batches().iterator());
+            if (compressionType != CompressionType.NONE) {
+                assertEquals(2, batches.size());
+                assertEquals(TimestampType.LOG_APPEND_TIME, batches.get(0).timestampType());
+                assertEquals(TimestampType.CREATE_TIME, batches.get(1).timestampType());
+            } else {
+                assertEquals(3, batches.size());
+                assertEquals(TimestampType.LOG_APPEND_TIME, batches.get(0).timestampType());
+                assertEquals(TimestampType.CREATE_TIME, batches.get(1).timestampType());
+                assertEquals(TimestampType.CREATE_TIME, batches.get(2).timestampType());
+            }
+
+            List<Record> logRecords = Utils.toList(records.records().iterator());
+            assertEquals(3, logRecords.size());
+            assertEquals(ByteBuffer.wrap("1".getBytes()), logRecords.get(0).key());
+            assertEquals(ByteBuffer.wrap("2".getBytes()), logRecords.get(1).key());
+            assertEquals(ByteBuffer.wrap("3".getBytes()), logRecords.get(2).key());
+        } else {
+            Exception e = assertThrows(UnsupportedCompressionTypeException.class, convertedRecordsSupplier::get);
+            assertEquals("Down-conversion of zstandard-compressed batches is not supported", e.getMessage());
+        }
     }

     @Test
     public void convertToV1WithMixedV0AndV2Data() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        assumeAtLeastV2OrNotZstd(RecordBatch.MAGIC_VALUE_V0);
+        assumeAtLeastV2OrNotZstd(RecordBatch.MAGIC_VALUE_V1);

         ByteBuffer buffer = ByteBuffer.allocate(512);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V0,
@@ -598,31 +613,28 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void shouldThrowIllegalStateExceptionOnBuildWhenAborted() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(128);
         buffer.position(bufferOffset);

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH,
                 RecordBatch.NO_SEQUENCE, false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.abort();
-        try {
-            builder.build();
-            fail("Should have thrown KafkaException");
-        } catch (IllegalStateException e) {
-            // ok
-        }
+        assertThrows(IllegalStateException.class, builder::build);
     }

     @Test
     public void shouldResetBufferToInitialPositionOnAbort() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(128);
         buffer.position(bufferOffset);

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.append(0L, "a".getBytes(), "1".getBytes());
@@ -632,12 +644,13 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void shouldThrowIllegalStateExceptionOnCloseWhenAborted() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(128);
         buffer.position(bufferOffset);

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.abort();
@@ -651,12 +664,13 @@ public class MemoryRecordsBuilderTest {
     @Test
     public void shouldThrowIllegalStateExceptionOnAppendWhenAborted() {
-        expectExceptionWithZStd(compressionType, RecordBatch.MAGIC_VALUE_V0);
+        byte magic = RecordBatch.MAGIC_VALUE_V0;
+        assumeAtLeastV2OrNotZstd(magic);

         ByteBuffer buffer = ByteBuffer.allocate(128);
         buffer.position(bufferOffset);

-        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, RecordBatch.MAGIC_VALUE_V0, compressionType,
+        MemoryRecordsBuilder builder = new MemoryRecordsBuilder(buffer, magic, compressionType,
                 TimestampType.CREATE_TIME, 0L, 0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, RecordBatch.NO_SEQUENCE,
                 false, false, RecordBatch.NO_PARTITION_LEADER_EPOCH, buffer.capacity());
         builder.abort();
@@ -734,10 +748,7 @@ public class MemoryRecordsBuilderTest {
         }
     }

-    private void expectExceptionWithZStd(CompressionType compressionType, byte magic) {
-        if (compressionType == CompressionType.ZSTD && magic < MAGIC_VALUE_V2) {
-            exceptionRule.expect(IllegalArgumentException.class);
-            exceptionRule.expectMessage("ZStandard compression is not supported for magic " + magic);
-        }
+    private void assumeAtLeastV2OrNotZstd(byte magic) {
+        assumeTrue(compressionType != CompressionType.ZSTD || magic >= MAGIC_VALUE_V2);
     }
 }
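
The `assumeAtLeastV2OrNotZstd` helper above leans on JUnit's `Assume` rather than an expected exception: a false assumption aborts the test and reports that parameterized combination as skipped instead of failed. A minimal sketch of the skip semantics (hypothetical parameter values, not code from this change):

```java
import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class AssumeSemanticsTest {

    // Hypothetical stand-ins for values normally injected by the Parameterized runner.
    private final boolean zstd = true;
    private final byte magic = 0;

    @Test
    public void runsOnlyForSupportedCombinations() {
        // A false condition throws AssumptionViolatedException, which JUnit
        // reports as a skipped/ignored test rather than a failure.
        assumeTrue(!zstd || magic >= 2);
        // Assertions for the supported combinations would follow here.
    }
}
```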

View File

@@ -22,9 +22,7 @@ import org.apache.kafka.common.header.internals.RecordHeaders;
 import org.apache.kafka.common.record.MemoryRecords.RecordFilter.BatchRetention;
 import org.apache.kafka.common.utils.Utils;
 import org.apache.kafka.test.TestUtils;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -32,6 +30,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.function.Supplier;
 import java.util.List;

 import static java.util.Arrays.asList;
@@ -40,14 +39,13 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;

 @RunWith(value = Parameterized.class)
 public class MemoryRecordsTest {

-    @Rule
-    public ExpectedException exceptionRule = ExpectedException.none();
-
     private CompressionType compression;
     private byte magic;
     private long firstOffset;
@@ -74,7 +72,7 @@ public class MemoryRecordsTest {
     @Test
     public void testIterator() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         ByteBuffer buffer = ByteBuffer.allocate(1024);
@@ -159,8 +157,7 @@ public class MemoryRecordsTest {
     @Test
     public void testHasRoomForMethod() {
-        expectExceptionWithZStd(compression, magic);
-
+        assumeAtLeastV2OrNotZstd();
         MemoryRecordsBuilder builder = MemoryRecords.builder(ByteBuffer.allocate(1024), magic, compression,
                 TimestampType.CREATE_TIME, 0L);
         builder.append(0L, "a".getBytes(), "1".getBytes());
@@ -447,60 +444,59 @@ public class MemoryRecordsTest {
     @Test
     public void testFilterToBatchDiscard() {
-        if (compression != CompressionType.NONE || magic >= RecordBatch.MAGIC_VALUE_V2) {
-            expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();
+        assumeTrue(compression != CompressionType.NONE || magic >= MAGIC_VALUE_V2);

         ByteBuffer buffer = ByteBuffer.allocate(2048);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 0L);
         builder.append(10L, "1".getBytes(), "a".getBytes());
         builder.close();

         builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 1L);
         builder.append(11L, "2".getBytes(), "b".getBytes());
         builder.append(12L, "3".getBytes(), "c".getBytes());
         builder.close();

         builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 3L);
         builder.append(13L, "4".getBytes(), "d".getBytes());
         builder.append(20L, "5".getBytes(), "e".getBytes());
         builder.append(15L, "6".getBytes(), "f".getBytes());
         builder.close();

         builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 6L);
         builder.append(16L, "7".getBytes(), "g".getBytes());
         builder.close();

         buffer.flip();

         ByteBuffer filtered = ByteBuffer.allocate(2048);
         MemoryRecords.readableRecords(buffer).filterTo(new TopicPartition("foo", 0), new MemoryRecords.RecordFilter() {
             @Override
             protected BatchRetention checkBatchRetention(RecordBatch batch) {
                 // discard the second and fourth batches
                 if (batch.lastOffset() == 2L || batch.lastOffset() == 6L)
                     return BatchRetention.DELETE;
                 return BatchRetention.DELETE_EMPTY;
             }

             @Override
             protected boolean shouldRetainRecord(RecordBatch recordBatch, Record record) {
                 return true;
             }
         }, filtered, Integer.MAX_VALUE, BufferSupplier.NO_CACHING);

         filtered.flip();
         MemoryRecords filteredRecords = MemoryRecords.readableRecords(filtered);

         List<MutableRecordBatch> batches = TestUtils.toList(filteredRecords.batches());
         assertEquals(2, batches.size());
         assertEquals(0L, batches.get(0).lastOffset());
         assertEquals(5L, batches.get(1).lastOffset());
-        }
     }

     @Test
     public void testFilterToAlreadyCompactedLog() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         ByteBuffer buffer = ByteBuffer.allocate(2048);
@@ -642,7 +638,7 @@ public class MemoryRecordsTest {
     @Test
     public void testFilterToWithUndersizedBuffer() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         ByteBuffer buffer = ByteBuffer.allocate(1024);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 0L);
@@ -694,7 +690,7 @@ public class MemoryRecordsTest {
     @Test
     public void testToString() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         long timestamp = 1000000;
         MemoryRecords memoryRecords = MemoryRecords.withRecords(magic, compression,
@@ -726,7 +722,7 @@ public class MemoryRecordsTest {
     @Test
     public void testFilterTo() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         ByteBuffer buffer = ByteBuffer.allocate(2048);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic, compression, TimestampType.CREATE_TIME, 0L);
@@ -841,7 +837,7 @@ public class MemoryRecordsTest {
     @Test
     public void testFilterToPreservesLogAppendTime() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         long logAppendTime = System.currentTimeMillis();
@@ -887,7 +883,7 @@ public class MemoryRecordsTest {
     @Test
     public void testNextBatchSize() {
-        expectExceptionWithZStd(compression, magic);
+        assumeAtLeastV2OrNotZstd();

         ByteBuffer buffer = ByteBuffer.allocate(2048);
         MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, magic, compression,
@@ -902,37 +898,37 @@ public class MemoryRecordsTest {
         assertEquals(0, buffer.position());

         buffer.limit(1); // size not in buffer
-        assertEquals(null, records.firstBatchSize());
+        assertNull(records.firstBatchSize());
         buffer.limit(Records.LOG_OVERHEAD); // magic not in buffer
-        assertEquals(null, records.firstBatchSize());
+        assertNull(records.firstBatchSize());
         buffer.limit(Records.HEADER_SIZE_UP_TO_MAGIC); // payload not in buffer
         assertEquals(size, records.firstBatchSize().intValue());

         buffer.limit(size);
         byte magic = buffer.get(Records.MAGIC_OFFSET);
         buffer.put(Records.MAGIC_OFFSET, (byte) 10);
-        try {
-            records.firstBatchSize();
-            fail("Did not fail with corrupt magic");
-        } catch (CorruptRecordException e) {
-            // Expected exception
-        }
+        assertThrows(CorruptRecordException.class, records::firstBatchSize);
         buffer.put(Records.MAGIC_OFFSET, magic);

         buffer.put(Records.SIZE_OFFSET + 3, (byte) 0);
-        try {
-            records.firstBatchSize();
-            fail("Did not fail with corrupt size");
-        } catch (CorruptRecordException e) {
-            // Expected exception
-        }
+        assertThrows(CorruptRecordException.class, records::firstBatchSize);
+    }
+
+    @Test
+    public void testWithRecords() {
+        Supplier<MemoryRecords> recordsSupplier = () -> MemoryRecords.withRecords(magic, compression,
+                new SimpleRecord(10L, "key1".getBytes(), "value1".getBytes()));
+        if (compression != CompressionType.ZSTD || magic >= MAGIC_VALUE_V2) {
+            MemoryRecords memoryRecords = recordsSupplier.get();
+            String key = Utils.utf8(memoryRecords.batches().iterator().next().iterator().next().key());
+            assertEquals("key1", key);
+        } else {
+            assertThrows(IllegalArgumentException.class, recordsSupplier::get);
+        }
     }

-    private void expectExceptionWithZStd(CompressionType compressionType, byte magic) {
-        if (compressionType == CompressionType.ZSTD && magic < MAGIC_VALUE_V2) {
-            exceptionRule.expect(IllegalArgumentException.class);
-            exceptionRule.expectMessage("ZStandard compression is not supported for magic " + magic);
-        }
+    private void assumeAtLeastV2OrNotZstd() {
+        assumeTrue(compression != CompressionType.ZSTD || magic >= MAGIC_VALUE_V2);
     }

     @Parameterized.Parameters(name = "{index} magic={0}, firstOffset={1}, compressionType={2}")

View File

@@ -17,9 +17,7 @@
 package org.apache.kafka.connect.data;

 import org.apache.kafka.connect.errors.DataException;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;

 import java.nio.ByteBuffer;
 import java.util.Arrays;
@@ -30,6 +28,7 @@ import java.util.Map;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;

 public class StructTest {
@@ -285,9 +284,6 @@ public class StructTest {
         assertNotEquals(struct2.hashCode(), struct3.hashCode());
     }

-    @Rule
-    public ExpectedException thrown = ExpectedException.none();
-
     @Test
     public void testValidateStructWithNullValue() {
         Schema schema = SchemaBuilder.struct()
@@ -297,9 +293,9 @@ public class StructTest {
                 .build();

         Struct struct = new Struct(schema);
-        thrown.expect(DataException.class);
-        thrown.expectMessage("Invalid value: null used for required field: \"one\", schema type: STRING");
-        struct.validate();
+        Exception e = assertThrows(DataException.class, struct::validate);
+        assertEquals("Invalid value: null used for required field: \"one\", schema type: STRING",
+                e.getMessage());
     }

     @Test
@@ -307,13 +303,15 @@ public class StructTest {
         String fieldName = "field";
         FakeSchema fakeSchema = new FakeSchema();

-        thrown.expect(DataException.class);
-        thrown.expectMessage("Invalid Java object for schema type null: class java.lang.Object for field: \"field\"");
-        ConnectSchema.validateValue(fieldName, fakeSchema, new Object());
+        Exception e = assertThrows(DataException.class, () -> ConnectSchema.validateValue(fieldName,
+                fakeSchema, new Object()));
+        assertEquals("Invalid Java object for schema type null: class java.lang.Object for field: \"field\"",
+                e.getMessage());

-        thrown.expect(DataException.class);
-        thrown.expectMessage("Invalid Java object for schema type INT8: class java.lang.Object for field: \"field\"");
-        ConnectSchema.validateValue(fieldName, Schema.INT8_SCHEMA, new Object());
+        e = assertThrows(DataException.class, () -> ConnectSchema.validateValue(fieldName,
+                Schema.INT8_SCHEMA, new Object()));
+        assertEquals("Invalid Java object for schema type INT8: class java.lang.Object for field: \"field\"",
+                e.getMessage());
     }

     @Test
@@ -323,9 +321,7 @@ public class StructTest {
                 .field(fieldName, Schema.STRING_SCHEMA);
         Struct struct = new Struct(testSchema);

-        thrown.expect(DataException.class);
-        Field field = null;
-        struct.put(field, "valid");
+        assertThrows(DataException.class, () -> struct.put((Field) null, "valid"));
     }

     @Test
@@ -335,8 +331,8 @@ public class StructTest {
                 .field(fieldName, Schema.STRING_SCHEMA);
         Struct struct = new Struct(testSchema);

-        thrown.expect(DataException.class);
-        thrown.expectMessage("Invalid value: null used for required field: \"fieldName\", schema type: STRING");
-        struct.put(fieldName, null);
+        Exception e = assertThrows(DataException.class, () -> struct.put(fieldName, null));
+        assertEquals("Invalid value: null used for required field: \"fieldName\", schema type: STRING",
+                e.getMessage());
     }
 }

View File

@@ -26,12 +26,15 @@ import kafka.utils.Implicits._
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.record.{CompressionType, MemoryRecords, RecordBatch}
 import org.apache.kafka.common.utils.Utils
+import org.apache.kafka.test.IntegrationTest
 import org.junit.After
+import org.junit.experimental.categories.Category

 import scala.collection.Seq
 import scala.collection.mutable.ListBuffer
 import scala.util.Random

+@Category(Array(classOf[IntegrationTest]))
 abstract class AbstractLogCleanerIntegrationTest {

   var cleaner: LogCleaner = _

View File

@@ -64,7 +64,7 @@ versions += [
   jaxrs: "2.1.1",
   jfreechart: "1.0.0",
   jopt: "5.0.4",
-  junit: "4.12",
+  junit: "4.13-beta-2",
   kafka_0100: "0.10.0.1",
   kafka_0101: "0.10.1.1",
   kafka_0102: "0.10.2.2",

View File

@@ -51,9 +51,9 @@ import static org.apache.kafka.streams.StreamsConfig.consumerPrefix;
 import static org.apache.kafka.streams.StreamsConfig.producerPrefix;
 import static org.apache.kafka.test.StreamsTestUtils.getStreamsConfig;
 import static org.hamcrest.core.IsEqual.equalTo;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

View File

@@ -59,7 +59,7 @@ import kafka.utils.MockTime;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.fail;

 @Category({IntegrationTest.class})

View File

@@ -53,7 +53,7 @@ import java.util.Map;
 import java.util.Properties;

 import static org.hamcrest.core.IsEqual.equalTo;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;

 @Category({IntegrationTest.class})
 public class GlobalKTableIntegrationTest {

View File

@@ -65,7 +65,7 @@ import java.util.regex.Pattern;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;

 /**
  * End-to-end integration test based on using regex and named topics for creating sources, using

View File

@@ -61,8 +61,8 @@ import java.util.regex.Pattern;
 import static java.time.Duration.ofMillis;
 import static java.time.Duration.ofSeconds;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;

 @Category({IntegrationTest.class})
 public class RepartitionOptimizingIntegrationTest {

View File

@@ -25,8 +25,8 @@ import static org.apache.kafka.streams.internals.ApiUtils.prepareMillisCheckFail
 import static org.apache.kafka.streams.internals.ApiUtils.validateMillisecondDuration;
 import static org.apache.kafka.streams.internals.ApiUtils.validateMillisecondInstant;
 import static org.hamcrest.CoreMatchers.containsString;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
@@ -107,4 +107,4 @@ public class ApiUtilsTest {
         assertThat(failMsgPrefix, containsString("variableName"));
         assertThat(failMsgPrefix, containsString("someValue"));
     }
 }

View File

@@ -36,7 +36,7 @@ import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;

 import static org.hamcrest.CoreMatchers.equalTo;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;

 public class PrintedTest {

View File

@@ -35,7 +35,7 @@ import java.util.regex.Matcher;
 import java.util.regex.Pattern;

 import static org.hamcrest.CoreMatchers.is;
-import static org.junit.Assert.assertThat;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;

View File

@@ -47,10 +47,10 @@ import java.util.regex.Pattern;
 import static java.util.Arrays.asList;
 import static org.apache.kafka.streams.Topology.AutoOffsetReset;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;

 @SuppressWarnings("unchecked")

View File

@@ -25,9 +25,7 @@ import org.apache.kafka.streams.processor.ProcessorContext;
 import org.easymock.EasyMock;
 import org.easymock.EasyMockSupport;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;

 import java.util.Arrays;
 import java.util.Collections;
@@ -44,16 +42,13 @@ public class KStreamFlatTransformTest extends EasyMockSupport {
     private KStreamFlatTransformProcessor<Number, Number, Integer, Integer> processor;

-    @Rule
-    public final ExpectedException exception = ExpectedException.none();
-
     @Before
     public void setUp() {
         inputKey = 1;
         inputValue = 10;
         transformer = mock(Transformer.class);
         context = strictMock(ProcessorContext.class);
-        processor = new KStreamFlatTransformProcessor<Number, Number, Integer, Integer>(transformer);
+        processor = new KStreamFlatTransformProcessor<>(transformer);
     }

     @Test
@@ -139,4 +134,4 @@ public class KStreamFlatTransformTest extends EasyMockSupport {
         verifyAll();
         assertTrue(processor instanceof KStreamFlatTransformProcessor);
     }
 }

View File

@@ -53,9 +53,7 @@ import org.apache.kafka.test.MockProcessorSupplier;
 import org.apache.kafka.test.MockValueJoiner;
 import org.apache.kafka.test.StreamsTestUtils;
 import org.junit.Before;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;

 import java.time.Duration;
 import java.util.Arrays;
@@ -71,9 +69,10 @@ import static java.util.Arrays.asList;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.notNullValue;
 import static org.hamcrest.core.IsInstanceOf.instanceOf;
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertThrows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -91,9 +90,6 @@ public class KStreamImplTest {
     private Serde<String> mySerde = new Serdes.StringSerde();

-    @Rule
-    public final ExpectedException exception = ExpectedException.none();
-
     @Before
     public void before() {
         builder = new StreamsBuilder();
@ -547,16 +543,14 @@ public class KStreamImplTest {
@Test @Test
public void shouldNotAllowNullTransformSupplierOnTransform() { public void shouldNotAllowNullTransformSupplierOnTransform() {
exception.expect(NullPointerException.class); final Exception e = assertThrows(NullPointerException.class, () -> testStream.transform(null));
exception.expectMessage("transformerSupplier can't be null"); assertEquals("transformerSupplier can't be null", e.getMessage());
testStream.transform(null);
} }
@Test @Test
public void shouldNotAllowNullTransformSupplierOnFlatTransform() { public void shouldNotAllowNullTransformSupplierOnFlatTransform() {
exception.expect(NullPointerException.class); final Exception e = assertThrows(NullPointerException.class, () -> testStream.flatTransform(null));
exception.expectMessage("transformerSupplier can't be null"); assertEquals("transformerSupplier can't be null", e.getMessage());
testStream.flatTransform(null);
} }
@Test(expected = NullPointerException.class) @Test(expected = NullPointerException.class)
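For reference, a minimal self-contained sketch of the migration pattern applied in the hunks above. It is illustrative only: the transform helper and its message string are hypothetical stand-ins for testStream.transform(null), not code from this repository. assertThrows, new in JUnit 4.13, scopes the expectation to a single lambda and returns the thrown exception, so message checks that previously needed ExpectedException.expectMessage become plain assertEquals calls. The @Test(expected = ...) form retained above still works where only the exception type matters.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;

import org.junit.Test;

public class AssertThrowsMigrationSketch {

    // Hypothetical subject under test; stands in for testStream.transform(null).
    private static void transform(final Object transformerSupplier) {
        if (transformerSupplier == null) {
            throw new NullPointerException("transformerSupplier can't be null");
        }
    }

    @Test
    public void shouldNotAllowNullTransformerSupplier() {
        // assertThrows returns the exception, so the message can be verified directly.
        final Exception e = assertThrows(NullPointerException.class, () -> transform(null));
        assertEquals("transformerSupplier can't be null", e.getMessage());
    }

    // The annotation form still passes, but it cannot pinpoint which statement
    // threw and gives no access to the message.
    @Test(expected = NullPointerException.class)
    public void shouldNotAllowNullTransformerSupplierAnnotationForm() {
        transform(null);
    }
}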

View File

@@ -43,8 +43,8 @@ import java.util.Set;
import static java.time.Duration.ofMillis; import static java.time.Duration.ofMillis;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName;
import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class KStreamKStreamJoinTest { public class KStreamKStreamJoinTest {
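Most of the remaining files in this commit change only imports: org.junit.Assert.assertThat is deprecated in JUnit 4.13 in favor of org.hamcrest.MatcherAssert.assertThat, which Assert.assertThat had always delegated to, so every call site compiles unchanged. A sketch under that assumption (the metric names below are invented for illustration):

import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.MatcherAssert.assertThat; // was: org.junit.Assert.assertThat

import java.util.Arrays;
import java.util.List;

import org.junit.Test;

public class MatcherAssertMigrationSketch {

    @Test
    public void callSitesCompileUnchanged() {
        // Only the static import moved; the assertion reads exactly as before.
        final List<String> metricNames = Arrays.asList("commit-latency-avg", "commit-latency-max");
        assertThat(metricNames, hasItem("commit-latency-avg"));
    }
}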

View File

@@ -40,9 +40,9 @@ import java.util.Properties;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static org.apache.kafka.test.StreamsTestUtils.getMetricByName; import static org.apache.kafka.test.StreamsTestUtils.getMetricByName;
import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
public class KTableSourceTest { public class KTableSourceTest {

View File

@@ -30,7 +30,7 @@ import org.junit.Test;
import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.core.IsSame.sameInstance; import static org.hamcrest.core.IsSame.sameInstance;
import static org.hamcrest.core.IsNot.not; import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class TransformerSupplierAdapterTest extends EasyMockSupport { public class TransformerSupplierAdapterTest extends EasyMockSupport {

View File

@@ -30,7 +30,7 @@ import org.junit.Test;
import static java.time.Duration.ofMillis; import static java.time.Duration.ofMillis;
import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
public class GraphGraceSearchUtilTest { public class GraphGraceSearchUtilTest {

View File

@@ -34,7 +34,7 @@ import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_BATCH;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_END; import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_END;
import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_START; import static org.apache.kafka.test.MockStateRestoreListener.RESTORE_START;
import static org.hamcrest.core.Is.is; import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
@@ -219,4 +219,4 @@ public class CompositeRestoreListenerTest {
} }
} }
} }

View File

@@ -32,8 +32,8 @@ import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.mock; import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify; import static org.easymock.EasyMock.verify;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
public class GlobalProcessorContextImplTest { public class GlobalProcessorContextImplTest {
private static final String GLOBAL_STORE_NAME = "global-store"; private static final String GLOBAL_STORE_NAME = "global-store";

View File

@@ -51,11 +51,11 @@ import static java.time.Duration.ofSeconds;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static org.apache.kafka.common.utils.Utils.mkSet; import static org.apache.kafka.common.utils.Utils.mkSet;
import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;

View File

@@ -31,7 +31,7 @@ import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.mock; import static org.easymock.EasyMock.mock;
import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.replay;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
public class ProcessorContextTest { public class ProcessorContextTest {

View File

@@ -91,12 +91,12 @@ import static org.apache.kafka.common.utils.Utils.mkProperties;
import static org.apache.kafka.streams.processor.internals.AbstractStateManager.CHECKPOINT_FILE_NAME; import static org.apache.kafka.streams.processor.internals.AbstractStateManager.CHECKPOINT_FILE_NAME;
import static org.apache.kafka.streams.processor.internals.StreamThread.getSharedAdminClientId; import static org.apache.kafka.streams.processor.internals.StreamThread.getSharedAdminClientId;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame; import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;

View File

@@ -61,9 +61,9 @@ import static java.time.Duration.ofMillis;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")

View File

@@ -34,10 +34,10 @@ import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.PROCESSOR_NODE_METRICS_GROUP; import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.PROCESSOR_NODE_METRICS_GROUP;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.addAvgMaxLatency; import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.addAvgMaxLatency;
import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.addInvocationRateAndCount; import static org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl.addInvocationRateAndCount;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
public class StreamsMetricsImplTest { public class StreamsMetricsImplTest {

View File

@@ -39,9 +39,9 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsEqual.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
public abstract class AbstractKeyValueStoreTest { public abstract class AbstractKeyValueStoreTest {

View File

@@ -47,11 +47,11 @@ import java.util.Map;
import static org.apache.kafka.streams.state.internals.ThreadCacheTest.memoryCacheEntrySize; import static org.apache.kafka.streams.state.internals.ThreadCacheTest.memoryCacheEntrySize;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull; import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@@ -346,4 +346,4 @@ public class CachingKeyValueStoreTest extends AbstractKeyValueStoreTest {
forwarded.put(key, new Change<>(newValue, oldValue)); forwarded.put(key, new Change<>(newValue, oldValue));
} }
} }
} }

View File

@@ -26,9 +26,7 @@ import org.apache.kafka.test.StateStoreProviderStub;
import org.apache.kafka.test.StreamsTestUtils; import org.apache.kafka.test.StreamsTestUtils;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
@@ -40,10 +38,12 @@ import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsEqual.equalTo;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
public class CompositeReadOnlyWindowStoreTest { public class CompositeReadOnlyWindowStoreTest {
private static final long WINDOW_SIZE = 30_000; private static final long WINDOW_SIZE = 30_000;
private final String storeName = "window-store"; private final String storeName = "window-store";
private StateStoreProviderStub stubProviderOne; private StateStoreProviderStub stubProviderOne;
private StateStoreProviderStub stubProviderTwo; private StateStoreProviderStub stubProviderTwo;
@@ -51,9 +51,6 @@ public class CompositeReadOnlyWindowStoreTest {
private ReadOnlyWindowStoreStub<String, String> underlyingWindowStore; private ReadOnlyWindowStoreStub<String, String> underlyingWindowStore;
private ReadOnlyWindowStoreStub<String, String> otherUnderlyingStore; private ReadOnlyWindowStoreStub<String, String> otherUnderlyingStore;
@Rule
public final ExpectedException windowStoreIteratorException = ExpectedException.none();
@Before @Before
public void before() { public void before() {
stubProviderOne = new StateStoreProviderStub(false); stubProviderOne = new StateStoreProviderStub(false);
@@ -151,9 +148,7 @@ public class CompositeReadOnlyWindowStoreTest {
final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new
StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo");
final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10));
assertThrows(NoSuchElementException.class, windowStoreIterator::peekNextKey);
windowStoreIteratorException.expect(NoSuchElementException.class);
windowStoreIterator.peekNextKey();
} }
@Test @Test
@@ -161,9 +156,7 @@ public class CompositeReadOnlyWindowStoreTest {
final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new final CompositeReadOnlyWindowStore<Object, Object> store = new CompositeReadOnlyWindowStore<>(new
StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo"); StateStoreProviderStub(false), QueryableStoreTypes.windowStore(), "foo");
final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10)); final WindowStoreIterator<Object> windowStoreIterator = store.fetch("key", ofEpochMilli(1), ofEpochMilli(10));
assertThrows(NoSuchElementException.class, windowStoreIterator::next);
windowStoreIteratorException.expect(NoSuchElementException.class);
windowStoreIterator.next();
} }
@Test @Test
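When the throwing call takes no arguments, a method reference serves as the ThrowingRunnable, as in the two fetch hunks above. A minimal sketch using a plain JDK iterator as a stand-in for the window-store iterator (an assumption for illustration); note that unlike the old rule, which was satisfied if any statement after expect() threw, assertThrows fails unless the exception comes from the supplied call:

import static org.junit.Assert.assertThrows;

import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;

import org.junit.Test;

public class AssertThrowsMethodReferenceSketch {

    @Test
    public void shouldThrowNoSuchElementOnEmptyIterator() {
        final Iterator<String> iterator = Collections.<String>emptyList().iterator();
        // iterator::next is a valid ThrowingRunnable even though next() returns a
        // value; the expectation is pinned to this single call.
        assertThrows(NoSuchElementException.class, iterator::next);
    }
}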

View File

@@ -30,8 +30,8 @@ import java.util.Arrays;
import java.util.List; import java.util.List;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
public class InMemoryLRUCacheStoreTest extends AbstractKeyValueStoreTest { public class InMemoryLRUCacheStoreTest extends AbstractKeyValueStoreTest {

View File

@@ -47,8 +47,8 @@ import java.util.Map;
import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
@RunWith(EasyMockRunner.class) @RunWith(EasyMockRunner.class)
@@ -229,4 +229,4 @@ public class MeteredKeyValueStoreTest {
} }
} }

View File

@@ -48,8 +48,8 @@ import java.util.Map;
import static org.apache.kafka.common.utils.Utils.mkEntry; import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
@RunWith(EasyMockRunner.class) @RunWith(EasyMockRunner.class)
@@ -248,4 +248,4 @@ public class MeteredSessionStoreTest {
return this.metrics.metric(new MetricName(name, "stream-scope-metrics", "", this.tags)); return this.metrics.metric(new MetricName(name, "stream-scope-metrics", "", this.tags));
} }
} }

View File

@@ -51,9 +51,9 @@ import java.util.List;
import static org.easymock.EasyMock.replay; import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset; import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify; import static org.easymock.EasyMock.verify;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.matchesPattern; import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
/** /**

View File

@@ -36,7 +36,6 @@ import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.NoOpRecordCollector; import org.apache.kafka.test.NoOpRecordCollector;
import org.apache.kafka.test.TestUtils; import org.apache.kafka.test.TestUtils;
import org.junit.After; import org.junit.After;
import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.junit.runner.RunWith; import org.junit.runner.RunWith;
@@ -377,7 +376,7 @@ public class RocksDBSegmentedBytesStoreTest {
// Bulk loading is enabled during recovery. // Bulk loading is enabled during recovery.
for (final KeyValueSegment segment : bytesStore.getSegments()) { for (final KeyValueSegment segment : bytesStore.getSegments()) {
Assert.assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(1 << 30)); assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(1 << 30));
} }
final List<KeyValue<Windowed<String>, Long>> expected = new ArrayList<>(); final List<KeyValue<Windowed<String>, Long>> expected = new ArrayList<>();
@@ -401,12 +400,12 @@ public class RocksDBSegmentedBytesStoreTest {
restoreListener.onRestoreStart(null, bytesStore.name(), 0L, 0L); restoreListener.onRestoreStart(null, bytesStore.name(), 0L, 0L);
for (final KeyValueSegment segment : bytesStore.getSegments()) { for (final KeyValueSegment segment : bytesStore.getSegments()) {
Assert.assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(1 << 30)); assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(1 << 30));
} }
restoreListener.onRestoreEnd(null, bytesStore.name(), 0L); restoreListener.onRestoreEnd(null, bytesStore.name(), 0L);
for (final KeyValueSegment segment : bytesStore.getSegments()) { for (final KeyValueSegment segment : bytesStore.getSegments()) {
Assert.assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(4)); assertThat(segment.getOptions().level0FileNumCompactionTrigger(), equalTo(4));
} }
} }

View File

@@ -51,9 +51,9 @@ import java.util.Set;
import static java.nio.charset.StandardCharsets.UTF_8; import static java.nio.charset.StandardCharsets.UTF_8;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;

View File

@@ -36,9 +36,9 @@ import java.util.List;
import static java.util.Arrays.asList; import static java.util.Arrays.asList;
import static org.hamcrest.CoreMatchers.hasItem; import static org.hamcrest.CoreMatchers.hasItem;
import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
public class RocksDBTimestampedStoreTest extends RocksDBStoreTest { public class RocksDBTimestampedStoreTest extends RocksDBStoreTest {

View File

@@ -74,10 +74,10 @@ import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap; import static org.apache.kafka.common.utils.Utils.mkMap;
import static org.apache.kafka.common.utils.Utils.mkProperties; import static org.apache.kafka.common.utils.Utils.mkProperties;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@@ -808,7 +808,7 @@ public class TopologyTestDriverTest {
public void shouldNotUpdateStoreForSmallerValue() { public void shouldNotUpdateStoreForSmallerValue() {
setup(); setup();
testDriver.pipeInput(recordFactory.create("input-topic", "a", 1L, 9999L)); testDriver.pipeInput(recordFactory.create("input-topic", "a", 1L, 9999L));
Assert.assertThat(store.get("a"), equalTo(21L)); assertThat(store.get("a"), equalTo(21L));
OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 21L); OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 21L);
Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer)); Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer));
} }
@@ -817,7 +817,7 @@ public class TopologyTestDriverTest {
public void shouldNotUpdateStoreForLargerValue() { public void shouldNotUpdateStoreForLargerValue() {
setup(); setup();
testDriver.pipeInput(recordFactory.create("input-topic", "a", 42L, 9999L)); testDriver.pipeInput(recordFactory.create("input-topic", "a", 42L, 9999L));
Assert.assertThat(store.get("a"), equalTo(42L)); assertThat(store.get("a"), equalTo(42L));
OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 42L); OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 42L);
Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer)); Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer));
} }
@@ -826,7 +826,7 @@ public class TopologyTestDriverTest {
public void shouldUpdateStoreForNewKey() { public void shouldUpdateStoreForNewKey() {
setup(); setup();
testDriver.pipeInput(recordFactory.create("input-topic", "b", 21L, 9999L)); testDriver.pipeInput(recordFactory.create("input-topic", "b", 21L, 9999L));
Assert.assertThat(store.get("b"), equalTo(21L)); assertThat(store.get("b"), equalTo(21L));
OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 21L); OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "a", 21L);
OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "b", 21L); OutputVerifier.compareKeyValue(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer), "b", 21L);
Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer)); Assert.assertNull(testDriver.readOutput("result-topic", stringDeserializer, longDeserializer));