mirror of https://github.com/apache/kafka.git
MINOR: Quarantine some flaky tests (#17779)
Reviewers: Colin Patrick McCabe <cmccabe@apache.org>
This commit is contained in:
parent adf4b6eb39
commit edab667a9a
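
The change is mechanical across the files below: each quarantined test gains a @Flaky annotation carrying the JIRA ticket that tracks the flakiness, plus the matching import of org.apache.kafka.common.test.api.Flaky. A minimal sketch of the resulting pattern follows; the class name, test name, and body are illustrative, while the import path, annotation, and ticket-number argument mirror the hunks below.

// Sketch of the quarantine pattern applied in this commit (illustrative class).
import org.apache.kafka.common.test.api.Flaky;
import org.junit.jupiter.api.Test;

public class ExampleQuarantinedTest {

    @Flaky("KAFKA-17988")   // JIRA ticket tracking why this test is flaky
    @Test
    public void testSomethingIntermittent() throws Exception {
        // The test body is unchanged; the annotation only tags the test so CI
        // can report or filter it separately from the stable suite.
    }
}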
@@ -44,6 +44,7 @@
     <allow pkg="org.apache.kafka.common.utils" />
     <allow pkg="org.apache.kafka.common.errors" exact-match="true" />
     <allow pkg="org.apache.kafka.common.memory" />
+    <allow pkg="org.apache.kafka.common.test" />


     <subpackage name="server">
@@ -24,6 +24,7 @@ import org.apache.kafka.clients.admin.RaftVoterEndpoint;
 import org.apache.kafka.common.Uuid;
 import org.apache.kafka.common.test.KafkaClusterTestKit;
 import org.apache.kafka.common.test.TestKitNodes;
+import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.server.common.KRaftVersion;
 import org.apache.kafka.test.TestUtils;

@@ -129,6 +130,7 @@ public class ReconfigurableQuorumIntegrationTest {
         }
     }

+    @Flaky("KAFKA-17988")
     @Test
     public void testRemoveAndAddSameController() throws Exception {
         try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(
@@ -29,6 +29,7 @@ import org.apache.kafka.common.quota.{ClientQuotaAlteration, ClientQuotaEntity}
 import org.apache.kafka.common.record.{MemoryRecords, SimpleRecord}
 import org.apache.kafka.common.requests.{ProduceRequest, ProduceResponse}
 import org.apache.kafka.common.security.auth.SecurityProtocol
+import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.common.{KafkaException, requests}
 import org.apache.kafka.network.SocketServerConfigs
 import org.apache.kafka.server.config.QuotaConfig
@@ -81,6 +82,7 @@ class DynamicConnectionQuotaTest extends BaseRequestTest {
    }
  }

+  @Flaky("KAFKA-17999")
  @ParameterizedTest
  @ValueSource(strings = Array("kraft"))
  def testDynamicConnectionQuota(quorum: String): Unit = {
@@ -51,6 +51,7 @@ import org.apache.kafka.common.requests.FetchRequest.PartitionData
 import org.apache.kafka.common.requests.ProduceResponse.PartitionResponse
 import org.apache.kafka.common.requests._
 import org.apache.kafka.common.security.auth.KafkaPrincipal
+import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.common.utils.{Exit, LogContext, Time, Utils}
 import org.apache.kafka.coordinator.transaction.TransactionLogConfig
 import org.apache.kafka.image._
@@ -4342,6 +4343,7 @@ class ReplicaManagerTest {
    }
  }

+  @Flaky("KAFKA-18000")
  @Test
  def testSuccessfulBuildRemoteLogAuxStateMetrics(): Unit = {
    val tp0 = new TopicPartition(topic, 0)
@@ -18,6 +18,7 @@ package org.apache.kafka.tiered.storage.integration;

 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.IsolationLevel;
+import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.server.log.remote.storage.RemoteLogManagerConfig;
 import org.apache.kafka.tiered.storage.TieredStorageTestBuilder;
 import org.apache.kafka.tiered.storage.TieredStorageTestHarness;
@@ -36,6 +37,7 @@ import static org.apache.kafka.tiered.storage.specs.RemoteFetchCount.OperationTy
  * Test Cases:
  * Elementary offloads and fetches from tiered storage using consumer with read_committed isolation level.
  */
+@Flaky("KAFKA-17998")
 public final class OffloadAndTxnConsumeFromLeaderTest extends TieredStorageTestHarness {

     /**
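
For context, Flaky lives under org.apache.kafka.common.test.api as a JUnit 5 annotation; the new <allow pkg="org.apache.kafka.common.test" /> entry in the import-control hunk above is presumably what lets the touched sources import it without a checkstyle violation. An annotation of this kind is typically defined as a runtime-retained @Tag meta-annotation, roughly as sketched below; this is an assumption about the shape of the definition, not a copy of Kafka's actual source.

// Illustrative only: a typical JUnit 5 "quarantine" meta-annotation. Kafka's
// real org.apache.kafka.common.test.api.Flaky may differ in its exact details.
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.jupiter.api.Tag;

@Documented
@Target({ElementType.TYPE, ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Tag("flaky")               // lets the build include or exclude quarantined tests by tag
public @interface Flaky {
    /** The JIRA ticket that tracks the flaky test, e.g. "KAFKA-17988". */
    String value();
}

With the tag in place, a standard JUnit Platform tag filter (for example, excluding tests tagged "flaky") can keep quarantined tests out of the blocking CI run while they continue to execute elsewhere.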