MINOR: Quarantine some flaky tests (#17779)

Reviewers: Colin Patrick McCabe <cmccabe@apache.org>
Author: David Arthur, 2024-11-12 19:34:44 -05:00 (committed by GitHub)
Parent: adf4b6eb39
Commit: edab667a9a
5 changed files with 9 additions and 0 deletions
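
The commit quarantines known-flaky tests by annotating them with @Flaky from org.apache.kafka.common.test.api, each annotation naming the JIRA ticket that tracks the flakiness, and it widens one import-control rule so the affected packages may import org.apache.kafka.common.test. As background, an annotation like this is commonly built as a JUnit 5 meta-annotation that carries a @Tag, so that quarantined tests can be filtered by tag at run time. The sketch below illustrates that general pattern only; the tag name "flaky" and the shape of the declaration are assumptions, not Kafka's actual source.

// Sketch of a tag-backed "flaky" annotation (illustrative; the tag name and
// declaration shape are assumptions, not the actual Kafka definition).
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.jupiter.api.Tag;

@Documented
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Tag("flaky")  // JUnit 5 picks this up as a meta-annotation on annotated tests
public @interface Flaky {
    /** JIRA ticket tracking the flaky behavior, e.g. "KAFKA-17988". */
    String value();
}

With such an annotation in place, a quarantined test still runs under @Test or @ParameterizedTest as usual, but a tag filter can include or exclude it in a given build step.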

Changed file 1 of 5:

@@ -44,6 +44,7 @@
 <allow pkg="org.apache.kafka.common.utils" />
 <allow pkg="org.apache.kafka.common.errors" exact-match="true" />
 <allow pkg="org.apache.kafka.common.memory" />
+<allow pkg="org.apache.kafka.common.test" />
 <subpackage name="server">

Changed file 2 of 5:

@@ -24,6 +24,7 @@ import org.apache.kafka.clients.admin.RaftVoterEndpoint;
 import org.apache.kafka.common.Uuid;
 import org.apache.kafka.common.test.KafkaClusterTestKit;
 import org.apache.kafka.common.test.TestKitNodes;
+import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.server.common.KRaftVersion;
 import org.apache.kafka.test.TestUtils;
@@ -129,6 +130,7 @@ public class ReconfigurableQuorumIntegrationTest {
 }
 }
+@Flaky("KAFKA-17988")
 @Test
 public void testRemoveAndAddSameController() throws Exception {
 try (KafkaClusterTestKit cluster = new KafkaClusterTestKit.Builder(

Changed file 3 of 5:

@@ -29,6 +29,7 @@ import org.apache.kafka.common.quota.{ClientQuotaAlteration, ClientQuotaEntity}
 import org.apache.kafka.common.record.{MemoryRecords, SimpleRecord}
 import org.apache.kafka.common.requests.{ProduceRequest, ProduceResponse}
 import org.apache.kafka.common.security.auth.SecurityProtocol
+import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.common.{KafkaException, requests}
 import org.apache.kafka.network.SocketServerConfigs
 import org.apache.kafka.server.config.QuotaConfig
@@ -81,6 +82,7 @@ class DynamicConnectionQuotaTest extends BaseRequestTest {
 }
 }
+@Flaky("KAFKA-17999")
 @ParameterizedTest
 @ValueSource(strings = Array("kraft"))
 def testDynamicConnectionQuota(quorum: String): Unit = {

Changed file 4 of 5:

@@ -51,6 +51,7 @@ import org.apache.kafka.common.requests.FetchRequest.PartitionData
 import org.apache.kafka.common.requests.ProduceResponse.PartitionResponse
 import org.apache.kafka.common.requests._
 import org.apache.kafka.common.security.auth.KafkaPrincipal
+import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.common.utils.{Exit, LogContext, Time, Utils}
 import org.apache.kafka.coordinator.transaction.TransactionLogConfig
 import org.apache.kafka.image._
@@ -4342,6 +4343,7 @@ class ReplicaManagerTest {
 }
 }
+@Flaky("KAFKA-18000")
 @Test
 def testSuccessfulBuildRemoteLogAuxStateMetrics(): Unit = {
 val tp0 = new TopicPartition(topic, 0)

Changed file 5 of 5:

@@ -18,6 +18,7 @@ package org.apache.kafka.tiered.storage.integration;
 import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.common.IsolationLevel;
+import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.server.log.remote.storage.RemoteLogManagerConfig;
 import org.apache.kafka.tiered.storage.TieredStorageTestBuilder;
 import org.apache.kafka.tiered.storage.TieredStorageTestHarness;
@@ -36,6 +37,7 @@ import static org.apache.kafka.tiered.storage.specs.RemoteFetchCount.OperationTy
 * Test Cases:
 * Elementary offloads and fetches from tiered storage using consumer with read_committed isolation level.
 */
+@Flaky("KAFKA-17998")
 public final class OffloadAndTxnConsumeFromLeaderTest extends TieredStorageTestHarness {
 /**
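
Closing note on how a quarantine like this is typically consumed, assuming (as in the sketch above) that @Flaky resolves to a JUnit 5 tag such as "flaky": a tag filter lets one test run skip the quarantined tests while a separate run executes only them. The example below shows the generic JUnit Platform launcher mechanism; it is not Kafka's build configuration, and the package name is hypothetical.

// Generic JUnit Platform tag filtering, assuming quarantined tests carry a "flaky" tag.
// The package selector is hypothetical; this is not Kafka's actual build wiring.
import static org.junit.platform.engine.discovery.DiscoverySelectors.selectPackage;
import static org.junit.platform.launcher.core.LauncherDiscoveryRequestBuilder.request;

import java.io.PrintWriter;

import org.junit.platform.launcher.Launcher;
import org.junit.platform.launcher.LauncherDiscoveryRequest;
import org.junit.platform.launcher.TagFilter;
import org.junit.platform.launcher.core.LauncherFactory;
import org.junit.platform.launcher.listeners.SummaryGeneratingListener;

public class ExcludeFlakyTestsExample {
    public static void main(String[] args) {
        // Discover tests in a hypothetical package, skipping anything tagged "flaky".
        LauncherDiscoveryRequest discovery = request()
                .selectors(selectPackage("org.example.tests"))
                .filters(TagFilter.excludeTags("flaky"))
                .build();

        Launcher launcher = LauncherFactory.create();
        SummaryGeneratingListener listener = new SummaryGeneratingListener();
        launcher.execute(discovery, listener);
        listener.getSummary().printTo(new PrintWriter(System.out));
    }
}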