MINOR remove some flaky annotations (#18357)
Remove the @Flaky annotation from the following tests (a short sketch of the annotation itself follows the file summary below):

* RemoteLogManagerTest#testFetchOffsetByTimestampWithTieredStorageDoesNotFetchIndexWhenExistsLocally
* All the children of BaseConsumerTest#testCoordinatorFailover
* TransactionsTest#testFailureToFenceEpoch
* TransactionsTest#testReadCommittedConsumerShouldNotSeeUndecidedData
* MetricsDuringTopicCreationDeletionTest#testMetricsDuringTopicCreateDelete
* ProduceRequestTest#testProduceWithInvalidTimestamp

Reviewers: Chia-Ping Tsai <[email protected]>
mumrah authored Jan 2, 2025
1 parent dd0fd55 · commit a2a8d87
Showing 5 changed files with 1 addition and 10 deletions.
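
For context, the @Flaky marker being removed is Kafka's own JUnit 5 annotation (org.apache.kafka.common.test.api.Flaky, visible in the deleted imports below); its value names the JIRA ticket tracking the flakiness. As an illustration only (an assumption, not the actual Kafka source), a tag-based marker like this is typically defined along the following lines, so that a build can include or exclude everything tagged "flaky":

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import org.junit.jupiter.api.Tag;

// Hypothetical sketch of a tag-based flaky marker; the real
// org.apache.kafka.common.test.api.Flaky may differ in detail.
@Documented
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Tag("flaky")           // JUnit 5 meta-annotation: tagged tests can be filtered
public @interface Flaky {
    // The JIRA ticket tracking the flakiness, e.g. "KAFKA-17779".
    String value();
}

Under that assumption, a Gradle test task could quarantine tagged tests with useJUnitPlatform { excludeTags "flaky" }; deleting the annotation, as this commit does for the six tests above, simply returns them to the regular suite.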
core/src/test/java/kafka/log/remote/RemoteLogManagerTest.java (2 changes: 0 additions & 2 deletions)
@@ -36,7 +36,6 @@
 import org.apache.kafka.common.record.SimpleRecord;
 import org.apache.kafka.common.requests.FetchRequest;
 import org.apache.kafka.common.security.auth.SecurityProtocol;
-import org.apache.kafka.common.test.api.Flaky;
 import org.apache.kafka.common.utils.MockTime;
 import org.apache.kafka.common.utils.Time;
 import org.apache.kafka.server.common.OffsetAndEpoch;
@@ -1683,7 +1682,6 @@ private void doTestFindOffsetByTimestamp(long ts, long startOffset, int targetLe
         remoteLogManager.onLeadershipChange(Collections.singleton(mockPartition(leaderTopicIdPartition)), Collections.emptySet(), topicIds);
     }

-    @Flaky("KAFKA-17779")
     @Test
     void testFetchOffsetByTimestampWithTieredStorageDoesNotFetchIndexWhenExistsLocally() throws Exception {
         TopicPartition tp = new TopicPartition("sample", 0);
BaseConsumerTest.scala
@@ -23,7 +23,6 @@ import org.apache.kafka.common.header.Headers
 import org.apache.kafka.common.{ClusterResource, ClusterResourceListener, PartitionInfo}
 import org.apache.kafka.common.internals.Topic
 import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, Deserializer, Serializer}
-import org.apache.kafka.common.test.api.Flaky
 import org.junit.jupiter.api.Assertions._
 import org.junit.jupiter.params.ParameterizedTest
 import org.junit.jupiter.params.provider.MethodSource
@@ -80,7 +79,6 @@ abstract class BaseConsumerTest extends AbstractConsumerTest {
     assertNotEquals(0, BaseConsumerTest.updateConsumerCount.get())
   }

-  @Flaky("KAFKA-15920")
   @ParameterizedTest(name = TestInfoUtils.TestWithParameterizedQuorumAndGroupProtocolNames)
   @MethodSource(Array("getTestQuorumAndGroupProtocolParametersAll"))
   def testCoordinatorFailover(quorum: String, groupProtocol: String): Unit = {
TransactionsTest.scala
@@ -23,7 +23,6 @@ import org.apache.kafka.clients.consumer._
 import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.errors.{InvalidProducerEpochException, ProducerFencedException, TimeoutException}
-import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.coordinator.group.GroupCoordinatorConfig
 import org.apache.kafka.coordinator.transaction.{TransactionLogConfig, TransactionStateManagerConfig}
 import org.apache.kafka.server.config.{ReplicationConfigs, ServerConfigs, ServerLogConfigs}
@@ -172,7 +171,6 @@ class TransactionsTest extends IntegrationTestHarness {
     }
   }

-  @Flaky("KAFKA-18036")
   @ParameterizedTest(name = TestInfoUtils.TestWithParameterizedQuorumAndGroupProtocolNames)
   @MethodSource(Array("getTestQuorumAndGroupProtocolParametersAll"))
   def testReadCommittedConsumerShouldNotSeeUndecidedData(quorum: String, groupProtocol: String): Unit = {
@@ -850,7 +848,6 @@ class TransactionsTest extends IntegrationTestHarness {
     }
   }

-  @Flaky("KAFKA-18306")
   @ParameterizedTest(name = "{displayName}.quorum={0}.groupProtocol={1}.isTV2Enabled={2}")
   @CsvSource(Array(
     "kraft, classic, false",
MetricsDuringTopicCreationDeletionTest.scala
@@ -24,7 +24,6 @@ import kafka.utils.{Logging, TestUtils}
 import scala.jdk.CollectionConverters._
 import org.junit.jupiter.api.{BeforeEach, TestInfo}
 import com.yammer.metrics.core.Gauge
-import org.apache.kafka.common.test.api.Flaky
 import org.apache.kafka.server.config.{ReplicationConfigs, ServerConfigs, ServerLogConfigs}
 import org.apache.kafka.server.metrics.KafkaYammerMetrics
 import org.junit.jupiter.params.ParameterizedTest
@@ -72,7 +71,6 @@ class MetricsDuringTopicCreationDeletionTest extends KafkaServerTestHarness with
    */
  @ParameterizedTest
  @ValueSource(strings = Array("kraft"))
-  @Flaky("KAFKA-18245")
  def testMetricsDuringTopicCreateDelete(quorum: String): Unit = {

    // For UnderReplicatedPartitions, because of https://issues.apache.org/jira/browse/KAFKA-4605
ProduceRequestTest.scala
@@ -103,7 +103,7 @@ class ProduceRequestTest extends BaseRequestTest {
     }).toMap
   }

-  @ParameterizedTest(name = "quorum=kraft")
+  @ParameterizedTest
   @MethodSource(Array("timestampConfigProvider"))
   def testProduceWithInvalidTimestamp(messageTimeStampConfig: String, recordTimestamp: Long): Unit = {
     val topic = "topic"
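
One note on the ProduceRequestTest change: dropping the name attribute only changes how each invocation is displayed in reports; the arguments still come from the timestampConfigProvider method source. For readers unfamiliar with the pattern, here is a minimal self-contained sketch of a JUnit 5 @MethodSource provider. The provider name comes from the diff above, while the config keys and timestamp values are illustrative assumptions, not the real test:

import java.util.stream.Stream;

import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

class TimestampValidationSketch {

    // Supplies (config name, record timestamp) pairs; the values here are
    // illustrative assumptions, not the actual provider in ProduceRequestTest.
    static Stream<Arguments> timestampConfigProvider() {
        long oneHourMs = 60L * 60 * 1000;
        return Stream.of(
            Arguments.of("message.timestamp.before.max.ms", System.currentTimeMillis() - 2 * oneHourMs),
            Arguments.of("message.timestamp.after.max.ms", System.currentTimeMillis() + 2 * oneHourMs)
        );
    }

    // Each pair from the provider becomes one invocation of the test method.
    @ParameterizedTest
    @MethodSource("timestampConfigProvider")
    void testProduceWithInvalidTimestamp(String messageTimeStampConfig, long recordTimestamp) {
        // Real test body elided; it would configure the topic with
        // messageTimeStampConfig and assert that producing a record with
        // recordTimestamp is rejected.
    }
}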
