Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

IGNITE-24137 Add HIGH_AVAILABILITY mode to zone suites, where valuable #5028

Open
wants to merge 5 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
import org.apache.ignite.Ignite;
import org.apache.ignite.internal.ClusterPerTestIntegrationTest;
import org.apache.ignite.internal.app.IgniteImpl;
import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode;
import org.apache.ignite.internal.catalog.events.CatalogEvent;
import org.apache.ignite.internal.lang.ByteArray;
import org.apache.ignite.internal.metastorage.Entry;
Expand All @@ -52,7 +53,8 @@
import org.apache.ignite.internal.testframework.IgniteTestUtils;
import org.intellij.lang.annotations.Language;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;

/**
* Integration test for data nodes' filters functionality.
Expand Down Expand Up @@ -111,15 +113,20 @@ protected int initialNodes() {
* @throws Exception If failed.
*/
@Disabled("https://issues.apache.org/jira/browse/IGNITE-21387")
void testFilteredDataNodesPropagatedToStable() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
void testFilteredDataNodesPropagatedToStable(ConsistencyMode consistencyMode) throws Exception {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I've run this test, and it fails with a strange error — please take a look:

java.lang.IllegalArgumentException: object is not an instance of org.apache.ignite.internal.app.IgniteImpl

	at java.base/java.lang.reflect.AccessibleObject.canAccess(AccessibleObject.java:463)
	at org.apache.ignite.internal.testframework.IgniteTestUtils.getField(IgniteTestUtils.java:184)
	at org.apache.ignite.internal.testframework.IgniteTestUtils.getFieldValue(IgniteTestUtils.java:201)
	at org.apache.ignite.internal.distributionzones.ItDistributionZonesFiltersTest.testFilteredDataNodesPropagatedToStable(ItDistributionZonesFiltersTest.java:133)
	at java.base/java.lang.reflect.Method.invoke(Method.java:566)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:177)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.accept(ForEachOps.java:183)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.Iterator.forEachRemaining(Iterator.java:133)
	at java.base/java.util.Spliterators$IteratorSpliterator.forEachRemaining(Spliterators.java:1801)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
	at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:497)
	at java.base/java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:274)
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1655)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
	at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:497)
	at java.base/java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:274)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:195)
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1655)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
	at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:497)
	at java.base/java.util.stream.ReferencePipeline$7$1.accept(ReferencePipeline.java:274)
	at java.base/java.util.ArrayList$ArrayListSpliterator.forEachRemaining(ArrayList.java:1655)
	at java.base/java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:484)
	at java.base/java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:474)
	at java.base/java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150)
	at java.base/java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173)
	at java.base/java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234)
	at java.base/java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:497)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)
	at java.base/java.util.ArrayList.forEach(ArrayList.java:1541)

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Fixed, but the causes were not related to the consistency mode. The test now has a different problem, though I believe it is connected to the issue referenced in the @Disabled annotation.

String filter = "$[?(@.region == \"US\" && @.storage == \"SSD\")]";

// This node does not pass the filter
@Language("HOCON") String firstNodeAttributes = "{region: EU, storage: SSD}";

Ignite node = startNode(1, createStartConfig(firstNodeAttributes, STORAGE_PROFILES_CONFIGS));
Ignite node = unwrapIgniteImpl(startNode(1, createStartConfig(firstNodeAttributes, STORAGE_PROFILES_CONFIGS)));

node.sql().execute(null, createZoneSql(2, 3, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES));
node.sql().execute(
null,
createZoneSql(2, 3, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES, consistencyMode)
);

node.sql().execute(null, createTableSql());

Expand Down Expand Up @@ -182,13 +189,17 @@ void testFilteredDataNodesPropagatedToStable() throws Exception {
*
* @throws Exception If failed.
*/
@Test
void testAlteringFiltersPropagatedDataNodesToStableImmediately() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
void testAlteringFiltersPropagatedDataNodesToStableImmediately(ConsistencyMode consistencyMode) throws Exception {
String filter = "$[?(@.region == \"US\" && @.storage == \"SSD\")]";

Ignite node0 = unwrapIgniteImpl(node(0));

node0.sql().execute(null, createZoneSql(2, 3, 10_000, 10_000, filter, STORAGE_PROFILES));
node0.sql().execute(
null,
createZoneSql(2, 3, 10_000, 10_000, filter, STORAGE_PROFILES, consistencyMode)
);

node0.sql().execute(null, createTableSql());

Expand Down Expand Up @@ -237,13 +248,17 @@ void testAlteringFiltersPropagatedDataNodesToStableImmediately() throws Exceptio
*
* @throws Exception If failed.
*/
@Test
void testEmptyDataNodesDoNotPropagatedToStableAfterAlteringFilter() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
void testEmptyDataNodesDoNotPropagatedToStableAfterAlteringFilter(ConsistencyMode consistencyMode) throws Exception {
String filter = "$[?(@.region == \"US\" && @.storage == \"SSD\")]";

Ignite node0 = unwrapIgniteImpl(node(0));

node0.sql().execute(null, createZoneSql(2, 3, 10_000, 10_000, filter, STORAGE_PROFILES));
node0.sql().execute(
null,
createZoneSql(2, 3, 10_000, 10_000, filter, STORAGE_PROFILES, consistencyMode)
);

node0.sql().execute(null, createTableSql());

Expand Down Expand Up @@ -298,8 +313,9 @@ void testEmptyDataNodesDoNotPropagatedToStableAfterAlteringFilter() throws Excep
*
* @throws Exception If failed.
*/
@Test
void testFilteredEmptyDataNodesDoNotTriggerRebalance() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
void testFilteredEmptyDataNodesDoNotTriggerRebalance(ConsistencyMode consistencyMode) throws Exception {
String filter = "$[?(@.region == \"EU\" && @.storage == \"HDD\")]";

// This node does not pass the filter.
Expand All @@ -310,7 +326,10 @@ void testFilteredEmptyDataNodesDoNotTriggerRebalance() throws Exception {

Ignite node1 = startNode(1, createStartConfig(firstNodeAttributes, STORAGE_PROFILES_CONFIGS));

node1.sql().execute(null, createZoneSql(1, 1, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES));
node1.sql().execute(
null,
createZoneSql(1, 1, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES, consistencyMode)
);

MetaStorageManager metaStorageManager = IgniteTestUtils.getFieldValue(
node0,
Expand Down Expand Up @@ -342,8 +361,9 @@ void testFilteredEmptyDataNodesDoNotTriggerRebalance() throws Exception {
assertPendingAssignmentsWereNeverExist(metaStorageManager, partId);
}

@Test
void testFilteredEmptyDataNodesDoNotTriggerRebalanceOnReplicaUpdate() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
void testFilteredEmptyDataNodesDoNotTriggerRebalanceOnReplicaUpdate(ConsistencyMode consistencyMode) throws Exception {
String filter = "$[?(@.region == \"EU\" && @.storage == \"HDD\")]";

// This node does not pass the filter.
Expand All @@ -354,7 +374,10 @@ void testFilteredEmptyDataNodesDoNotTriggerRebalanceOnReplicaUpdate() throws Exc

startNode(1, createStartConfig(firstNodeAttributes, STORAGE_PROFILES_CONFIGS));

node0.sql().execute(null, createZoneSql(1, 1, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES));
node0.sql().execute(
null,
createZoneSql(1, 1, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, filter, STORAGE_PROFILES, consistencyMode)
);

MetaStorageManager metaStorageManager = IgniteTestUtils.getFieldValue(
unwrapIgniteImpl(node0),
Expand Down Expand Up @@ -444,16 +467,25 @@ private static void assertPendingAssignmentsWereNeverExist(
assertTrue(metaStorageManager.get(pendingPartAssignmentsKey(partId)).get().empty());
}

private static String createZoneSql(int partitions, int replicas, int scaleUp, int scaleDown, String filter, String storageProfiles) {
private static String createZoneSql(
int partitions,
int replicas,
int scaleUp,
int scaleDown,
String filter,
String storageProfiles,
ConsistencyMode consistencyMode
) {
String sqlFormat = "CREATE ZONE \"%s\" WITH "
+ "\"REPLICAS\" = %s, "
+ "\"PARTITIONS\" = %s, "
+ "\"DATA_NODES_FILTER\" = '%s', "
+ "\"DATA_NODES_AUTO_ADJUST_SCALE_UP\" = %s, "
+ "\"DATA_NODES_AUTO_ADJUST_SCALE_DOWN\" = %s, "
+ "\"STORAGE_PROFILES\" = %s";
+ "\"STORAGE_PROFILES\" = %s, "
+ "\"CONSISTENCY_MODE\" = '%s'";

return String.format(sqlFormat, ZONE_NAME, replicas, partitions, filter, scaleUp, scaleDown, storageProfiles);
return String.format(sqlFormat, ZONE_NAME, replicas, partitions, filter, scaleUp, scaleDown, storageProfiles, consistencyMode);
}

private static String alterZoneSql(String filter) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@
import org.apache.ignite.internal.catalog.CatalogManagerImpl;
import org.apache.ignite.internal.catalog.CatalogTestUtils;
import org.apache.ignite.internal.catalog.descriptors.CatalogZoneDescriptor;
import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode;
import org.apache.ignite.internal.catalog.storage.UpdateLogImpl;
import org.apache.ignite.internal.cluster.management.ClusterManagementGroupManager;
import org.apache.ignite.internal.cluster.management.raft.TestClusterStateStorage;
Expand Down Expand Up @@ -136,7 +137,8 @@
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.junit.jupiter.params.provider.CsvSource;
import org.junit.jupiter.params.provider.EnumSource;

/**
* Tests for checking {@link DistributionZoneManager} behavior after node's restart.
Expand Down Expand Up @@ -395,14 +397,18 @@ public void testNodeAttributesRestoredAfterRestart() throws Exception {
assertEquals(nodeAttributesBeforeRestart, nodeAttributesAfterRestart);
}

@ParameterizedTest(name = "defaultZone={0}")
@ValueSource(booleans = {true, false})
public void testTopologyAugmentationMapRestoredAfterRestart(boolean defaultZone) throws Exception {
@ParameterizedTest(name = "defaultZone={0},consistencyMode={1}")
@CsvSource({
"true,",
"false, HIGH_AVAILABILITY",
"false, STRONG_CONSISTENCY",
})
public void testTopologyAugmentationMapRestoredAfterRestart(boolean defaultZone, ConsistencyMode consistencyMode) throws Exception {
PartialNode node = startPartialNode(0);

node.logicalTopology().putNode(A);

String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE);
String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, consistencyMode);

node.logicalTopology().putNode(B);
node.logicalTopology().putNode(C);
Expand Down Expand Up @@ -604,8 +610,9 @@ public void testFirstLogicalTopologyUpdateInterruptedEventRestoredAfterRestart()
);
}

@Test
public void testCreationZoneWhenDataNodesAreDeletedIsNotSuccessful() throws Exception {
@ParameterizedTest
@EnumSource(ConsistencyMode.class)
public void testCreationZoneWhenDataNodesAreDeletedIsNotSuccessful(ConsistencyMode consistencyMode) throws Exception {
PartialNode node = startPartialNode(0);

node.logicalTopology().putNode(A);
Expand Down Expand Up @@ -662,7 +669,15 @@ public void testCreationZoneWhenDataNodesAreDeletedIsNotSuccessful() throws Exce
return dataNodeKeyOptional.isPresent();
}));

createZone(getCatalogManager(node), "zone1", INFINITE_TIMER_VALUE, INFINITE_TIMER_VALUE, null, DEFAULT_STORAGE_PROFILE);
createZone(
getCatalogManager(node),
"zone1",
INFINITE_TIMER_VALUE,
INFINITE_TIMER_VALUE,
null,
consistencyMode,
DEFAULT_STORAGE_PROFILE
);

// Assert that after creation of a zone, data nodes are still tombstone, but not the logical topology, as for default zone.
assertThat(metastore.get(new ByteArray(dataNodeKey[0])).thenApply(Entry::tombstone), willBe(true));
Expand All @@ -672,12 +687,16 @@ private Set<NodeWithAttributes> deserializeLogicalTopologySet(byte[] bytes) {
return DistributionZonesUtil.deserializeLogicalTopologySet(bytes);
}

@ParameterizedTest(name = "defaultZone={0}")
@ValueSource(booleans = {true, false})
public void testLocalDataNodesAreRestoredAfterRestart(boolean defaultZone) throws Exception {
@ParameterizedTest(name = "defaultZone={0},consistencyMode={1}")
@CsvSource({
"true,",
"false, HIGH_AVAILABILITY",
"false, STRONG_CONSISTENCY",
})
public void testLocalDataNodesAreRestoredAfterRestart(boolean defaultZone, ConsistencyMode consistencyMode) throws Exception {
PartialNode node = startPartialNode(0);

String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE);
String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, IMMEDIATE_TIMER_VALUE, IMMEDIATE_TIMER_VALUE, consistencyMode);

node.logicalTopology().putNode(A);
node.logicalTopology().putNode(B);
Expand Down Expand Up @@ -709,17 +728,21 @@ public void testLocalDataNodesAreRestoredAfterRestart(boolean defaultZone) throw
Set.of(A), TIMEOUT_MILLIS);
}

@ParameterizedTest(name = "defaultZone={0}")
@ValueSource(booleans = {true, false})
public void testScaleUpTimerIsRestoredAfterRestart(boolean defaultZone) throws Exception {
@ParameterizedTest(name = "defaultZone={0},consistencyMode={1}")
@CsvSource({
"true,",
"false, HIGH_AVAILABILITY",
"false, STRONG_CONSISTENCY",
})
public void testScaleUpTimerIsRestoredAfterRestart(boolean defaultZone, ConsistencyMode consistencyMode) throws Exception {
PartialNode node = startPartialNode(0);

node.logicalTopology().putNode(A);
node.logicalTopology().putNode(B);

assertLogicalTopologyInMetastorage(Set.of(A, B), metastore);

String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, 1, 1);
String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, 1, 1, consistencyMode);

int zoneId = getZoneId(node, zoneName);

Expand Down Expand Up @@ -779,9 +802,13 @@ public void testScaleUpTimerIsRestoredAfterRestart(boolean defaultZone) throws E
);
}

@ParameterizedTest(name = "defaultZone={0}")
@ValueSource(booleans = {true, false})
public void testScaleDownTimerIsRestoredAfterRestart(boolean defaultZone) throws Exception {
@ParameterizedTest(name = "defaultZone={0},consistencyMode={1}")
@CsvSource({
"true,",
"false, HIGH_AVAILABILITY",
"false, STRONG_CONSISTENCY",
})
public void testScaleDownTimerIsRestoredAfterRestart(boolean defaultZone, ConsistencyMode consistencyMode) throws Exception {
PartialNode node = startPartialNode(0);

node.logicalTopology().putNode(A);
Expand All @@ -792,7 +819,7 @@ public void testScaleDownTimerIsRestoredAfterRestart(boolean defaultZone) throws
DistributionZoneManager distributionZoneManager = getDistributionZoneManager(node);
CatalogManager catalogManager = getCatalogManager(node);

String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, 1, 1);
String zoneName = createZoneOrAlterDefaultZone(node, defaultZone, 1, 1, consistencyMode);

int zoneId = getZoneId(node, zoneName);

Expand Down Expand Up @@ -844,7 +871,8 @@ private static String createZoneOrAlterDefaultZone(
PartialNode node,
boolean useDefaultZone,
int scaleUp,
int scaleDown
int scaleDown,
ConsistencyMode consistencyMode
) throws Exception {
String zoneName;

Expand All @@ -869,7 +897,7 @@ private static String createZoneOrAlterDefaultZone(
} else {
zoneName = ZONE_NAME;

createZone(getCatalogManager(node), zoneName, scaleUp, scaleDown, null, DEFAULT_STORAGE_PROFILE);
createZone(getCatalogManager(node), zoneName, scaleUp, scaleDown, null, consistencyMode, DEFAULT_STORAGE_PROFILE);
}

return zoneName;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
import org.apache.ignite.internal.catalog.CatalogManager;
import org.apache.ignite.internal.catalog.CatalogTestUtils;
import org.apache.ignite.internal.catalog.descriptors.CatalogZoneDescriptor;
import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode;
import org.apache.ignite.internal.cluster.management.ClusterManagementGroupManager;
import org.apache.ignite.internal.cluster.management.raft.ClusterStateStorage;
import org.apache.ignite.internal.cluster.management.raft.TestClusterStateStorage;
Expand Down Expand Up @@ -185,6 +186,7 @@ protected void createZone(
@Nullable Integer dataNodesAutoAdjustScaleUp,
@Nullable Integer dataNodesAutoAdjustScaleDown,
@Nullable String filter,
@Nullable ConsistencyMode consistencyMode,
String storageProfiles
) {
DistributionZonesTestUtil.createZone(
Expand All @@ -193,6 +195,7 @@ protected void createZone(
dataNodesAutoAdjustScaleUp,
dataNodesAutoAdjustScaleDown,
filter,
consistencyMode,
storageProfiles
);
}
Expand Down
Loading