diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index e6165b9e2a..fec0b13cea 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -34,6 +34,7 @@ import java.util.Optional; import javax.validation.Valid; +import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotBlank; import javax.validation.constraints.NotEmpty; @@ -74,8 +75,9 @@ public class WorkflowDef extends Auditable { private String failureWorkflow; @ProtoField(id = 8) - @Min(value = 1, message = "workflowDef schemaVersion: ${validatedValue} should be >= {value}") - private int schemaVersion = 1; + @Min(value = 2, message = "workflowDef schemaVersion: {value} is only supported") + @Max(value = 2, message = "workflowDef schemaVersion: {value} is only supported") + private int schemaVersion = 2; //By default a workflow is restartable @ProtoField(id = 9) diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefTest.java b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefTest.java index 16869458e2..769fce07e4 100644 --- a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefTest.java +++ b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefTest.java @@ -110,7 +110,7 @@ public void test() { @Test public void testWorkflowDefConstraints() { WorkflowDef workflowDef = new WorkflowDef();//name is null - workflowDef.setSchemaVersion(1); + workflowDef.setSchemaVersion(2); ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); Validator validator = factory.getValidator(); @@ -128,7 +128,7 @@ public void testWorkflowDefConstraints() { @Test public void 
testWorkflowDefConstraintsWithMultipleEnvVariable() { WorkflowDef workflowDef = new WorkflowDef();//name is null - workflowDef.setSchemaVersion(1); + workflowDef.setSchemaVersion(2); workflowDef.setName("test_env"); WorkflowTask workflowTask_1 = new WorkflowTask(); @@ -167,7 +167,7 @@ public void testWorkflowDefConstraintsWithMultipleEnvVariable() { @Test public void testWorkflowDefConstraintsSingleEnvVariable() { WorkflowDef workflowDef = new WorkflowDef();//name is null - workflowDef.setSchemaVersion(1); + workflowDef.setSchemaVersion(2); workflowDef.setName("test_env"); WorkflowTask workflowTask_1 = new WorkflowTask(); @@ -194,7 +194,7 @@ public void testWorkflowDefConstraintsSingleEnvVariable() { @Test public void testWorkflowDefConstraintsDualEnvVariable() { WorkflowDef workflowDef = new WorkflowDef();//name is null - workflowDef.setSchemaVersion(1); + workflowDef.setSchemaVersion(2); workflowDef.setName("test_env"); WorkflowTask workflowTask_1 = new WorkflowTask(); @@ -223,7 +223,7 @@ public void testWorkflowDefConstraintsDualEnvVariable() { @Test public void testWorkflowDefConstraintsWithMapAsInputParam() { WorkflowDef workflowDef = new WorkflowDef();//name is null - workflowDef.setSchemaVersion(1); + workflowDef.setSchemaVersion(2); workflowDef.setName("test_env"); WorkflowTask workflowTask_1 = new WorkflowTask(); @@ -311,4 +311,33 @@ public void testWorkflowTaskInputParamValueInvalid() { assertTrue(validationErrors.contains("key: blabla input parameter value: is null or empty")); } + + @Test + public void testWorkflowInvalidSchemaVersion() { + WorkflowDef workflowDef = new WorkflowDef();//name is null + workflowDef.setSchemaVersion(3); + workflowDef.setName("test_env"); + + WorkflowTask workflowTask = new WorkflowTask(); + + workflowTask.setName("t1"); + workflowTask.setWorkflowTaskType(TaskType.SIMPLE); + workflowTask.setTaskReferenceName("t1"); + + Map map = new HashMap<>(); + map.put("blabla", ""); + workflowTask.setInputParameters(map); + + 
workflowDef.getTasks().add(workflowTask); + + ValidatorFactory factory = Validation.buildDefaultValidatorFactory(); + Validator validator = factory.getValidator(); + Set> result = validator.validate(workflowDef); + assertEquals(2, result.size()); + + List validationErrors = new ArrayList<>(); + result.forEach(e -> validationErrors.add(e.getMessage())); + + assertTrue(validationErrors.contains("workflowDef schemaVersion: 2 is only supported")); + } } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java index 166a100d46..20a5064e2b 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java @@ -44,9 +44,9 @@ public void setUp() { parametersUtils = new ParametersUtils(); ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - ip1.put("case", "workflow.input.case"); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + ip1.put("case", "${workflow.input.case}"); task1 = new WorkflowTask(); task1.setName("Test1"); @@ -152,8 +152,8 @@ public void getEvaluatedCaseValue() { Workflow workflowInstance = new Workflow(); workflowInstance.setWorkflowDefinition(new WorkflowDef()); Map workflowInput = new HashMap<>(); - workflowInput.put("p1", "workflow.input.param1"); - workflowInput.put("p2", "workflow.input.param2"); + workflowInput.put("param1", "test1"); + workflowInput.put("param2", "test2"); workflowInput.put("case", "0"); workflowInstance.setInput(workflowInput); diff --git a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java b/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java index 6ffd643b3b..6d1dca5119 100644 --- 
a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java +++ b/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java @@ -267,7 +267,7 @@ public void testRegisterWorkflowDefInvalidName() { public void testRegisterWorkflowDef() { WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setName("somename"); - workflowDef.setSchemaVersion(5); + workflowDef.setSchemaVersion(2); List tasks = new ArrayList<>(); WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setTaskReferenceName("hello"); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java index 7f1fcbb293..cff7a0d42d 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java @@ -148,7 +148,6 @@ public void init() { return; } - WorkflowContext.set(new WorkflowContext("junit_app")); for (int i = 0; i < 21; i++) { @@ -286,7 +285,6 @@ public void testWorkflowWithNoTasks() { @Test public void testTaskDefTemplate() throws Exception { - System.setProperty("STACK2", "test_stack"); TaskDef templatedTask = new TaskDef(); templatedTask.setName("templated_task"); @@ -342,7 +340,6 @@ public void testTaskDefTemplate() throws Exception { assertEquals(expected, om.writeValueAsString(taskInput)); } - @Test public void testWorkflowSchemaVersion() { WorkflowDef ver2 = new WorkflowDef(); @@ -361,8 +358,7 @@ public void testWorkflowSchemaVersion() { assertEquals(2, found.getSchemaVersion()); WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1); - assertEquals(1, found1.getSchemaVersion()); - + assertEquals(2, found1.getSchemaVersion()); } @Test @@ -432,7 +428,6 @@ public void testForkJoin() throws Exception { } catch (Exception e) { throw new 
RuntimeException(e); } - }); future1.get(); @@ -470,9 +465,9 @@ public void testForkJoin() throws Exception { } @Test - public void testForkJoinNested() { + public void testForkJoinNestedSchemaVersion1() { + createForkJoinNestedWorkflow(1); - createForkJoinNestedWorkflow(); Map input = new HashMap<>(); input.put("case", "a"); //This should execute t16 and t19 @@ -567,10 +562,106 @@ public void testForkJoinNested() { } + @Test + public void testForkJoinNestedSchemaVersion2() { + createForkJoinNestedWorkflow(2); + + Map input = new HashMap<>(); + input.put("case", "a"); //This should execute t16 and t19 + String wfid = startOrLoadWorkflowExecution("forkJoinNested", FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); + System.out.println("testForkJoinNested.wfid=" + wfid); + + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); + + + Task t1 = workflowExecutionService.poll("junit_task_11", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); + + Task t2 = workflowExecutionService.poll("junit_task_12", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + + Task t3 = 
workflowExecutionService.poll("junit_task_13", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); + + assertNotNull(t1); + assertNotNull(t2); + assertNotNull(t3); + + t1.setStatus(COMPLETED); + t2.setStatus(COMPLETED); + t3.setStatus(COMPLETED); + + workflowExecutionService.updateTask(t1); + workflowExecutionService.updateTask(t2); + workflowExecutionService.updateTask(t3); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); + + String[] tasks = new String[]{"junit_task_14", "junit_task_16"}; + for (String tt : tasks) { + Task polled = workflowExecutionService.poll(tt, "test"); + assertNotNull("poll resulted empty for task: " + tt, polled); + polled.setStatus(COMPLETED); + workflowExecutionService.updateTask(polled); + } + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet + + Task task19 = workflowExecutionService.poll("junit_task_19", "test"); + assertNotNull(task19); + task19.setStatus(COMPLETED); + workflowExecutionService.updateTask(task19); + + Task task20 = workflowExecutionService.poll("junit_task_20", "test"); + assertNotNull(task20); + task20.setStatus(COMPLETED); + workflowExecutionService.updateTask(task20); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(RUNNING, 
wf.getStatus()); + + Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); + + pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); + + Task task15 = workflowExecutionService.poll("junit_task_15", "test"); + assertNotNull(task15); + task15.setStatus(COMPLETED); + workflowExecutionService.updateTask(task15); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); + } + @Test public void testForkJoinNestedWithSubWorkflow() { - createForkJoinNestedWorkflowWithSubworkflow(); + createForkJoinNestedWorkflowWithSubworkflow(1); Map input = new HashMap<>(); input.put("case", "a"); //This should execute t16 and t19 @@ -722,9 +813,8 @@ public void testForkJoinFailure() { @SuppressWarnings("unchecked") @Test public void testDynamicForkJoinLegacy() { - try { - createDynamicForkJoinWorkflowDefsLegacy(); + createDynamicForkJoinWorkflowDefsLegacy(1); } catch (Exception e) { } @@ -915,7 +1005,6 @@ public void testDynamicForkJoin() { } private void createForkJoinWorkflow() { - WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setName(FORK_JOIN_WF); workflowDef.setDescription(workflowDef.getName()); @@ -968,7 +1057,6 @@ private void createForkJoinWorkflow() { private void createForkJoinWorkflowWithZeroRetry() { - WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_WF + "_2"); def.setDescription(def.getName()); @@ -1017,24 +1105,29 @@ private void createForkJoinWorkflowWithZeroRetry() { def.getTasks().add(join); def.getTasks().add(wft4); 
metadataService.updateWorkflowDef(def); - } - private void createForkJoinNestedWorkflow() { - + private void createForkJoinNestedWorkflow(int schemaVersion) { WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_NESTED_WF); def.setDescription(def.getName()); def.setVersion(1); + def.setSchemaVersion(schemaVersion); def.setInputParameters(Arrays.asList("param1", "param2")); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - ip1.put("case", "workflow.input.case"); - WorkflowTask[] tasks = new WorkflowTask[21]; + Map ip1 = new HashMap<>(); + if (schemaVersion <= 1) { + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + ip1.put("case", "workflow.input.case"); + } else { + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + ip1.put("case", "${workflow.input.case}"); + } + for (int i = 10; i < 21; i++) { WorkflowTask wft = new WorkflowTask(); wft.setName("junit_task_" + i); @@ -1085,11 +1178,11 @@ private void createForkJoinNestedWorkflow() { metadataService.updateWorkflowDef(def); } - private void createForkJoinNestedWorkflowWithSubworkflow() { - + private void createForkJoinNestedWorkflowWithSubworkflow(int schemaVersion) { WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_NESTED_WF); def.setDescription(def.getName()); + def.setSchemaVersion(schemaVersion); def.setVersion(1); def.setInputParameters(Arrays.asList("param1", "param2")); @@ -1157,8 +1250,6 @@ private void createForkJoinNestedWorkflowWithSubworkflow() { def.getTasks().add(tasks[15]); metadataService.updateWorkflowDef(def); - - } private void createDynamicForkJoinWorkflowDefs() { @@ -1167,13 +1258,14 @@ private void createDynamicForkJoinWorkflowDefs() { def.setName(DYNAMIC_FORK_JOIN_WF); def.setDescription(def.getName()); def.setVersion(1); + def.setSchemaVersion(2); def.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask workflowTask1 = new WorkflowTask();
workflowTask1.setName("junit_task_1"); Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); workflowTask1.setInputParameters(ip1); workflowTask1.setTaskReferenceName("dt1"); @@ -1182,8 +1274,8 @@ private void createDynamicForkJoinWorkflowDefs() { fanout.setTaskReferenceName("dynamicfanouttask"); fanout.setDynamicForkTasksParam("dynamicTasks"); fanout.setDynamicForkTasksInputParamName("dynamicTasksInput"); - fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); - fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); + fanout.getInputParameters().put("dynamicTasks", "${dt1.output.dynamicTasks}"); + fanout.getInputParameters().put("dynamicTasksInput", "${dt1.output.dynamicTasksInput}"); WorkflowTask join = new WorkflowTask(); join.setType(TaskType.JOIN.name()); @@ -1204,19 +1296,25 @@ private void createDynamicForkJoinWorkflowDefs() { } @SuppressWarnings("deprecation") - private void createDynamicForkJoinWorkflowDefsLegacy() { - + private void createDynamicForkJoinWorkflowDefsLegacy(int schemaVersion) { WorkflowDef def = new WorkflowDef(); def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); def.setDescription(def.getName()); def.setVersion(1); + def.setSchemaVersion(schemaVersion); def.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask wft1 = new WorkflowTask(); wft1.setName("junit_task_1"); Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); + if (schemaVersion <= 1) { + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + } else { + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + } + wft1.setInputParameters(ip1); wft1.setTaskReferenceName("dt1"); @@ -1224,9 +1322,13 @@ private void createDynamicForkJoinWorkflowDefsLegacy() { 
fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); fanout.setTaskReferenceName("dynamicfanouttask"); fanout.setDynamicForkJoinTasksParam("dynamicTasks"); - fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); - fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - + if (schemaVersion <= 1) { + fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); + fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); + } else { + fanout.getInputParameters().put("dynamicTasks", "${dt1.output.dynamicTasks}"); + fanout.getInputParameters().put("dynamicTasksInput", "${dt1.output.dynamicTasksInput}"); + } WorkflowTask join = new WorkflowTask(); join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("dynamicfanouttask_join"); @@ -1241,20 +1343,30 @@ private void createDynamicForkJoinWorkflowDefsLegacy() { } - private void createConditionalWF() { - + private void createConditionalWF(int schemaVersion) { WorkflowTask wft1 = new WorkflowTask(); wft1.setName("junit_task_1"); Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); + + if (schemaVersion <= 1) { + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + } else { + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + } + wft1.setInputParameters(ip1); wft1.setTaskReferenceName("t1"); WorkflowTask wft2 = new WorkflowTask(); wft2.setName("junit_task_2"); Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); + if (schemaVersion <= 1) { + ip2.put("tp1", "workflow.input.param1"); + } else { + ip2.put("tp1", "${workflow.input.param1}"); + } wft2.setInputParameters(ip2); wft2.setTaskReferenceName("t2"); @@ -1279,10 +1391,21 @@ private void createConditionalWF() { dc.put("one", Arrays.asList(wft1, wft3)); dc.put("two", Arrays.asList(wft2)); c2.setDecisionCases(dc); - 
c2.getInputParameters().put("case", "workflow.input.param2"); - WorkflowTask condition = new WorkflowTask(); + Map fi = new HashMap<>(); + + if (schemaVersion <= 1) { + condition.getInputParameters().put("case", "workflow.input.param1"); + c2.getInputParameters().put("case", "workflow.input.param2"); + fi.put("finalCase", "workflow.input.finalCase"); + } else { + condition.getInputParameters().put("case", "${workflow.input.param1}"); + c2.getInputParameters().put("case", "${workflow.input.param2}"); + fi.put("finalCase", "${workflow.input.finalCase}"); + } + + condition.setType(TaskType.DECISION.name()); condition.setCaseValueParam("case"); condition.setName("conditional"); @@ -1291,7 +1414,6 @@ private void createConditionalWF() { decisionCases.put("nested", Arrays.asList(c2)); decisionCases.put("three", Arrays.asList(wft3)); condition.setDecisionCases(decisionCases); - condition.getInputParameters().put("case", "workflow.input.param1"); condition.getDefaultCase().add(wft2); def2.getTasks().add(condition); @@ -1304,14 +1426,12 @@ private void createConditionalWF() { finalTask.setTaskReferenceName("tf"); finalTask.setType(TaskType.DECISION.name()); finalTask.setCaseValueParam("finalCase"); - Map fi = new HashMap<>(); - fi.put("finalCase", "workflow.input.finalCase"); finalTask.setInputParameters(fi); finalTask.getDecisionCases().put("notify", Arrays.asList(notifyTask)); + def2.setSchemaVersion(schemaVersion); def2.getTasks().add(finalTask); metadataService.updateWorkflowDef(def2); - } @@ -2197,15 +2317,14 @@ public void testConcurrentWorkflowExecutions() { } @Test - public void testCaseStatements() { - createConditionalWF(); + public void testCaseStatementsSchemaVersion1() { + createConditionalWF(1); String correlationId = "testCaseStatements: " + System.currentTimeMillis(); Map input = new HashMap(); String wfid; String[] sequence; - //default case input.put("param1", "xxx"); input.put("param2", "two"); @@ -2225,8 +2344,66 @@ public void testCaseStatements() { 
assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); assertEquals(3, es.getTasks().size()); - /// + //nested - one + input.put("param1", "nested"); + input.put("param2", "one"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 2, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(RUNNING, es.getStatus()); + sequence = new String[]{"junit_task_1", "junit_task_3"}; + + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_1", "junit_task_3", SystemTaskType.DECISION.name()}, 5); + + //nested - two + input.put("param1", "nested"); + input.put("param2", "two"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 3, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + sequence = new String[]{"junit_task_2"}; + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_2", SystemTaskType.DECISION.name()}, 4); + + //three + input.put("param1", "three"); + input.put("param2", "two"); + input.put("finalCase", "notify"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 4, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + sequence = new String[]{"junit_task_3", "junit_task_4"}; + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), "junit_task_3", SystemTaskType.DECISION.name(), "junit_task_4"}, 3); + } + @Test + public void testCaseStatementsSchemaVersion2() { + createConditionalWF(2); + + String correlationId = "testCaseStatements: " + System.currentTimeMillis(); + Map input = new HashMap(); + String wfid; + String[] sequence; + + //default case + input.put("param1", "xxx"); + 
input.put("param2", "two"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(RUNNING, es.getStatus()); + Task task = workflowExecutionService.poll("junit_task_2", "junit"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + assertEquals(3, es.getTasks().size()); //nested - one input.put("param1", "nested"); @@ -2240,7 +2417,6 @@ public void testCaseStatements() { sequence = new String[]{"junit_task_1", "junit_task_3"}; validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_1", "junit_task_3", SystemTaskType.DECISION.name()}, 5); - // //nested - two input.put("param1", "nested"); @@ -2250,7 +2426,6 @@ public void testCaseStatements() { assertNotNull(wfid); sequence = new String[]{"junit_task_2"}; validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_2", SystemTaskType.DECISION.name()}, 4); - // //three input.put("param1", "three"); @@ -2261,8 +2436,6 @@ public void testCaseStatements() { assertNotNull(wfid); sequence = new String[]{"junit_task_3", "junit_task_4"}; validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), "junit_task_3", SystemTaskType.DECISION.name(), "junit_task_4"}, 3); - // - } private void validate(String wfid, String[] sequence, String[] executedTasks, int expectedTotalTasks) {