diff --git a/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java b/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java index 080fbe44e6..fc79d4831d 100644 --- a/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java +++ b/json-jq-task/src/main/java/com/netflix/conductor/tasks/json/JsonJqTransform.java @@ -46,6 +46,7 @@ public class JsonJqTransform extends WorkflowSystemTask { private static final String OUTPUT_RESULT_LIST = "resultList"; private static final String OUTPUT_ERROR = "error"; private static final TypeReference<Map<String, Object>> mapType = new TypeReference<>() {}; + private final TypeReference<List<Object>> listType = new TypeReference<>() {}; private final Scope rootScope; private final ObjectMapper objectMapper; private final LoadingCache<String, JsonQuery> queryCache = createQueryCache(); @@ -125,8 +126,18 @@ private String extractFirstValidMessage(final Exception e) { private Object extractBody(JsonNode node) { if (node.isObject()) { return objectMapper.convertValue(node, mapType); + } else if (node.isArray()) { + return objectMapper.convertValue(node, listType); + } else if (node.isBoolean()) { + return node.asBoolean(); + } else if (node.isNumber()) { + if (node.isIntegralNumber()) { + return node.asLong(); + } else { + return node.asDouble(); + } } else { - return node; + return node.asText(); } } } diff --git a/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java b/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java index 6610851cac..3f12d48ec6 100644 --- a/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java +++ b/json-jq-task/src/test/java/com/netflix/conductor/tasks/json/JsonJqTransformTest.java @@ -12,8 +12,10 @@ */ package com.netflix.conductor.tasks.json; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import org.junit.Test; @@ 
-22,6 +24,7 @@ import com.netflix.conductor.model.TaskModel; import com.netflix.conductor.model.WorkflowModel; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import static org.junit.Assert.*; @@ -46,7 +49,7 @@ public void dataShouldBeCorrectlySelected() { jsonJqTransform.start(workflow, task, null); assertNull(task.getOutputData().get("error")); - assertEquals("\"VALUE\"", task.getOutputData().get("result").toString()); + assertEquals("VALUE", task.getOutputData().get("result").toString()); assertEquals("[\"VALUE\"]", task.getOutputData().get("resultList").toString()); } @@ -124,4 +127,47 @@ public void mapResultShouldBeCorrectlyExtracted() { assertEquals( "{result: \"reply: \" + .response.body.message}", result.get("responseTransform")); } + + @Test + public void stringResultShouldBeCorrectlyExtracted() { + final JsonJqTransform jsonJqTransform = new JsonJqTransform(objectMapper); + final WorkflowModel workflow = new WorkflowModel(); + final TaskModel task = new TaskModel(); + final Map<String, Object> taskInput = new HashMap<>(); + taskInput.put("data", new ArrayList<>()); + taskInput.put( + "queryExpression", "if(.data | length >0) then \"EXISTS\" else \"CREATE\" end"); + + task.setInputData(taskInput); + + jsonJqTransform.start(workflow, task, null); + + assertNull(task.getOutputData().get("error")); + assertTrue(task.getOutputData().get("result") instanceof String); + String result = (String) task.getOutputData().get("result"); + assertEquals("CREATE", result); + } + + @Test + public void listResultShouldBeCorrectlyExtracted() throws JsonProcessingException { + final JsonJqTransform jsonJqTransform = new JsonJqTransform(objectMapper); + final WorkflowModel workflow = new WorkflowModel(); + final TaskModel task = new TaskModel(); + String json = + "{ \"request\": { \"transitions\": [ { \"name\": \"redeliver\" }, { \"name\": \"redeliver_from_validation_error\" }, { \"name\": \"redelivery\" } ] } }"; + Map<String, Object> 
inputData = objectMapper.readValue(json, Map.class); + + final Map<String, Object> taskInput = new HashMap<>(); + taskInput.put("inputData", inputData); + taskInput.put("queryExpression", ".inputData.request.transitions | map(.name)"); + + task.setInputData(taskInput); + + jsonJqTransform.start(workflow, task, null); + + assertNull(task.getOutputData().get("error")); + assertTrue(task.getOutputData().get("result") instanceof List); + List result = (List) task.getOutputData().get("result"); + assertEquals(3, result.size()); + } } diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy index 1139b1cbd7..47b98d6244 100644 --- a/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy +++ b/test-harness/src/test/groovy/com/netflix/conductor/test/integration/JsonJQTransformSpec.groovy @@ -27,10 +27,14 @@ class JsonJQTransformSpec extends AbstractSpecification { @Shared def SEQUENTIAL_JSON_JQ_TRANSFORM_WF = 'sequential_json_jq_transform_wf' + @Shared + def JSON_JQ_TRANSFORM_RESULT_WF = 'json_jq_transform_result_wf' + def setup() { workflowTestUtil.registerWorkflows( 'simple_json_jq_transform_integration_test.json', - 'sequential_json_jq_transform_integration_test.json' + 'sequential_json_jq_transform_integration_test.json', + 'json_jq_transform_result_integration_test.json' ) } @@ -187,4 +191,39 @@ class JsonJQTransformSpec extends AbstractSpecification { result2.get("name") == "Beary Beariston you are the Brown Bear" } } + + def "Test json jq transform task with different json object results succeeds"() { + given: "workflow input" + def workflowInput = new HashMap() + workflowInput["requestedAction"] = "redeliver" + + when: "workflow which has the json jq transform task has started" + def workflowInstanceId = workflowExecutor.startWorkflow(JSON_JQ_TRANSFORM_RESULT_WF, 1, + '', workflowInput, null, 
null, null) + + then: "verify that the workflow and task are completed with expected output" + with(workflowExecutionService.getExecutionStatus(workflowInstanceId, true)) { + status == Workflow.WorkflowStatus.COMPLETED + tasks.size() == 4 + tasks[0].status == Task.Status.COMPLETED + tasks[0].taskType == 'JSON_JQ_TRANSFORM' + tasks[0].outputData.containsKey("result") && tasks[0].outputData.containsKey("resultList") + assert tasks[0].outputData.get("result") == "CREATE" + + tasks[1].status == Task.Status.COMPLETED + tasks[1].taskType == 'DECISION' + assert tasks[1].inputData.get("case") == "CREATE" + + tasks[2].status == Task.Status.COMPLETED + tasks[2].taskType == 'JSON_JQ_TRANSFORM' + tasks[2].outputData.containsKey("result") && tasks[2].outputData.containsKey("resultList") + List result = (List) tasks[2].outputData.get("result") + assert result.size() == 3 + assert result.indexOf("redeliver") >= 0 + + tasks[3].status == Task.Status.COMPLETED + tasks[3].taskType == 'DECISION' + assert tasks[3].inputData.get("case") == "true" + } + } } diff --git a/test-harness/src/test/resources/json_jq_transform_result_integration_test.json b/test-harness/src/test/resources/json_jq_transform_result_integration_test.json new file mode 100644 index 0000000000..89c349fd01 --- /dev/null +++ b/test-harness/src/test/resources/json_jq_transform_result_integration_test.json @@ -0,0 +1,95 @@ +{ + "name": "json_jq_transform_result_wf", + "version": 1, + "tasks": [ + { + "name": "json_jq_1", + "taskReferenceName": "json_jq_1", + "description": "json_jq_1", + "inputParameters": { + "data": [], + "queryExpression": "if(.data | length >0) then \"EXISTS\" else \"CREATE\" end" + }, + "type": "JSON_JQ_TRANSFORM", + "startDelay": 0, + "optional": false, + "asyncComplete": false + }, + { + "name": "decide_1", + "taskReferenceName": "decide_1", + "inputParameters": { + "outcome": "${json_jq_1.output.result}" + }, + "type": "DECISION", + "caseValueParam": "outcome", + "decisionCases": { + "CREATE": [ 
+ { + "name": "json_jq_2", + "taskReferenceName": "json_jq_2", + "description": "json_jq_2", + "inputParameters": { + "inputData": { + "request": { + "transitions": [ + { + "name": "redeliver" + }, + { + "name": "redeliver_from_validation_error" + }, + { + "name": "redelivery" + } + ] + } + }, + "queryExpression": ".inputData.request.transitions | map(.name)" + }, + "type": "JSON_JQ_TRANSFORM", + "startDelay": 0, + "optional": false, + "asyncComplete": false + }, + { + "name": "decide_2", + "taskReferenceName": "decide_2", + "inputParameters": { + "requestedAction": "${workflow.input.requestedAction}", + "availableActions": "${json_jq_2.output.result}" + }, + "type": "DECISION", + "caseExpression": "if ($.availableActions.indexOf($.requestedAction) >= 0) { \"true\" } else { \"false\" }", + "decisionCases": { + "false": [ + { + "name": "get_population_data", + "taskReferenceName": "get_population_data", + "inputParameters": { + "http_request": { + "uri": "https://datausa.io/api/data?drilldowns=Nation&measures=Population", + "method": "GET" + } + }, + "type": "HTTP", + "startDelay": 0, + "optional": false, + "asyncComplete": false + } + ] + } + } + ] + } + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" +} \ No newline at end of file