getBulkErrorResults() {
return bulkErrorResults;
}
- public void appendSuccessResponse(String id) {
- bulkSuccessfulResults.add(id);
+ public void appendSuccessResponse(T result) {
+ bulkSuccessfulResults.add(result);
}
public void appendFailedResponse(String id, String errorMessage) {
@@ -56,10 +58,9 @@ public boolean equals(Object o) {
if (this == o) {
return true;
}
- if (!(o instanceof BulkResponse)) {
+ if (!(o instanceof BulkResponse that)) {
return false;
}
- BulkResponse that = (BulkResponse) o;
return Objects.equals(bulkSuccessfulResults, that.bulkSuccessfulResults)
&& Objects.equals(bulkErrorResults, that.bulkErrorResults);
}
diff --git a/common/src/test/java/com/netflix/conductor/common/constraints/NameValidatorTest.java b/common/src/test/java/com/netflix/conductor/common/constraints/NameValidatorTest.java
new file mode 100644
index 000000000..2fc196864
--- /dev/null
+++ b/common/src/test/java/com/netflix/conductor/common/constraints/NameValidatorTest.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2024 Conductor Authors.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.common.constraints;
+
+import org.junit.Test;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import jakarta.validation.ConstraintValidatorContext;
+
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.ArgumentMatchers.anyString;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class NameValidatorTest {
+ @Test
+ public void nameWithAllowedCharactersIsValid() {
+ ValidNameConstraint.NameValidator nameValidator = new ValidNameConstraint.NameValidator();
+ assertTrue(nameValidator.isValid("workflowDef", null));
+ }
+
+ @Test
+ public void nonAllowedCharactersInNameIsInvalid() {
+ ValidNameConstraint.NameValidator nameValidator = new ValidNameConstraint.NameValidator();
+ ConstraintValidatorContext context = mock(ConstraintValidatorContext.class);
+ ConstraintValidatorContext.ConstraintViolationBuilder builder =
+ mock(ConstraintValidatorContext.ConstraintViolationBuilder.class);
+ when(context.buildConstraintViolationWithTemplate(anyString())).thenReturn(builder);
+
+ ReflectionTestUtils.setField(nameValidator, "nameValidationEnabled", true);
+
+ assertFalse(nameValidator.isValid("workflowDef@", context));
+ }
+
+ // Null should be tested by @NotEmpty or @NotNull
+ @Test
+ public void nullIsValid() {
+ ValidNameConstraint.NameValidator nameValidator = new ValidNameConstraint.NameValidator();
+ assertTrue(nameValidator.isValid(null, null));
+ }
+}
diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java
index 132e33d99..c2b36e688 100644
--- a/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java
+++ b/common/src/test/java/com/netflix/conductor/common/workflow/WorkflowDefValidatorTest.java
@@ -20,6 +20,7 @@
import org.junit.Before;
import org.junit.Test;
+import org.springframework.test.context.TestPropertySource;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
@@ -33,6 +34,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+@TestPropertySource(properties = "conductor.app.workflow.name-validation.enabled=true")
public class WorkflowDefValidatorTest {
@Before
diff --git a/common/src/test/resources/application.properties b/common/src/test/resources/application.properties
new file mode 100644
index 000000000..7c95d0a4d
--- /dev/null
+++ b/common/src/test/resources/application.properties
@@ -0,0 +1 @@
+conductor.app.workflow.name-validation.enabled=true
diff --git a/conductor-clients/java/conductor-java-sdk/README.md b/conductor-clients/java/conductor-java-sdk/README.md
index 024da8a8c..a6a18ceae 100644
--- a/conductor-clients/java/conductor-java-sdk/README.md
+++ b/conductor-clients/java/conductor-java-sdk/README.md
@@ -31,7 +31,7 @@ For insights into the Conductor project's future plans and upcoming features, ch
We are building this based on feedback from our users and community.
-We encourage everyone to share their thoughts and feedback! You can create new GitHub issues or comment on existing ones. You can also join our [Slack community](https://orkes-conductor.slack.com/) to connect with us.
+We encourage everyone to share their thoughts and feedback! You can create new GitHub issues or comment on existing ones. You can also join our [Slack community](https://join.slack.com/t/orkes-conductor/shared_invite/zt-2vdbx239s-Eacdyqya9giNLHfrCavfaA) to connect with us.
Thank you! ♥
diff --git a/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/client/http/ConductorClient.java b/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/client/http/ConductorClient.java
index 9cf7a7d21..127a55ae2 100644
--- a/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/client/http/ConductorClient.java
+++ b/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/client/http/ConductorClient.java
@@ -101,6 +101,10 @@ protected ConductorClient(Builder<?> builder) {
okHttpBuilder.writeTimeout(builder.writeTimeout, TimeUnit.MILLISECONDS);
}
+ if (builder.callTimeout > -1) {
+ okHttpBuilder.callTimeout(builder.callTimeout, TimeUnit.MILLISECONDS);
+ }
+
if (builder.proxy != null) {
okHttpBuilder.proxy(builder.proxy);
}
@@ -442,6 +446,7 @@ public static class Builder<T extends Builder<T>> {
private long connectTimeout = -1;
private long readTimeout = -1;
private long writeTimeout = -1;
+ private long callTimeout = -1;
private Proxy proxy;
private ConnectionPoolConfig connectionPoolConfig;
private Supplier<ObjectMapper> objectMapperSupplier = () -> new ObjectMapperProvider().getObjectMapper();
@@ -487,6 +492,11 @@ public T writeTimeout(long writeTimeout) {
return self();
}
+ public T callTimeout(long callTimeout) {
+ this.callTimeout = callTimeout;
+ return self();
+ }
+
public T proxy(Proxy proxy) {
this.proxy = proxy;
return self();
diff --git a/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/common/model/BulkResponse.java b/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/common/model/BulkResponse.java
index 99b3f48f9..d27d9dc9e 100644
--- a/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/common/model/BulkResponse.java
+++ b/conductor-clients/java/conductor-java-sdk/conductor-client/src/main/java/com/netflix/conductor/common/model/BulkResponse.java
@@ -21,15 +21,17 @@
/**
* Response object to return a list of succeeded entities and a map of failed ones, including error
* message, for the bulk request.
+ *
+ * @param <T> the type of entities included in the successful results
*/
-public class BulkResponse {
+public class BulkResponse<T> {
/**
* Key - entityId Value - error message processing this entity
*/
private final Map<String, String> bulkErrorResults;
- private final List<String> bulkSuccessfulResults;
+ private final List<T> bulkSuccessfulResults;
private final String message = "Bulk Request has been processed.";
@@ -38,7 +40,7 @@ public BulkResponse() {
this.bulkErrorResults = new HashMap<>();
}
- public List<String> getBulkSuccessfulResults() {
+ public List<T> getBulkSuccessfulResults() {
return bulkSuccessfulResults;
}
@@ -46,8 +48,8 @@ public Map<String, String> getBulkErrorResults() {
return bulkErrorResults;
}
- public void appendSuccessResponse(String id) {
- bulkSuccessfulResults.add(id);
+ public void appendSuccessResponse(T result) {
+ bulkSuccessfulResults.add(result);
}
public void appendFailedResponse(String id, String errorMessage) {
diff --git a/conductor-clients/java/conductor-java-sdk/orkes-client/src/main/java/io/orkes/conductor/client/http/WorkflowBulkResource.java b/conductor-clients/java/conductor-java-sdk/orkes-client/src/main/java/io/orkes/conductor/client/http/WorkflowBulkResource.java
index 31db8c05a..56c3e92a5 100644
--- a/conductor-clients/java/conductor-java-sdk/orkes-client/src/main/java/io/orkes/conductor/client/http/WorkflowBulkResource.java
+++ b/conductor-clients/java/conductor-java-sdk/orkes-client/src/main/java/io/orkes/conductor/client/http/WorkflowBulkResource.java
@@ -31,7 +31,7 @@ class WorkflowBulkResource {
this.client = client;
}
- BulkResponse pauseWorkflows(List<String> workflowIds) {
+ BulkResponse<String> pauseWorkflows(List<String> workflowIds) {
ConductorClientRequest request = ConductorClientRequest.builder()
.method(Method.PUT)
.path("/workflow/bulk/pause")
@@ -44,7 +44,7 @@ BulkResponse pauseWorkflows(List<String> workflowIds) {
return resp.getData();
}
- BulkResponse restartWorkflows(List<String> workflowIds, Boolean useLatestDefinitions) {
+ BulkResponse<String> restartWorkflows(List<String> workflowIds, Boolean useLatestDefinitions) {
ConductorClientRequest request = ConductorClientRequest.builder()
.method(Method.POST)
.path("/workflow/bulk/restart")
@@ -58,7 +58,7 @@ BulkResponse restartWorkflows(List<String> workflowIds, Boolean useLatestDefinitions) {
return resp.getData();
}
- BulkResponse resumeWorkflows(List<String> workflowIds) {
+ BulkResponse<String> resumeWorkflows(List<String> workflowIds) {
ConductorClientRequest request = ConductorClientRequest.builder()
.method(Method.PUT)
.path("/workflow/bulk/resume")
@@ -71,7 +71,7 @@ BulkResponse resumeWorkflows(List<String> workflowIds) {
return resp.getData();
}
- BulkResponse retryWorkflows(List<String> workflowIds) {
+ BulkResponse<String> retryWorkflows(List<String> workflowIds) {
ConductorClientRequest request = ConductorClientRequest.builder()
.method(Method.POST)
.path("/workflow/bulk/retry")
@@ -84,7 +84,7 @@ BulkResponse retryWorkflows(List<String> workflowIds) {
return resp.getData();
}
- public BulkResponse terminateWorkflows(List<String> workflowIds, String reason, boolean triggerFailureWorkflow) {
+ public BulkResponse<String> terminateWorkflows(List<String> workflowIds, String reason, boolean triggerFailureWorkflow) {
ConductorClientRequest request = ConductorClientRequest.builder()
.method(Method.POST)
.path("/workflow/bulk/terminate")
diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java
index 5edd42ef5..413066689 100644
--- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java
+++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java
@@ -69,7 +69,7 @@ void registerTaskDef(
/**
* @param workflowDefList Workflow definitions to be updated.
*/
- BulkResponse updateWorkflowDef(
+ BulkResponse<String> updateWorkflowDef(
@NotNull(message = "WorkflowDef list name cannot be null or empty")
@Size(min = 1, message = "WorkflowDefList is empty")
List<@NotNull(message = "WorkflowDef cannot be null") @Valid WorkflowDef>
diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java
index 1399e9f08..2e9af4acb 100644
--- a/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java
+++ b/core/src/main/java/com/netflix/conductor/service/MetadataServiceImpl.java
@@ -124,8 +124,8 @@ public void updateWorkflowDef(WorkflowDef workflowDef) {
/**
* @param workflowDefList Workflow definitions to be updated.
*/
- public BulkResponse updateWorkflowDef(List<WorkflowDef> workflowDefList) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> updateWorkflowDef(List<WorkflowDef> workflowDefList) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (WorkflowDef workflowDef : workflowDefList) {
try {
updateWorkflowDef(workflowDef);
diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java
index ca240b1a9..a82a8d5f2 100644
--- a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java
+++ b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkService.java
@@ -17,6 +17,7 @@
import org.springframework.validation.annotation.Validated;
import com.netflix.conductor.common.model.BulkResponse;
+import com.netflix.conductor.model.WorkflowModel;
import jakarta.validation.constraints.NotEmpty;
import jakarta.validation.constraints.Size;
@@ -26,7 +27,7 @@ public interface WorkflowBulkService {
int MAX_REQUEST_ITEMS = 1000;
- BulkResponse pauseWorkflow(
+ BulkResponse<String> pauseWorkflow(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -34,7 +35,7 @@ BulkResponse pauseWorkflow(
"Cannot process more than {max} workflows. Please use multiple requests.")
List<String> workflowIds);
- BulkResponse resumeWorkflow(
+ BulkResponse<String> resumeWorkflow(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -42,7 +43,7 @@ BulkResponse resumeWorkflow(
"Cannot process more than {max} workflows. Please use multiple requests.")
List<String> workflowIds);
- BulkResponse restart(
+ BulkResponse<String> restart(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -51,7 +52,7 @@ BulkResponse restart(
List<String> workflowIds,
boolean useLatestDefinitions);
- BulkResponse retry(
+ BulkResponse<String> retry(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -59,7 +60,7 @@ BulkResponse retry(
"Cannot process more than {max} workflows. Please use multiple requests.")
List<String> workflowIds);
- BulkResponse terminate(
+ BulkResponse<String> terminate(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -68,7 +69,7 @@ BulkResponse terminate(
List<String> workflowIds,
String reason);
- BulkResponse deleteWorkflow(
+ BulkResponse<String> deleteWorkflow(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -77,7 +78,7 @@ BulkResponse deleteWorkflow(
List<String> workflowIds,
boolean archiveWorkflow);
- BulkResponse terminateRemove(
+ BulkResponse<String> terminateRemove(
@NotEmpty(message = "WorkflowIds list cannot be null.")
@Size(
max = MAX_REQUEST_ITEMS,
@@ -86,4 +87,13 @@ BulkResponse terminateRemove(
List<String> workflowIds,
String reason,
boolean archiveWorkflow);
+
+ BulkResponse<WorkflowModel> searchWorkflow(
+ @NotEmpty(message = "WorkflowIds list cannot be null.")
+ @Size(
+ max = MAX_REQUEST_ITEMS,
+ message =
+ "Cannot process more than {max} workflows. Please use multiple requests.")
+ List<String> workflowIds,
+ boolean includeTasks);
}
diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java
index fcbdbe3bc..aadacf9a1 100644
--- a/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java
+++ b/core/src/main/java/com/netflix/conductor/service/WorkflowBulkServiceImpl.java
@@ -22,6 +22,7 @@
import com.netflix.conductor.annotations.Trace;
import com.netflix.conductor.common.model.BulkResponse;
import com.netflix.conductor.core.execution.WorkflowExecutor;
+import com.netflix.conductor.model.WorkflowModel;
@Audit
@Trace
@@ -45,9 +46,9 @@ public WorkflowBulkServiceImpl(
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse pauseWorkflow(List<String> workflowIds) {
+ public BulkResponse<String> pauseWorkflow(List<String> workflowIds) {
- BulkResponse bulkResponse = new BulkResponse();
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.pauseWorkflow(workflowId);
@@ -72,8 +73,8 @@ public BulkResponse pauseWorkflow(List<String> workflowIds) {
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse resumeWorkflow(List<String> workflowIds) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> resumeWorkflow(List<String> workflowIds) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.resumeWorkflow(workflowId);
@@ -98,8 +99,8 @@ public BulkResponse resumeWorkflow(List<String> workflowIds) {
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse restart(List<String> workflowIds, boolean useLatestDefinitions) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> restart(List<String> workflowIds, boolean useLatestDefinitions) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.restart(workflowId, useLatestDefinitions);
@@ -123,8 +124,8 @@ public BulkResponse restart(List<String> workflowIds, boolean useLatestDefinitions) {
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse retry(List<String> workflowIds) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> retry(List<String> workflowIds) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.retry(workflowId, false);
@@ -150,8 +151,8 @@ public BulkResponse retry(List<String> workflowIds) {
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse terminate(List<String> workflowIds, String reason) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> terminate(List<String> workflowIds, String reason) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.terminateWorkflow(workflowId, reason);
@@ -174,8 +175,8 @@ public BulkResponse terminate(List<String> workflowIds, String reason) {
* @param workflowIds List of WorkflowIDs of the workflows you want to remove from system.
* @param archiveWorkflow Archives the workflow and associated tasks instead of removing them.
*/
- public BulkResponse deleteWorkflow(List<String> workflowIds, boolean archiveWorkflow) {
- BulkResponse bulkResponse = new BulkResponse();
+ public BulkResponse<String> deleteWorkflow(List<String> workflowIds, boolean archiveWorkflow) {
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowService.deleteWorkflow(
@@ -203,9 +204,9 @@ public BulkResponse deleteWorkflow(List<String> workflowIds, boolean archiveWorkflow) {
* @return bulk response object containing a list of succeeded workflows and a list of failed
* ones with errors
*/
- public BulkResponse terminateRemove(
+ public BulkResponse<String> terminateRemove(
List<String> workflowIds, String reason, boolean archiveWorkflow) {
- BulkResponse bulkResponse = new BulkResponse();
+ BulkResponse<String> bulkResponse = new BulkResponse<>();
for (String workflowId : workflowIds) {
try {
workflowExecutor.terminateWorkflow(workflowId, reason);
@@ -233,4 +234,32 @@ public BulkResponse terminateRemove(
}
return bulkResponse;
}
+
+ /**
+ * Fetch workflow details for given workflowIds.
+ *
+ * @param workflowIds List of workflow IDs whose details should be fetched.
+ * @param includeTasks whether to include each workflow's tasks in the result
+ * @return bulk response object containing a list of workflow details
+ */
+ @Override
+ public BulkResponse<WorkflowModel> searchWorkflow(
+ List<String> workflowIds, boolean includeTasks) {
+ BulkResponse<WorkflowModel> bulkResponse = new BulkResponse<>();
+ for (String workflowId : workflowIds) {
+ try {
+ WorkflowModel workflowModel =
+ workflowExecutor.getWorkflow(workflowId, includeTasks);
+ bulkResponse.appendSuccessResponse(workflowModel);
+ } catch (Exception e) {
+ LOGGER.error(
+ "bulk search exception, workflowId {}, message: {} ",
+ workflowId,
+ e.getMessage(),
+ e);
+ bulkResponse.appendFailedResponse(workflowId, e.getMessage());
+ }
+ }
+ return bulkResponse;
+ }
}
diff --git a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java b/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java
index 255703be5..37ed10450 100644
--- a/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java
+++ b/core/src/test/java/com/netflix/conductor/service/MetadataServiceTest.java
@@ -21,6 +21,7 @@
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;
+import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import com.netflix.conductor.common.metadata.events.EventHandler;
@@ -50,6 +51,7 @@
@SuppressWarnings("SpringJavaAutowiredMembersInspection")
@RunWith(SpringRunner.class)
+@TestPropertySource(properties = "conductor.app.workflow.name-validation.enabled=true")
@EnableAutoConfiguration
public class MetadataServiceTest {
@@ -64,6 +66,7 @@ public MetadataDAO metadataDAO() {
public ConductorProperties properties() {
ConductorProperties properties = mock(ConductorProperties.class);
when(properties.isOwnerEmailMandatory()).thenReturn(true);
+
return properties;
}
@@ -336,7 +339,7 @@ public void testUpdateWorkflowDefWithCaseExpression() {
tasks.add(workflowTask);
workflowDef.setTasks(tasks);
- BulkResponse bulkResponse =
+ BulkResponse<String> bulkResponse =
metadataService.updateWorkflowDef(Collections.singletonList(workflowDef));
}
@@ -366,7 +369,7 @@ public void testUpdateWorkflowDefWithJavscriptEvaluator() {
tasks.add(workflowTask);
workflowDef.setTasks(tasks);
- BulkResponse bulkResponse =
+ BulkResponse<String> bulkResponse =
metadataService.updateWorkflowDef(Collections.singletonList(workflowDef));
}
@@ -415,7 +418,7 @@ public void testRegisterWorkflowDefInvalidName() {
assertTrue(messages.contains("WorkflowTask list cannot be empty"));
assertTrue(
messages.contains(
- "Workflow name cannot contain the following set of characters: ':'"));
+ "Invalid name 'invalid:name'. Allowed characters are alphanumeric, underscores, spaces, hyphens, and special characters like <, >, {, }, #"));
throw ex;
}
fail("metadataService.registerWorkflowDef did not throw ConstraintViolationException !");
@@ -434,7 +437,7 @@ public void testValidateWorkflowDefInvalidName() {
assertTrue(messages.contains("WorkflowTask list cannot be empty"));
assertTrue(
messages.contains(
- "Workflow name cannot contain the following set of characters: ':'"));
+ "Invalid name 'invalid:name'. Allowed characters are alphanumeric, underscores, spaces, hyphens, and special characters like <, >, {, }, #"));
throw ex;
}
fail("metadataService.validateWorkflowDef did not throw ConstraintViolationException !");
diff --git a/kafka/src/test/groovy/com/netflix/conductor/test/integration/KafkaPublishTaskSpec.groovy b/kafka/src/test/groovy/com/netflix/conductor/test/integration/KafkaPublishTaskSpec.groovy
index 8087dd9d4..671a6f212 100644
--- a/kafka/src/test/groovy/com/netflix/conductor/test/integration/KafkaPublishTaskSpec.groovy
+++ b/kafka/src/test/groovy/com/netflix/conductor/test/integration/KafkaPublishTaskSpec.groovy
@@ -16,15 +16,15 @@ import com.fasterxml.jackson.databind.ObjectMapper
import com.netflix.conductor.common.metadata.tasks.TaskDef
import com.netflix.conductor.common.metadata.tasks.TaskResult
import com.netflix.conductor.common.metadata.tasks.TaskType
-import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest
import com.netflix.conductor.common.metadata.workflow.WorkflowDef
import com.netflix.conductor.common.metadata.workflow.WorkflowTask
import com.netflix.conductor.common.run.Workflow
-import com.netflix.conductor.core.execution.StartWorkflowInput
import com.netflix.conductor.test.base.AbstractSpecification
import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.TestPropertySource
import spock.lang.Shared
+@TestPropertySource(properties = "conductor.app.workflow.name-validation.enabled=true")
class KafkaPublishTaskSpec extends AbstractSpecification {
@Autowired
diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java
index 680623f43..1cc70310a 100644
--- a/mysql-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java
+++ b/mysql-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/mysql/MySQLGrpcEndToEndTest.java
@@ -35,7 +35,8 @@
"spring.datasource.password=root",
"spring.datasource.hikari.maximum-pool-size=8",
"spring.datasource.hikari.minimum-idle=300000",
- "conductor.elasticsearch.version=7"
+ "conductor.elasticsearch.version=7",
+ "conductor.app.workflow.name-validation.enabled=true"
})
public class MySQLGrpcEndToEndTest extends AbstractGrpcEndToEndTest {
diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/dao/PostgresLockDAOTest.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/dao/PostgresLockDAOTest.java
index 695f15f10..a6fb4b8a3 100644
--- a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/dao/PostgresLockDAOTest.java
+++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/dao/PostgresLockDAOTest.java
@@ -48,7 +48,8 @@
@TestPropertySource(
properties = {
"conductor.workflow-execution-lock.type=postgres",
- "spring.flyway.clean-disabled=false"
+ "spring.flyway.clean-disabled=false",
+ "conductor.app.workflow.name-validation.enabled=true"
})
@SpringBootTest
public class PostgresLockDAOTest {
diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java b/postgres-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java
index 00651d34f..9b00cbbcc 100644
--- a/postgres-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java
+++ b/postgres-persistence/src/test/java/com/netflix/conductor/test/integration/grpc/postgres/PostgresGrpcEndToEndTest.java
@@ -39,7 +39,8 @@
"spring.datasource.password=postgres",
"spring.datasource.hikari.maximum-pool-size=8",
"spring.datasource.hikari.minimum-idle=300000",
- "spring.flyway.clean-disabled=true"
+ "spring.flyway.clean-disabled=true",
+ "conductor.app.workflow.name-validation.enabled=true"
})
public class PostgresGrpcEndToEndTest extends AbstractGrpcEndToEndTest {
diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java
index 5ffda1ed4..234351883 100644
--- a/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java
+++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/MetadataResource.java
@@ -59,7 +59,7 @@ public void validate(@RequestBody WorkflowDef workflowDef) {
@PutMapping("/workflow")
@Operation(summary = "Create or update workflow definition")
- public BulkResponse update(@RequestBody List<WorkflowDef> workflowDefs) {
+ public BulkResponse<String> update(@RequestBody List<WorkflowDef> workflowDefs) {
return metadataService.updateWorkflowDef(workflowDefs);
}
diff --git a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java
index 9005b9343..832e9d1da 100644
--- a/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java
+++ b/rest/src/main/java/com/netflix/conductor/rest/controllers/WorkflowBulkResource.java
@@ -23,6 +23,7 @@
import org.springframework.web.bind.annotation.RestController;
import com.netflix.conductor.common.model.BulkResponse;
+import com.netflix.conductor.model.WorkflowModel;
import com.netflix.conductor.service.WorkflowBulkService;
import io.swagger.v3.oas.annotations.Operation;
@@ -49,7 +50,7 @@ public WorkflowBulkResource(WorkflowBulkService workflowBulkService) {
*/
@PutMapping("/pause")
@Operation(summary = "Pause the list of workflows")
- public BulkResponse pauseWorkflow(@RequestBody List<String> workflowIds) {
+ public BulkResponse<String> pauseWorkflow(@RequestBody List<String> workflowIds) {
return workflowBulkService.pauseWorkflow(workflowIds);
}
@@ -62,7 +63,7 @@ public BulkResponse pauseWorkflow(@RequestBody List<String> workflowIds) {
*/
@PutMapping("/resume")
@Operation(summary = "Resume the list of workflows")
- public BulkResponse resumeWorkflow(@RequestBody List<String> workflowIds) {
+ public BulkResponse<String> resumeWorkflow(@RequestBody List<String> workflowIds) {
return workflowBulkService.resumeWorkflow(workflowIds);
}
@@ -76,7 +77,7 @@ public BulkResponse resumeWorkflow(@RequestBody List<String> workflowIds) {
*/
@PostMapping("/restart")
@Operation(summary = "Restart the list of completed workflow")
- public BulkResponse restart(
+ public BulkResponse<String> restart(
@RequestBody List<String> workflowIds,
@RequestParam(value = "useLatestDefinitions", defaultValue = "false", required = false)
boolean useLatestDefinitions) {
@@ -92,7 +93,7 @@ public BulkResponse restart(
*/
@PostMapping("/retry")
@Operation(summary = "Retry the last failed task for each workflow from the list")
- public BulkResponse retry(@RequestBody List<String> workflowIds) {
+ public BulkResponse<String> retry(@RequestBody List<String> workflowIds) {
return workflowBulkService.retry(workflowIds);
}
@@ -107,7 +108,7 @@ public BulkResponse retry(@RequestBody List<String> workflowIds) {
*/
@PostMapping("/terminate")
@Operation(summary = "Terminate workflows execution")
- public BulkResponse terminate(
+ public BulkResponse<String> terminate(
@RequestBody List<String> workflowIds,
@RequestParam(value = "reason", required = false) String reason) {
return workflowBulkService.terminate(workflowIds, reason);
@@ -120,7 +121,7 @@ public BulkResponse terminate(
* @return bulk response object containing a list of successfully deleted workflows
*/
@DeleteMapping("/remove")
- public BulkResponse deleteWorkflow(
+ public BulkResponse<String> deleteWorkflow(
@RequestBody List<String> workflowIds,
@RequestParam(value = "archiveWorkflow", defaultValue = "true", required = false)
boolean archiveWorkflow) {
@@ -134,11 +135,25 @@ public BulkResponse deleteWorkflow(
* @return bulk response object containing a list of successfully deleted workflows
*/
@DeleteMapping("/terminate-remove")
- public BulkResponse terminateRemove(
+ public BulkResponse<String> terminateRemove(
@RequestBody List<String> workflowIds,
@RequestParam(value = "archiveWorkflow", defaultValue = "true", required = false)
boolean archiveWorkflow,
@RequestParam(value = "reason", required = false) String reason) {
return workflowBulkService.terminateRemove(workflowIds, reason, archiveWorkflow);
}
+
+ /**
+ * Fetch workflow details for the given list of workflow IDs.
+ *
+ * @param workflowIds - list of workflow Ids to be searched
+ * @return bulk response object containing a list of workflows
+ */
+ @PostMapping("/search")
+ public BulkResponse<WorkflowModel> searchWorkflow(
+ @RequestBody List<String> workflowIds,
+ @RequestParam(value = "includeTasks", defaultValue = "true", required = false)
+ boolean includeTasks) {
+ return workflowBulkService.searchWorkflow(workflowIds, includeTasks);
+ }
}
diff --git a/server/src/main/resources/application.properties b/server/src/main/resources/application.properties
index 4fcb33594..e342c390e 100644
--- a/server/src/main/resources/application.properties
+++ b/server/src/main/resources/application.properties
@@ -72,6 +72,8 @@ conductor.default-event-queue.type=sqs
#disable locking during workflow execution
conductor.app.workflow-execution-lock-enabled=false
conductor.workflow-execution-lock.type=noop_lock
+# enable name validation on workflow/task definitions
+conductor.app.workflow.name-validation.enabled=false
#Redis cluster settings for locking module
# conductor.redis-lock.serverType=single
diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy
index 0149352bd..2b82a7867 100644
--- a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy
+++ b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/QueueResiliencySpec.groovy
@@ -14,6 +14,7 @@ package com.netflix.conductor.test.resiliency
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.http.HttpStatus
+import org.springframework.test.context.TestPropertySource
import com.netflix.conductor.common.metadata.tasks.Task
import com.netflix.conductor.common.metadata.tasks.TaskResult
@@ -36,6 +37,7 @@ import com.netflix.conductor.test.base.AbstractResiliencySpecification
* 2. Succeeds
* 3. Doesn't involve QueueDAO
*/
+@TestPropertySource(properties = "conductor.app.workflow.name-validation.enabled=true")
class QueueResiliencySpec extends AbstractResiliencySpecification {
@Autowired
diff --git a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy
index 4695d6587..1f6d311e4 100644
--- a/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy
+++ b/test-harness/src/test/groovy/com/netflix/conductor/test/resiliency/TaskResiliencySpec.groovy
@@ -13,6 +13,7 @@
package com.netflix.conductor.test.resiliency
import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.test.context.TestPropertySource
import com.netflix.conductor.common.metadata.tasks.Task
import com.netflix.conductor.common.run.Workflow
@@ -22,7 +23,7 @@ import com.netflix.conductor.test.base.AbstractResiliencySpecification
import spock.lang.Shared
import static com.netflix.conductor.test.util.WorkflowTestUtil.verifyPolledAndAcknowledgedTask
-
+@TestPropertySource(properties = "conductor.app.workflow.name-validation.enabled=true")
class TaskResiliencySpec extends AbstractResiliencySpecification {
@Autowired
diff --git a/test-harness/src/test/resources/application-integrationtest.properties b/test-harness/src/test/resources/application-integrationtest.properties
index b209c8054..e6390edce 100644
--- a/test-harness/src/test/resources/application-integrationtest.properties
+++ b/test-harness/src/test/resources/application-integrationtest.properties
@@ -35,6 +35,7 @@ conductor.workflow-reconciler.enabled=true
conductor.workflow-repair-service.enabled=false
conductor.app.workflow-execution-lock-enabled=false
+conductor.app.workflow.name-validation.enabled=true
conductor.app.workflow-input-payload-size-threshold=10KB
conductor.app.max-workflow-input-payload-size-threshold=10240KB
diff --git a/ui/package.json b/ui/package.json
index 623a5c6dc..6b384d93e 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -20,12 +20,13 @@
"moment": "^2.29.2",
"monaco-editor": "^0.44.0",
"node-forge": "^1.3.0",
+ "orkes-workflow-visualizer": "^1.0.0",
"parse-svg-path": "^0.1.2",
"prop-types": "^15.7.2",
- "react": "^16.8.0",
+ "react": "^18.3.1",
"react-cron-generator": "^1.3.5",
"react-data-table-component": "^6.11.8",
- "react-dom": "^16.8.0",
+ "react-dom": "^18.3.1",
"react-helmet": "^6.1.0",
"react-is": "^17.0.2",
"react-query": "^3.19.4",
diff --git a/ui/src/components/diagram/WorkflowDAG.js b/ui/src/components/diagram/WorkflowDAG.js
index cabf35fa0..684fec032 100644
--- a/ui/src/components/diagram/WorkflowDAG.js
+++ b/ui/src/components/diagram/WorkflowDAG.js
@@ -580,7 +580,7 @@ export default class WorkflowDAG {
return this.taskResultsById.get(taskPointer.id);
} else {
const node = this.graph.node(taskPointer.ref);
- return _.last(node.taskResults);
+ return _.last(node?.taskResults);
}
}
}
diff --git a/ui/src/pages/definition/WorkflowDefinition.jsx b/ui/src/pages/definition/WorkflowDefinition.jsx
index 77963f518..7f18244ca 100644
--- a/ui/src/pages/definition/WorkflowDefinition.jsx
+++ b/ui/src/pages/definition/WorkflowDefinition.jsx
@@ -12,7 +12,6 @@ import {
useWorkflowNamesAndVersions,
} from "../../data/workflow";
import WorkflowDAG from "../../components/diagram/WorkflowDAG";
-import WorkflowGraph from "../../components/diagram/WorkflowGraph";
import ResetConfirmationDialog from "./ResetConfirmationDialog";
import {
configureMonaco,
@@ -23,6 +22,7 @@ import SaveWorkflowDialog from "./SaveWorkflowDialog";
import update from "immutability-helper";
import { usePushHistory } from "../../components/NavLink";
import { timestampRenderer } from "../../utils/helpers";
+import { WorkflowVisualizer } from "orkes-workflow-visualizer";
import {
KeyboardArrowLeftRounded,
@@ -67,8 +67,8 @@ const useStyles = makeStyles({
gap: 8,
},
editorLineDecorator: {
- backgroundColor: "rgb(45, 45, 45, 0.1)"
- }
+ backgroundColor: "rgb(45, 45, 45, 0.1)",
+ },
});
const actions = {
@@ -240,21 +240,26 @@ export default function Workflow() {
};
const handleWorkflowNodeClick = (node) => {
- let editor = editorRef.current.getModel()
- let searchResult = editor.findMatches(`"taskReferenceName": "${node.ref}"`)
- if (searchResult.length){
- editorRef.current.revealLineInCenter(searchResult[0]?.range?.startLineNumber, 0);
- setDecorations(editorRef.current.deltaDecorations(decorations, [
- {
- range: searchResult[0]?.range,
- options: {
- isWholeLine: true,
- inlineClassName: classes.editorLineDecorator
- }
- }
- ]))
+ let editor = editorRef.current.getModel();
+ let searchResult = editor.findMatches(`"taskReferenceName": "${node.ref}"`);
+ if (searchResult.length) {
+ editorRef.current.revealLineInCenter(
+ searchResult[0]?.range?.startLineNumber,
+ 0
+ );
+ setDecorations(
+ editorRef.current.deltaDecorations(decorations, [
+ {
+ range: searchResult[0]?.range,
+ options: {
+ isWholeLine: true,
+ inlineClassName: classes.editorLineDecorator,
+ },
+ },
+ ])
+ );
}
- }
+ };
return (
<>
@@ -369,8 +374,17 @@ export default function Workflow() {
className={classes.resizer}
onMouseDown={(e) => handleMouseDown(e)}
/>
-
- {dag &&
}
+
+ {dag && dag?.workflowDef && (
+ handleWorkflowNodeClick({ ref: data?.id })}
+ />
+ )}
>
diff --git a/ui/src/pages/execution/Execution.jsx b/ui/src/pages/execution/Execution.jsx
index 656713697..b05970bb5 100644
--- a/ui/src/pages/execution/Execution.jsx
+++ b/ui/src/pages/execution/Execution.jsx
@@ -126,6 +126,7 @@ export default function Execution() {
const [isFullWidth, setIsFullWidth] = useState(false);
const [isResizing, setIsResizing] = useState(false);
const [drawerWidth, setDrawerWidth] = useState(INIT_DRAWER_WIDTH);
+ const [selectedNode, setSelectedNode] = useState();
const [tabIndex, setTabIndex] = useQueryState("tabIndex", 0);
const [selectedTaskRison, setSelectedTaskRison] = useQueryState("task", "");
@@ -222,7 +223,12 @@ export default function Execution() {
)}
- Definition
+
+ Definition
+
Refresh
@@ -260,6 +266,7 @@ export default function Execution() {
execution={execution}
setSelectedTask={setSelectedTask}
selectedTask={selectedTask}
+ setSelectedNode={setSelectedNode}
/>
)}
{tabIndex === 1 && }
@@ -302,6 +309,8 @@ export default function Execution() {
className={classes.rightPanel}
selectedTask={selectedTask}
dag={dag}
+ execution={execution}
+ selectedNode={selectedNode}
onTaskChange={setSelectedTask}
/>
diff --git a/ui/src/pages/execution/RightPanel.jsx b/ui/src/pages/execution/RightPanel.jsx
index 7e2e0572b..b3dc7b79e 100644
--- a/ui/src/pages/execution/RightPanel.jsx
+++ b/ui/src/pages/execution/RightPanel.jsx
@@ -8,6 +8,10 @@ import TaskLogs from "./TaskLogs";
import { makeStyles } from "@material-ui/styles";
import _ from "lodash";
import TaskPollData from "./TaskPollData";
+import {
+ pendingTaskSelection,
+ taskWithLatestIteration,
+} from "../../utils/helpers";
const useStyles = makeStyles({
banner: {
@@ -24,7 +28,13 @@ const useStyles = makeStyles({
},
});
-export default function RightPanel({ selectedTask, dag, onTaskChange }) {
+export default function RightPanel({
+ selectedTask,
+ dag,
+ execution,
+ onTaskChange,
+ selectedNode,
+}) {
const [tabIndex, setTabIndex] = useState("summary");
const classes = useStyles();
@@ -33,10 +43,11 @@ export default function RightPanel({ selectedTask, dag, onTaskChange }) {
setTabIndex("summary"); // Reset to Status Tab on ref change
}, [selectedTask]);
- const taskResult = useMemo(
- () => dag && dag.resolveTaskResult(selectedTask),
- [dag, selectedTask]
- );
+ const taskResult =
+ selectedNode?.data?.task?.executionData?.status === "PENDING"
+ ? pendingTaskSelection(selectedNode?.data?.task)
+ : taskWithLatestIteration(execution?.tasks, selectedTask?.ref);
+
const dfOptions = useMemo(
() => dag && dag.getSiblings(selectedTask),
[dag, selectedTask]
diff --git a/ui/src/pages/execution/TaskDetails.jsx b/ui/src/pages/execution/TaskDetails.jsx
index 6948f645f..8cbfa192f 100644
--- a/ui/src/pages/execution/TaskDetails.jsx
+++ b/ui/src/pages/execution/TaskDetails.jsx
@@ -1,9 +1,13 @@
-import React, { useState } from "react";
+import { useState } from "react";
import { Tabs, Tab, Paper } from "../../components";
import Timeline from "./Timeline";
import TaskList from "./TaskList";
-import WorkflowGraph from "../../components/diagram/WorkflowGraph";
import { makeStyles } from "@material-ui/styles";
+import { WorkflowVisualizer } from "orkes-workflow-visualizer";
+import {
+ pendingTaskSelection,
+ taskWithLatestIteration,
+} from "../../utils/helpers";
const useStyles = makeStyles({
taskWrapper: {
@@ -18,6 +22,7 @@ export default function TaskDetails({
dag,
selectedTask,
setSelectedTask,
+ setSelectedNode,
}) {
const [tabIndex, setTabIndex] = useState(0);
const classes = useStyles();
@@ -32,11 +37,23 @@ export default function TaskDetails({
{tabIndex === 0 && (
- {
+ const selectedTaskRefName =
+ data?.data?.task?.executionData?.status === "PENDING"
+ ? pendingTaskSelection(data?.data?.task)?.workflowTask
+ ?.taskReferenceName
+ : taskWithLatestIteration(execution?.tasks, data?.id)
+ ?.referenceTaskName;
+ setSelectedNode(data);
+ setSelectedTask({ ref: selectedTaskRefName });
+ }}
/>
)}
{tabIndex === 1 && (
diff --git a/ui/src/utils/helpers.js b/ui/src/utils/helpers.js
index 11bf45e94..407732f63 100644
--- a/ui/src/utils/helpers.js
+++ b/ui/src/utils/helpers.js
@@ -1,6 +1,7 @@
import { format, formatDuration, intervalToDuration } from "date-fns";
import _ from "lodash";
-import packageJson from '../../package.json';
+import packageJson from "../../package.json";
+import _nth from "lodash/nth";
export function timestampRenderer(date) {
if (_.isNil(date)) return null;
@@ -91,9 +92,45 @@ export function isEmptyIterable(iterable) {
}
export function getBasename() {
- let basename = '/';
- try{
+ let basename = "/";
+ try {
basename = new URL(packageJson.homepage).pathname;
- } catch(e) {}
- return _.isEmpty(basename) ? '/' : basename;
+ } catch (e) {}
+ return _.isEmpty(basename) ? "/" : basename;
}
+
+export const taskWithLatestIteration = (tasksList, taskReferenceName) => {
+ const filteredTasks = tasksList?.filter(
+ (task) =>
+ task?.workflowTask?.taskReferenceName === taskReferenceName ||
+ task?.referenceTaskName === taskReferenceName
+ );
+
+ if (filteredTasks && filteredTasks.length === 1) {
+ // task without any retry/iteration
+ return _nth(filteredTasks, 0);
+ } else if (filteredTasks && filteredTasks.length > 1) {
+ const result = filteredTasks.reduce(
+ (acc, task, idx) => {
+ if (task?.seq && acc?.seqNumber < Number(task.seq)) {
+ return { seqNumber: Number(task.seq), idx };
+ }
+ return acc;
+ },
+ { seqNumber: 0, idx: -1 }
+ );
+
+ if (result?.idx > -1) {
+ return _nth(filteredTasks, result.idx);
+ }
+ }
+ return undefined;
+};
+
+export const pendingTaskSelection = (task) => {
+ const result = {
+ ...task?.executionData,
+ workflowTask: task,
+ };
+ return result;
+};
diff --git a/workflow-event-listener/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java b/workflow-event-listener/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java
index 33d909a84..38e5f737b 100644
--- a/workflow-event-listener/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java
+++ b/workflow-event-listener/src/test/java/com/netflix/conductor/test/listener/WorkflowStatusPublisherIntegrationTest.java
@@ -58,7 +58,8 @@
"conductor.workflow-status-listener.type=queue_publisher",
"conductor.workflow-status-listener.queue-publisher.successQueue=dummy",
"conductor.workflow-status-listener.queue-publisher.failureQueue=dummy",
- "conductor.workflow-status-listener.queue-publisher.finalizeQueue=final"
+ "conductor.workflow-status-listener.queue-publisher.finalizeQueue=final",
+ "conductor.app.workflow.name-validation.enabled=true"
})
@TestPropertySource(locations = "classpath:application-integrationtest.properties")
public class WorkflowStatusPublisherIntegrationTest {