From dfa795582e68e58e77eb6e45d1fba1813c419dc1 Mon Sep 17 00:00:00 2001 From: Anoop Panicker Date: Wed, 14 Apr 2021 14:31:00 -0700 Subject: [PATCH] sync to 2.31 98c75e9216a59e42f281fc31fcdac3e24f9ca55e --- .../cassandra/config/CassandraProperties.java | 2 +- .../core/execution/WorkflowExecutor.java | 2 +- .../mysql/config/MySQLDataSourceProvider.java | 26 +- .../mysql/config/MySQLProperties.java | 12 +- .../mysql/util/MySQLDAOTestUtil.java | 30 +- polyglot-clients/go/README.md | 109 + polyglot-clients/go/conductorhttpclient.go | 411 ++++ polyglot-clients/go/conductorworker.go | 113 + polyglot-clients/go/go.mod | 3 + polyglot-clients/go/httpclient/httpclient.go | 168 ++ .../go/startclient/startclient.go | 26 + .../go/task/sample/task_1_exec.go | 34 + .../go/task/sample/task_2_exec.go | 34 + polyglot-clients/go/task/task.go | 101 + .../go/task/task_exec_template.go | 39 + polyglot-clients/go/task/taskresult.go | 72 + polyglot-clients/gogrpc/.gitignore | 1 + polyglot-clients/gogrpc/Gopkg.lock | 220 ++ polyglot-clients/gogrpc/Gopkg.toml | 12 + polyglot-clients/gogrpc/Makefile | 36 + polyglot-clients/gogrpc/README.md | 7 + polyglot-clients/gogrpc/conductor/client.go | 77 + .../conductor/grpc/events/event_service.pb.go | 991 +++++++++ .../grpc/metadata/metadata_service.pb.go | 867 ++++++++ .../gogrpc/conductor/grpc/search/search.pb.go | 113 + .../conductor/grpc/tasks/task_service.pb.go | 1757 ++++++++++++++++ .../grpc/workflows/workflow_service.pb.go | 1822 +++++++++++++++++ .../conductor/model/dynamicforkjointask.pb.go | 124 ++ .../model/dynamicforkjointasklist.pb.go | 82 + .../conductor/model/eventexecution.pb.go | 185 ++ .../gogrpc/conductor/model/eventhandler.pb.go | 379 ++++ .../gogrpc/conductor/model/polldata.pb.go | 106 + .../model/rerunworkflowrequest.pb.go | 128 ++ .../conductor/model/skiptaskrequest.pb.go | 119 ++ .../model/startworkflowrequest.pb.go | 137 ++ .../conductor/model/subworkflowparams.pb.go | 89 + .../gogrpc/conductor/model/task.pb.go | 422 ++++ 
.../gogrpc/conductor/model/taskdef.pb.go | 254 +++ .../gogrpc/conductor/model/taskexeclog.pb.go | 98 + .../gogrpc/conductor/model/taskresult.pb.go | 192 ++ .../gogrpc/conductor/model/tasksummary.pb.go | 217 ++ .../gogrpc/conductor/model/workflow.pb.go | 289 +++ .../gogrpc/conductor/model/workflowdef.pb.go | 161 ++ .../conductor/model/workflowsummary.pb.go | 200 ++ .../gogrpc/conductor/model/workflowtask.pb.go | 308 +++ polyglot-clients/gogrpc/conductor/worker.go | 177 ++ .../gogrpc/conductor/worker_test.go | 197 ++ polyglot-clients/gogrpc/go.mod | 6 + polyglot-clients/gogrpc/go.sum | 21 + polyglot-clients/gogrpc/tools.go | 9 + polyglot-clients/python/.gitignore | 1 + polyglot-clients/python/README.md | 68 + .../python/conductor/ConductorWorker.py | 182 ++ polyglot-clients/python/conductor/__init__.py | 19 + .../python/conductor/conductor.py | 381 ++++ polyglot-clients/python/conductor_shell.py | 56 + .../python/kitchensink_workers.py | 27 + polyglot-clients/python/setup.cfg | 2 + polyglot-clients/python/setup.py | 45 + polyglot-clients/python/test_conductor.py | 89 + .../config/PostgresDataSourceProvider.java | 30 +- .../postgres/config/PostgresProperties.java | 12 +- .../postgres/performance/PerformanceTest.java | 11 +- .../postgres/util/PostgresDAOTestUtil.java | 28 +- .../config/DynomiteClusterConfiguration.java | 7 +- .../redis/config/RedisProperties.java | 44 +- 66 files changed, 11908 insertions(+), 79 deletions(-) create mode 100644 polyglot-clients/go/README.md create mode 100644 polyglot-clients/go/conductorhttpclient.go create mode 100644 polyglot-clients/go/conductorworker.go create mode 100644 polyglot-clients/go/go.mod create mode 100644 polyglot-clients/go/httpclient/httpclient.go create mode 100644 polyglot-clients/go/startclient/startclient.go create mode 100644 polyglot-clients/go/task/sample/task_1_exec.go create mode 100644 polyglot-clients/go/task/sample/task_2_exec.go create mode 100644 polyglot-clients/go/task/task.go create mode 100644 
polyglot-clients/go/task/task_exec_template.go create mode 100644 polyglot-clients/go/task/taskresult.go create mode 100644 polyglot-clients/gogrpc/.gitignore create mode 100644 polyglot-clients/gogrpc/Gopkg.lock create mode 100644 polyglot-clients/gogrpc/Gopkg.toml create mode 100644 polyglot-clients/gogrpc/Makefile create mode 100644 polyglot-clients/gogrpc/README.md create mode 100644 polyglot-clients/gogrpc/conductor/client.go create mode 100644 polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/polldata.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/task.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/taskdef.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/taskresult.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go create mode 100644 
polyglot-clients/gogrpc/conductor/model/workflow.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go create mode 100644 polyglot-clients/gogrpc/conductor/worker.go create mode 100644 polyglot-clients/gogrpc/conductor/worker_test.go create mode 100644 polyglot-clients/gogrpc/go.mod create mode 100644 polyglot-clients/gogrpc/go.sum create mode 100644 polyglot-clients/gogrpc/tools.go create mode 100644 polyglot-clients/python/.gitignore create mode 100644 polyglot-clients/python/README.md create mode 100644 polyglot-clients/python/conductor/ConductorWorker.py create mode 100644 polyglot-clients/python/conductor/__init__.py create mode 100644 polyglot-clients/python/conductor/conductor.py create mode 100644 polyglot-clients/python/conductor_shell.py create mode 100644 polyglot-clients/python/kitchensink_workers.py create mode 100644 polyglot-clients/python/setup.cfg create mode 100644 polyglot-clients/python/setup.py create mode 100644 polyglot-clients/python/test_conductor.py diff --git a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java index ba8340f117..559bee67df 100644 --- a/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java +++ b/cassandra-persistence/src/main/java/com/netflix/conductor/cassandra/config/CassandraProperties.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 Netflix, Inc. + * Copyright 2021 Netflix, Inc. *

* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index bf4c0db7f8..4cdb326603 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -1236,8 +1236,8 @@ List cancelNonTerminalTasks(Workflow workflow) { } if (erroredTasks.isEmpty()) { try { - queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); workflowStatusListener.onWorkflowFinalizedIfEnabled(workflow); + queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); } catch (Exception e) { LOGGER.error("Error removing workflow: {} from decider queue", workflow.getWorkflowId(), e); } diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java index f5f599bae1..1836bc348e 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLDataSourceProvider.java @@ -15,13 +15,14 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; -import java.util.concurrent.ThreadFactory; -import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.sql.DataSource; +import java.util.concurrent.ThreadFactory; + public class MySQLDataSourceProvider { private static final Logger LOGGER = 
LoggerFactory.getLogger(MySQLDataSourceProvider.class); @@ -62,9 +63,9 @@ private HikariConfig createConfiguration() { hikariConfig.setAutoCommit(properties.isAutoCommit()); ThreadFactory threadFactory = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("hikari-mysql-%d") - .build(); + .setDaemon(true) + .setNameFormat("hikari-mysql-%d") + .build(); hikariConfig.setThreadFactory(threadFactory); return hikariConfig; @@ -78,16 +79,15 @@ private void flywayMigrate(DataSource dataSource) { return; } - FluentConfiguration fluentConfiguration = Flyway.configure() - .dataSource(dataSource) - .placeholderReplacement(false); + String flywayTable = properties.getFlywayTable(); + LOGGER.debug("Using Flyway migration table '{}'", flywayTable); - properties.getFlywayTable().ifPresent(tableName -> { - LOGGER.debug("Using Flyway migration table '{}'", tableName); - fluentConfiguration.table(tableName); - }); + FluentConfiguration fluentConfiguration = Flyway.configure() + .table(flywayTable) + .dataSource(dataSource) + .placeholderReplacement(false); - Flyway flyway = new Flyway(fluentConfiguration); + Flyway flyway = fluentConfiguration.load(); flyway.migrate(); } } diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLProperties.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLProperties.java index 96875e561c..ae9e543392 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLProperties.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/config/MySQLProperties.java @@ -12,12 +12,12 @@ */ package com.netflix.conductor.mysql.config; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.convert.DurationUnit; + import java.sql.Connection; import java.time.Duration; import java.time.temporal.ChronoUnit; -import java.util.Optional; -import org.springframework.boot.context.properties.ConfigurationProperties; 
-import org.springframework.boot.convert.DurationUnit; @ConfigurationProperties("conductor.mysql") public class MySQLProperties { @@ -45,7 +45,7 @@ public class MySQLProperties { /** * Used to override the default flyway migration table */ - private String flywayTable = null; + private String flywayTable = "schema_version"; // The defaults are currently in line with the HikariConfig defaults, which are unfortunately private. /** @@ -125,8 +125,8 @@ public void setFlywayEnabled(boolean flywayEnabled) { this.flywayEnabled = flywayEnabled; } - public Optional getFlywayTable() { - return Optional.ofNullable(flywayTable); + public String getFlywayTable() { + return flywayTable; } public void setFlywayTable(String flywayTable) { diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java b/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java index c19c7cd187..09845dae49 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/mysql/util/MySQLDAOTestUtil.java @@ -12,24 +12,25 @@ */ package com.netflix.conductor.mysql.util; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.mysql.config.MySQLProperties; import com.zaxxer.hikari.HikariDataSource; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.time.Duration; -import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.containers.MySQLContainer; +import javax.sql.DataSource; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; +import 
java.time.Duration; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class MySQLDAOTestUtil { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLDAOTestUtil.class); @@ -40,7 +41,7 @@ public class MySQLDAOTestUtil { public MySQLDAOTestUtil(MySQLContainer mySQLContainer, ObjectMapper objectMapper, String dbName) { properties = mock(MySQLProperties.class); when(properties.getJdbcUrl()).thenReturn(mySQLContainer.getJdbcUrl() - + "?useSSL=false&useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC"); + + "?useSSL=false&useUnicode=true&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC"); when(properties.getJdbcUsername()).thenReturn(mySQLContainer.getUsername()); when(properties.getJdbcPassword()).thenReturn(mySQLContainer.getPassword()); when(properties.getTaskDefCacheRefreshInterval()).thenReturn(Duration.ofSeconds(60)); @@ -66,10 +67,11 @@ private HikariDataSource getDataSource(MySQLProperties properties) { private void flywayMigrate(DataSource dataSource) { FluentConfiguration fluentConfiguration = Flyway.configure() - .dataSource(dataSource) - .placeholderReplacement(false); + .table("schema_version") + .dataSource(dataSource) + .placeholderReplacement(false); - Flyway flyway = new Flyway(fluentConfiguration); + Flyway flyway = fluentConfiguration.load(); flyway.migrate(); } @@ -89,7 +91,7 @@ public void resetAllData() { LOGGER.info("Resetting data for test"); try (Connection connection = dataSource.getConnection()) { try (ResultSet rs = connection.prepareStatement("SHOW TABLES").executeQuery(); - PreparedStatement keysOn = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=1")) { + PreparedStatement keysOn = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=1")) { try (PreparedStatement keysOff = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=0")) { keysOff.execute(); while (rs.next()) { diff --git 
a/polyglot-clients/go/README.md b/polyglot-clients/go/README.md new file mode 100644 index 0000000000..93b1381561 --- /dev/null +++ b/polyglot-clients/go/README.md @@ -0,0 +1,109 @@ +# Go client for Conductor +Go client for Conductor provides two sets of functions: + +1. Workflow Management APIs (start, terminate, get workflow status etc.) +2. Worker execution framework + +## Prerequisites +Go must be installed and GOPATH env variable set. + +## Install + +```shell +go get github.com/netflix/conductor/client/go +``` +This will create a Go project under $GOPATH/src and download any dependencies. + +## Run + +```shell +go run $GOPATH/src/netflix-conductor/client/go/startclient/startclient.go +``` + +## Using Workflow Management API +Go struct ```ConductorHttpClient``` provides client API calls to the conductor server to start and manage workflows and tasks. + +### Example +```go +package main + +import ( + conductor "github.com/netflix/conductor/client/go" +) + +func main() { + conductorClient := conductor.NewConductorHttpClient("http://localhost:8080") + + // Example API that will print out workflow definition meta + conductorClient.GetAllWorkflowDefs() +} + +``` + +## Task Worker Execution +Task Worker execution APIs facilitates execution of a task worker using go. The API provides necessary tools to poll for tasks at a specified interval and executing the go worker in a separate goroutine. + +### Example +The following go code demonstrates workers for tasks "task_1" and "task_2". 
+ +```go +package task + +import ( + "fmt" +) + +// Implementation for "task_1" +func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { + log.Println("Executing Task_1_Execution_Function for", t.TaskType) + + //Do some logic + taskResult = task.NewTaskResult(t) + + output := map[string]interface{}{"task":"task_1", "key2":"value2", "key3":3, "key4":false} + taskResult.OutputData = output + taskResult.Status = "COMPLETED" + err = nil + + return taskResult, err +} + +// Implementation for "task_2" +func Task_2_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { + log.Println("Executing Task_2_Execution_Function for", t.TaskType) + + //Do some logic + taskResult = task.NewTaskResult(t) + + output := map[string]interface{}{"task":"task_2", "key2":"value2", "key3":3, "key4":false} + taskResult.OutputData = output + taskResult.Status = "COMPLETED" + err = nil + + return taskResult, err +} + +``` + + +Then main application to utilize these workers + +```go +package main + +import ( + "github.com/netflix/conductor/client/go" + "github.com/netflix/conductor/client/go/task/sample" +) + +func main() { + c := conductor.NewConductorWorker("http://localhost:8080", 1, 10000) + + c.Start("task_1", "", sample.Task_1_Execution_Function, false) + c.Start("task_2", "mydomain", sample.Task_2_Execution_Function, true) +} + +``` + +Note: For the example listed above the example task implementations are in conductor/task/sample package. Real task implementations can be placed in conductor/task directory or new subdirectory. + diff --git a/polyglot-clients/go/conductorhttpclient.go b/polyglot-clients/go/conductorhttpclient.go new file mode 100644 index 0000000000..2610dda40c --- /dev/null +++ b/polyglot-clients/go/conductorhttpclient.go @@ -0,0 +1,411 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package conductor + +import ( + "fmt" + "github.com/netflix/conductor/client/go/httpclient" + "log" + "strconv" +) + +type ConductorHttpClient struct { + httpClient *httpclient.HttpClient +} + +func NewConductorHttpClient(baseUrl string) *ConductorHttpClient { + conductorClient := new(ConductorHttpClient) + headers := map[string]string{"Content-Type": "application/json", "Accept": "application/json"} + httpClient := httpclient.NewHttpClient(baseUrl, headers, true) + conductorClient.httpClient = httpClient + return conductorClient +} + + +/**********************/ +/* Metadata Functions */ +/**********************/ + +func (c *ConductorHttpClient) GetWorkflowDef(workflowName string, version int) (string, error) { + url := c.httpClient.MakeUrl("/metadata/workflow/{workflowName}", "{workflowName}", workflowName) + versionString := "1" + + // Set default version as 1 + if version > 0 { + versionString = strconv.Itoa(version) + } + params := map[string]string{"version":versionString} + outputString, err := c.httpClient.Get(url, params, nil) + if err != nil { + log.Println("Error while trying to Get Workflow Definition", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) CreateWorkflowDef(workflowDefBody string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/workflow") + outputString, err := c.httpClient.Post(url, nil, nil, workflowDefBody) + if err != nil { + log.Println("Error while trying to Create Workflow Definition", err) + return "", err + } else { + return outputString, nil + } +} + 
+func (c *ConductorHttpClient) UpdateWorkflowDefs(workflowDefsBody string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/workflow") + outputString, err := c.httpClient.Put(url, nil, nil, workflowDefsBody) + if err != nil { + log.Println("Error while trying to Update Workflow Definitions", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) GetAllWorkflowDefs() (string, error) { + url := c.httpClient.MakeUrl("/metadata/workflow") + outputString, err := c.httpClient.Get(url, nil, nil) + if err != nil { + log.Println("Error while trying to Get All Workflow Definitions", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) UnRegisterWorkflowDef(workflowDefName string, version int) (string, error) { + versionString := "" + + versionString = strconv.Itoa(version) + + url := c.httpClient.MakeUrl("/metadata/workflow/{workflowDefName}/{version}", "{workflowDefName}", + workflowDefName, "{version}", versionString) + + outputString, err := c.httpClient.Delete(url, nil, nil, "") + + if err != nil { + log.Println("Error while trying to Unregister Workflow Definition", workflowDefName, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) GetTaskDef(taskDefName string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/taskdefs/{taskDefName}", "{taskDefName}", taskDefName) + outputString, err := c.httpClient.Get(url, nil, nil) + if err != nil { + log.Println("Error while trying to Get Task Definition", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) RegisterTaskDefs(taskDefsMeta string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/taskdefs") + outputString, err := c.httpClient.Post(url, nil, nil, taskDefsMeta) + if err != nil { + log.Println("Error while trying to Register Task Definitions", err) + return "", err + } else { + return outputString, nil + } 
+} + +func (c *ConductorHttpClient) UpdateTaskDef(taskDefMeta string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/taskdefs") + outputString, err := c.httpClient.Put(url, nil, nil, taskDefMeta) + if err != nil { + log.Println("Error while trying to Update Task Definition", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) UnRegisterTaskDef(taskDefName string) (string, error) { + url := c.httpClient.MakeUrl("/metadata/taskdefs/{taskDefName}", "{taskDefName}", taskDefName) + outputString, err := c.httpClient.Delete(url, nil, nil, "") + if err != nil { + log.Println("Error while trying to Unregister Task Definition", taskDefName, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) GetAllTaskDefs() (string, error) { + url := c.httpClient.MakeUrl("/metadata/taskdefs") + outputString, err := c.httpClient.Get(url, nil, nil) + if err != nil { + log.Println("Error while trying to Get All Task Definitions", err) + return "", err + } else { + return outputString, nil + } +} + + +/**********************/ +/* Task Functions */ +/**********************/ + +func (c *ConductorHttpClient) GetTask(taskId string) (string, error) { + url := c.httpClient.MakeUrl("/tasks/{taskId}", "{taskId}", taskId) + outputString, err := c.httpClient.Get(url, nil, nil) + if err != nil { + log.Println("Error while trying to Get Task", taskId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) UpdateTask(taskBody string) (string, error) { + url := c.httpClient.MakeUrl("/tasks") + outputString, err := c.httpClient.Post(url, nil, nil, taskBody) + if err != nil { + log.Println("Error while trying to Update Task", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) PollForTask(taskType string, workerid string, domain string) (string, error) { + url := c.httpClient.MakeUrl("/tasks/poll/{taskType}", 
"{taskType}", taskType) + params := map[string]string{ + "workerid": workerid, + } + // only add the domain if requested, otherwise conductor will silently fail (https://github.com/Netflix/conductor/issues/1952) + if domain != "" { + params["domain"] = domain + } + outputString, err := c.httpClient.Get(url, params, nil) + if err != nil { + log.Println("Error while trying to Poll For Task taskType:", taskType, ",workerid:", workerid, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) AckTask(taskId, workerid, domain string) (string, error) { + url := c.httpClient.MakeUrl("/tasks/{taskId}/ack", "{taskId}", taskId) + params := map[string]string{ + "workerid": workerid, + } + // only add the domain if requested, otherwise conductor will silently fail (https://github.com/Netflix/conductor/issues/1952) + if domain != "" { + params["domain"] = domain + } + headers := map[string]string{"Accept": "application/json"} + outputString, err := c.httpClient.Post(url, params, headers, "") + if err != nil { + return "", err + } + if outputString != "true" { + return "", fmt.Errorf("Task id: %s has already been Acked", taskId) + } + return outputString, nil +} + +func (c *ConductorHttpClient) GetAllTasksInQueue() (string, error) { + url := c.httpClient.MakeUrl("/tasks/queue/all") + outputString, err := c.httpClient.Get(url, nil, nil) + if err != nil { + log.Println("Error while trying to Get All Tasks in Queue", err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) RemoveTaskFromQueue(taskType string, taskId string) (string, error) { + url := c.httpClient.MakeUrl("/tasks/queue/{taskType}/{taskId}", "{taskType}", taskType, "{taskId}", taskId) + outputString, err := c.httpClient.Delete(url, nil, nil, "") + if err != nil { + log.Println("Error while trying to Delete Task taskType:", taskType, ",taskId:", taskId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c 
*ConductorHttpClient) GetTaskQueueSizes(taskNames string) (string, error) { + url := c.httpClient.MakeUrl("/tasks/queue/sizes") + outputString, err := c.httpClient.Post(url, nil, nil, taskNames) + if err != nil { + log.Println("Error while trying to Get Task Queue Sizes", err) + return "", err + } else { + return outputString, nil + } +} + + +/**********************/ +/* Workflow Functions */ +/**********************/ + +func (c *ConductorHttpClient) GetWorkflow(workflowId string, includeTasks bool) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}", "{workflowId}", workflowId) + includeTasksString := "false" + if includeTasks { + includeTasksString = "true" + } + params := map[string]string{"includeTasks":includeTasksString} + outputString, err := c.httpClient.Get(url, params, nil) + if err != nil { + log.Println("Error while trying to Get Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) GetRunningWorkflows(workflowName string, version int, startTime float64, endTime float64) (string, error) { + url := c.httpClient.MakeUrl("/workflow/running/{workflowName}", "{workflowName}", workflowName) + versionString := "1" + // Set default version as 1 + if version > 0 { + versionString = strconv.Itoa(version) + } + params := map[string]string{"version":versionString} + if startTime != 0 { + params["startTime"] = strconv.FormatFloat(startTime, 'f', -1, 64) + } + if endTime != 0 { + params["endTime"] = strconv.FormatFloat(endTime, 'f', -1, 64) + } + + outputString, err := c.httpClient.Get(url, params, nil) + if err != nil { + log.Println("Error while trying to Get Running Workflows", workflowName, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) StartWorkflow(workflowName string, version int, correlationId string, inputJson string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowName}", "{workflowName}", 
workflowName) + + params := make(map[string]string) + if version > 0 { + params["version"] = strconv.Itoa(version) + } + + if correlationId != "" { + params["correlationId"] = correlationId + } + + if inputJson == "" { + inputJson = "{}" + } + + headers := map[string]string{"Accept":"text/plain"} + + outputString, err := c.httpClient.Post(url, params, headers, inputJson) + if err != nil { + log.Println("Error while trying to Start Workflow", workflowName, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) TerminateWorkflow(workflowId string, reason string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}", "{workflowId}", workflowId) + + params := make(map[string]string) + + if reason != "" { + params["reason"] = reason + } + + outputString, err := c.httpClient.Delete(url, params, nil, "") + if err != nil { + log.Println("Error while trying to Terminate Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) PauseWorkflow(workflowId string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}/pause", "{workflowId}", workflowId) + outputString, err := c.httpClient.Put(url, nil, nil, "") + if err != nil { + log.Println("Error while trying to Pause Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) ResumeWorkflow(workflowId string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}/resume", "{workflowId}", workflowId) + outputString, err := c.httpClient.Put(url, nil, nil, "") + if err != nil { + log.Println("Error while trying to Resume Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) SkipTaskFromWorkflow(workflowId string, taskReferenceName string, skipTaskRequestBody string) (string, error) { + url := 
c.httpClient.MakeUrl("/workflow/{workflowId}/skiptask/{taskReferenceName}", "{workflowId}", workflowId, "{taskReferenceName}", taskReferenceName) + + outputString, err := c.httpClient.Put(url, nil, nil, skipTaskRequestBody) + if err != nil { + log.Println("Error while trying to Skip Task From Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) RerunWorkflow(workflowId string, rerunWorkflowRequest string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}/rerun", "{workflowId}", workflowId) + if rerunWorkflowRequest == "" { + rerunWorkflowRequest = "{}" + } + + outputString, err := c.httpClient.Post(url, nil, nil, rerunWorkflowRequest) + if err != nil { + log.Println("Error while trying to Rerun Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} + +func (c *ConductorHttpClient) RestartWorkflow(workflowId string) (string, error) { + url := c.httpClient.MakeUrl("/workflow/{workflowId}/restart", "{workflowId}", workflowId) + + outputString, err := c.httpClient.Post(url, nil, nil, "") + if err != nil { + log.Println("Error while trying to Restart Completed Workflow", workflowId, err) + return "", err + } else { + return outputString, nil + } +} diff --git a/polyglot-clients/go/conductorworker.go b/polyglot-clients/go/conductorworker.go new file mode 100644 index 0000000000..8d6b57ee50 --- /dev/null +++ b/polyglot-clients/go/conductorworker.go @@ -0,0 +1,113 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package conductor + +import ( + "github.com/netflix/conductor/client/go/task" + "log" + "os" + "time" +) + +var ( + hostname, hostnameError = os.Hostname() +) + +func init() { + if hostnameError != nil { + log.Fatal("Could not get hostname") + } +} + +type ConductorWorker struct { + ConductorHttpClient *ConductorHttpClient + ThreadCount int + PollingInterval int +} + +func NewConductorWorker(baseUrl string, threadCount int, pollingInterval int) *ConductorWorker { + conductorWorker := new(ConductorWorker) + conductorWorker.ThreadCount = threadCount + conductorWorker.PollingInterval = pollingInterval + conductorHttpClient := NewConductorHttpClient(baseUrl) + conductorWorker.ConductorHttpClient = conductorHttpClient + return conductorWorker +} + +func (c *ConductorWorker) Execute(t *task.Task, executeFunction func(t *task.Task) (*task.TaskResult, error)) { + taskResult, err := executeFunction(t) + if taskResult == nil { + log.Println("TaskResult cannot be nil: ", t.TaskId) + return + } + if err != nil { + log.Println("Error Executing task:", err.Error()) + taskResult.Status = task.FAILED + taskResult.ReasonForIncompletion = err.Error() + } + + taskResultJsonString, err := taskResult.ToJSONString() + if err != nil { + log.Println(err.Error()) + log.Println("Error Forming TaskResult JSON body") + return + } + _, _ = c.ConductorHttpClient.UpdateTask(taskResultJsonString) +} + +func (c *ConductorWorker) PollAndExecute(taskType string, domain string, executeFunction func(t *task.Task) (*task.TaskResult, error)) { + for { + 
time.Sleep(time.Duration(c.PollingInterval) * time.Millisecond) + + // Poll for Task taskType + polled, err := c.ConductorHttpClient.PollForTask(taskType, hostname, domain) + if err != nil { + log.Println("Error Polling task:", err.Error()) + continue + } + if polled == "" { + log.Println("No task found for:", taskType) + continue + } + + // Parse Http response into Task + parsedTask, err := task.ParseTask(polled) + if err != nil { + log.Println("Error Parsing task:", err.Error()) + continue + } + + // Found a task, so we send an Ack + _, ackErr := c.ConductorHttpClient.AckTask(parsedTask.TaskId, hostname, domain) + if ackErr != nil { + log.Println("Error Acking task:", ackErr.Error()) + continue + } + + // Execute given function + c.Execute(parsedTask, executeFunction) + } +} + +func (c *ConductorWorker) Start(taskType string, domain string, executeFunction func(t *task.Task) (*task.TaskResult, error), wait bool) { + log.Println("Polling for task:", taskType, "with a:", c.PollingInterval, "(ms) polling interval with", c.ThreadCount, "goroutines for task execution, with workerid as", hostname) + for i := 1; i <= c.ThreadCount; i++ { + go c.PollAndExecute(taskType, domain, executeFunction) + } + + // wait infinitely while the go routines are running + if wait { + select {} + } +} diff --git a/polyglot-clients/go/go.mod b/polyglot-clients/go/go.mod new file mode 100644 index 0000000000..fd669b91d7 --- /dev/null +++ b/polyglot-clients/go/go.mod @@ -0,0 +1,3 @@ +module github.com/netflix/conductor/client/go + +go 1.12 diff --git a/polyglot-clients/go/httpclient/httpclient.go b/polyglot-clients/go/httpclient/httpclient.go new file mode 100644 index 0000000000..50fcf9c3ef --- /dev/null +++ b/polyglot-clients/go/httpclient/httpclient.go @@ -0,0 +1,168 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +package httpclient + +import ( + "bytes" + "fmt" + "io/ioutil" + "log" + "net/http" + "strings" +) + +type HttpClient struct { + BaseUrl string + Headers map[string]string + PrintLogs bool + client *http.Client +} + +func NewHttpClient(baseUrl string, headers map[string]string, printLogs bool) *HttpClient { + httpClient := new(HttpClient) + httpClient.BaseUrl = baseUrl + httpClient.Headers = headers + httpClient.PrintLogs = printLogs + httpClient.client = &http.Client{} + return httpClient +} + +func (c *HttpClient) logSendRequest(url string, requestType string, body string) { + log.Println("Sending [", requestType, "] request to Server (", url, "):") + log.Println("Body:") + log.Println(body) +} + +func (c *HttpClient) logResponse(statusCode string, response string) { + log.Println("Received response from Server (", c.BaseUrl, "):") + log.Println("Status: ", statusCode) + log.Println("Response:") + log.Println(response) +} + +func genParamString(paramMap map[string]string) string { + if paramMap == nil || len(paramMap) == 0 { + return "" + } + + output := "?" 
+ for key, value := range paramMap { + output += key + output += "=" + output += value + output += "&" + } + return output +} + +func (c *HttpClient) httpRequest(url string, requestType string, headers map[string]string, body string) (string, error) { + var req *http.Request + var err error + + if requestType == "GET" { + req, err = http.NewRequest(requestType, url, nil) + } else { + var bodyStr = []byte(body) + req, err = http.NewRequest(requestType, url, bytes.NewBuffer(bodyStr)) + } + + if err != nil { + return "", err + } + // Default Headers + for key, value := range c.Headers { + req.Header.Set(key, value) + } + + // Custom Headers + for key, value := range headers { + req.Header.Set(key, value) + } + + if c.PrintLogs { + c.logSendRequest(url, requestType, body) + } + + resp, err := c.client.Do(req) + if err != nil { + return "", err + } + + // If successful HTTP call, but Client/Server error, we return error + if resp.StatusCode >= 400 && resp.StatusCode < 500 { + return "", fmt.Errorf("%d Http Client Error for url: %s", resp.StatusCode, url) + } + if resp.StatusCode >= 500 && resp.StatusCode < 600 { + return "", fmt.Errorf("%d Http Server Error for url: %s", resp.StatusCode, url) + } + + defer resp.Body.Close() + response, err := ioutil.ReadAll(resp.Body) + responseString := string(response) + if err != nil { + log.Println("ERROR reading response for URL: ", url) + return "", err + } + + if c.PrintLogs { + c.logResponse(resp.Status, responseString) + } + return responseString, nil +} + +func (c *HttpClient) Get(url string, queryParamsMap map[string]string, headers map[string]string) (string, error) { + urlString := url + genParamString(queryParamsMap) + resp, err := c.httpRequest(urlString, "GET", headers, "") + if err != nil { + log.Println("Http GET Error for URL: ", urlString) + return "", err + } + return resp, nil +} + +func (c *HttpClient) Put(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { + 
urlString := url + genParamString(queryParamsMap) + resp, err := c.httpRequest(urlString, "PUT", headers, body) + if err != nil { + log.Println("Http PUT Error for URL: ", urlString, ) + return "", err + } + return resp, nil +} + +func (c *HttpClient) Post(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { + urlString := url + genParamString(queryParamsMap) + resp, err := c.httpRequest(urlString, "POST", headers, body) + if err != nil { + log.Println("Http POST Error for URL: ", urlString) + return "", err + } + return resp, nil +} + +func (c *HttpClient) Delete(url string, queryParamsMap map[string]string, headers map[string]string, body string) (string, error) { + urlString := url + genParamString(queryParamsMap) + resp, err := c.httpRequest(urlString, "DELETE", headers, body) + if err != nil { + log.Println("Http DELETE Error for URL: ", urlString) + return "", err + } + return resp, nil +} + +func (c *HttpClient) MakeUrl(path string, args ...string) string { + url := c.BaseUrl + r := strings.NewReplacer(args...) + return url + r.Replace(path) +} diff --git a/polyglot-clients/go/startclient/startclient.go b/polyglot-clients/go/startclient/startclient.go new file mode 100644 index 0000000000..ca22ed8765 --- /dev/null +++ b/polyglot-clients/go/startclient/startclient.go @@ -0,0 +1,26 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package main + +import ( + conductor "github.com/netflix/conductor/client/go" + "github.com/netflix/conductor/client/go/task/sample" +) + +func main() { + c := conductor.NewConductorWorker("http://localhost:8080/api", 1, 10000) + + c.Start("task_1", "", sample.Task_1_Execution_Function, false) + c.Start("task_2", "mydomain", sample.Task_2_Execution_Function, true) +} diff --git a/polyglot-clients/go/task/sample/task_1_exec.go b/polyglot-clients/go/task/sample/task_1_exec.go new file mode 100644 index 0000000000..77526f053b --- /dev/null +++ b/polyglot-clients/go/task/sample/task_1_exec.go @@ -0,0 +1,34 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package sample + +import ( + "log" + "github.com/netflix/conductor/client/go/task" +) + +// Implementation for "task_1" +func Task_1_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { + log.Println("Executing Task_1_Execution_Function for", t.TaskType) + + //Do some logic + taskResult = task.NewTaskResult(t) + + output := map[string]interface{}{"task":"task_1", "key2":"value2", "key3":3, "key4":false} + taskResult.OutputData = output + taskResult.Status = "COMPLETED" + err = nil + + return taskResult, err +} diff --git a/polyglot-clients/go/task/sample/task_2_exec.go b/polyglot-clients/go/task/sample/task_2_exec.go new file mode 100644 index 0000000000..d73f0beaaf --- /dev/null +++ b/polyglot-clients/go/task/sample/task_2_exec.go @@ -0,0 +1,34 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package sample + +import ( + "log" + "github.com/netflix/conductor/client/go/task" +) + +// Implementation for "task_2" +func Task_2_Execution_Function(t *task.Task) (taskResult *task.TaskResult, err error) { + log.Println("Executing Task_2_Execution_Function for", t.TaskType) + + //Do some logic + taskResult = task.NewTaskResult(t) + + output := map[string]interface{}{"task":"task_2", "key2":"value2", "key3":3, "key4":false} + taskResult.OutputData = output + taskResult.Status = "COMPLETED" + err = nil + + return taskResult, err +} diff --git a/polyglot-clients/go/task/task.go b/polyglot-clients/go/task/task.go new file mode 100644 index 0000000000..104128cad7 --- /dev/null +++ b/polyglot-clients/go/task/task.go @@ -0,0 +1,101 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package task + +import ( + "encoding/json" +) + +type WorkflowTaskType uint8 +type TaskStatus string + +const ( + SIMPLE WorkflowTaskType = iota + DYNAMIC + FORK_JOIN + FORK_JOIN_DYNAMIC + DECISION + JOIN + SUB_WORKFLOW + EVENT + WAIT + USER_DEFINED +) + +const ( + IN_PROGRESS TaskStatus = "IN_PROGRESS" + CANCELED = "CANCELED" + FAILED = "FAILED" + COMPLETED = "COMPLETED" + SCHEDULED = "SCHEDULED" + TIMED_OUT = "TIMED_OUT" + SKIPPED = "SKIPPED" +) + +type Task struct { + TaskType string `json:"taskType"` + Status TaskStatus `json:"status"` + InputData map[string]interface{} `json:"inputData"` + ReferenceTaskName string `json:"referenceTaskName"` + RetryCount int `json:"retryCount"` + Seq int `json:"seq"` + CorrelationId string `json:"correlationId"` + PollCount int `json:"pollCount"` + TaskDefName string `json:"taskDefName"` + // Time when the task was scheduled + ScheduledTime int64 `json:"scheduledTime"` + // Time when the task was first polled + StartTime int64 `json:"startTime"` + // Time when the task completed executing + EndTime int64 `json:"endTime"` + // Time when the task was last updated + UpdateTime int64 `json:"updateTime"` + StartDelayInSeconds int `json:"startDelayInSeconds"` + RetriedTaskId string `json:"retriedTaskId"` + Retried bool `json:"retried"` + // Default = true + CallbackFromWorker bool `json:"callbackFromWorker"` + // DynamicWorkflowTask + ResponseTimeoutSeconds int `json:"responseTimeoutSeconds"` + WorkflowInstanceId string `json:"workflowInstanceId"` + TaskId string `json:"taskId"` + ReasonForIncompletion string `json:"reasonForIncompletion"` + CallbackAfterSeconds int64 `json:"callbackAfterSeconds"` + WorkerId string `json:"workerId"` + OutputData map[string]interface{} `json:"outputData"` +} + +// "Constructor" to initialze non zero value defaults +func NewTask() *Task { + task := new(Task) + task.CallbackFromWorker = true + task.InputData = make(map[string]interface{}) + task.OutputData = make(map[string]interface{}) + return task 
+} + +func (t *Task) ToJSONString() (string, error) { + var jsonString string + b, err := json.Marshal(t) + if err == nil { + jsonString = string(b) + } + return jsonString, err +} + +func ParseTask(inputJSON string) (*Task, error) { + t := NewTask() + err := json.Unmarshal([]byte(inputJSON), t) + return t, err +} diff --git a/polyglot-clients/go/task/task_exec_template.go b/polyglot-clients/go/task/task_exec_template.go new file mode 100644 index 0000000000..364138abce --- /dev/null +++ b/polyglot-clients/go/task/task_exec_template.go @@ -0,0 +1,39 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package task + +import ( + "log" +) + +/* Format for functions must be: + func Name (t *Task) (taskResult TaskResult, err error) + - taskResult (TaskResult) should return struct with populated fields + - err (error) returns error if any +*/ + +func ExampleTaskExecutionFunction(t *Task) (taskResult *TaskResult, err error) { + log.Println("Executing Example Function for", t.TaskType) + log.Println(t) + + //Do some logic + taskResult = NewTaskResult(t) + + output := map[string]interface{}{"task":"example", "key2":"value2", "key3":3, "key4":false} + taskResult.OutputData = output + taskResult.Status = "COMPLETED" + err = nil + + return taskResult, err +} diff --git a/polyglot-clients/go/task/taskresult.go b/polyglot-clients/go/task/taskresult.go new file mode 100644 index 0000000000..b20c84f3b0 --- /dev/null +++ b/polyglot-clients/go/task/taskresult.go @@ -0,0 +1,72 @@ +// Copyright 2017 Netflix, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package task + +import ( + "encoding/json" +) + +type TaskResultStatus string + +type TaskResult struct { + Status TaskResultStatus `json:"status"` + WorkflowInstanceId string `json:"workflowInstanceId"` + TaskId string `json:"taskId"` + ReasonForIncompletion string `json:"reasonForIncompletion"` + CallbackAfterSeconds int64 `json:"callbackAfterSeconds"` + WorkerId string `json:"workerId"` + OutputData map[string]interface{} `json:"outputData"` + Logs []LogMessage `json:"logs"` +} + +// LogMessage used to sent logs to conductor server +type LogMessage struct { + Log string `json:"log"` + TaskID string `json:"taskId"` + CreatedTime int `json:"createdTime"` +} + +// "Constructor" to initialze non zero value defaults +func NewEmptyTaskResult() *TaskResult { + taskResult := new(TaskResult) + taskResult.OutputData = make(map[string]interface{}) + return taskResult +} + +func NewTaskResult(t *Task) *TaskResult { + taskResult := new(TaskResult) + taskResult.CallbackAfterSeconds = t.CallbackAfterSeconds + taskResult.WorkflowInstanceId = t.WorkflowInstanceId + taskResult.TaskId = t.TaskId + taskResult.ReasonForIncompletion = t.ReasonForIncompletion + taskResult.Status = TaskResultStatus(t.Status) + taskResult.WorkerId = t.WorkerId + taskResult.OutputData = t.OutputData + return taskResult +} + +func (t *TaskResult) ToJSONString() (string, error) { + var jsonString string + b, err := json.Marshal(t) + if err == nil { + jsonString = string(b) + } + return jsonString, err +} + +func ParseTaskResult(inputJSON string) (*TaskResult, error) { + t := NewEmptyTaskResult() + err := json.Unmarshal([]byte(inputJSON), t) + return t, err +} diff --git a/polyglot-clients/gogrpc/.gitignore b/polyglot-clients/gogrpc/.gitignore new file mode 100644 index 0000000000..49ce3c193f --- /dev/null +++ b/polyglot-clients/gogrpc/.gitignore @@ -0,0 +1 @@ +/vendor \ No newline at end of file diff --git a/polyglot-clients/gogrpc/Gopkg.lock b/polyglot-clients/gogrpc/Gopkg.lock new file mode 100644 index 
0000000000..870521170f --- /dev/null +++ b/polyglot-clients/gogrpc/Gopkg.lock @@ -0,0 +1,220 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + digest = "1:ffe9824d294da03b391f44e1ae8281281b4afc1bdaa9588c9097785e3af10cec" + name = "github.com/davecgh/go-spew" + packages = ["spew"] + pruneopts = "NUT" + revision = "8991bc29aa16c548c550c7ff78260e27b9ab7c73" + version = "v1.1.1" + +[[projects]] + digest = "1:68496943b027f697647279f48129c75a65cd816a6a95fc6bb7b8b039ea050f1d" + name = "github.com/golang/protobuf" + packages = [ + "jsonpb", + "proto", + "protoc-gen-go", + "protoc-gen-go/descriptor", + "protoc-gen-go/generator", + "protoc-gen-go/generator/internal/remap", + "protoc-gen-go/grpc", + "protoc-gen-go/plugin", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/empty", + "ptypes/struct", + "ptypes/timestamp", + "ptypes/wrappers", + ] + pruneopts = "NUT" + revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5" + version = "v1.2.0" + +[[projects]] + digest = "1:406338ad39ab2e37b7f4452906442a3dbf0eb3379dd1f06aafb5c07e769a5fbb" + name = "github.com/inconshreveable/mousetrap" + packages = ["."] + pruneopts = "NUT" + revision = "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" + version = "v1.0" + +[[projects]] + digest = "1:d0ede3366f0fca3c7c0eec74a2c5bfae7711a2fdecb2a3c63330840f333f7977" + name = "github.com/jhump/protoreflect" + packages = [ + "desc", + "desc/internal", + "dynamic", + "dynamic/grpcdynamic", + "grpcreflect", + "internal", + ] + pruneopts = "NUT" + revision = "b28d968eb345542b430a717dc72a88abf10d0b95" + version = "v1.0.0" + +[[projects]] + branch = "master" + digest = "1:d6b6479233449ae6a3defd500360f810d55415f2a768a9ecf62c126b7667903f" + name = "github.com/kazegusuri/grpcurl" + packages = ["."] + pruneopts = "NUT" + revision = "98e92bc156677950a3bedb128a7227b40f0ff125" + +[[projects]] + digest = "1:0028cb19b2e4c3112225cd871870f2d9cf49b9b4276531f03438a88e94be86fe" + name = 
"github.com/pmezard/go-difflib" + packages = ["difflib"] + pruneopts = "NUT" + revision = "792786c7400a136282c1664665ae0a8db921c6c2" + version = "v1.0.0" + +[[projects]] + digest = "1:343d44e06621142ab09ae0c76c1799104cdfddd3ffb445d78b1adf8dc3ffaf3d" + name = "github.com/spf13/cobra" + packages = ["."] + pruneopts = "NUT" + revision = "ef82de70bb3f60c65fb8eebacbb2d122ef517385" + version = "v0.0.3" + +[[projects]] + digest = "1:e3707aeaccd2adc89eba6c062fec72116fe1fc1ba71097da85b4d8ae1668a675" + name = "github.com/spf13/pflag" + packages = ["."] + pruneopts = "NUT" + revision = "9a97c102cda95a86cec2345a6f09f55a939babf5" + version = "v1.0.2" + +[[projects]] + branch = "master" + digest = "1:ea222cd3bb494fb2b0f799e33f91691d2f60cb298d064233367cd692d07d6c39" + name = "github.com/square/goprotowrap" + packages = [ + ".", + "cmd/protowrap", + "wrapper", + ] + pruneopts = "NUT" + revision = "6f414ea4a80cc23c26725b193215be2a0d85d6e1" + +[[projects]] + digest = "1:bacb8b590716ab7c33f2277240972c9582d389593ee8d66fc10074e0508b8126" + name = "github.com/stretchr/testify" + packages = ["assert"] + pruneopts = "NUT" + revision = "f35b8ab0b5a2cef36673838d662e249dd9c94686" + version = "v1.2.2" + +[[projects]] + branch = "master" + digest = "1:a2707daa031e6db5fcaae8a9b30eaa503a0d7d8aa72cd50b27fa394ef6c3f7fe" + name = "golang.org/x/net" + packages = [ + "context", + "http/httpguts", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "trace", + ] + pruneopts = "NUT" + revision = "26e67e76b6c3f6ce91f7c52def5af501b4e0f3a2" + +[[projects]] + branch = "master" + digest = "1:5c2d57086d29bf60bf5fc8a1e6550650034f8b26177dced9b16d1f673311ab40" + name = "golang.org/x/sys" + packages = ["unix"] + pruneopts = "NUT" + revision = "d0be0721c37eeb5299f245a996a483160fc36940" + +[[projects]] + digest = "1:e7071ed636b5422cc51c0e3a6cebc229d6c9fffc528814b519a980641422d619" + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + 
"internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable", + ] + pruneopts = "NUT" + revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" + version = "v0.3.0" + +[[projects]] + branch = "master" + digest = "1:9b295ec121babd2b209d9d52115f839f2ea4f8165977f4e446326b875434ab7b" + name = "google.golang.org/genproto" + packages = [ + "googleapis/rpc/errdetails", + "googleapis/rpc/status", + ] + pruneopts = "NUT" + revision = "5a2fd4cab2d6d4a18e70c34937662526cd0c4bd1" + +[[projects]] + digest = "1:827de0295c937025afe1896edd20b56aa9be42840818c194cf0eee6ab7d27e43" + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "codes", + "connectivity", + "credentials", + "encoding", + "encoding/proto", + "grpclog", + "internal", + "internal/backoff", + "internal/channelz", + "internal/envconfig", + "internal/grpcrand", + "internal/transport", + "keepalive", + "metadata", + "naming", + "peer", + "reflection/grpc_reflection_v1alpha", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap", + ] + pruneopts = "NUT" + revision = "8dea3dc473e90c8179e519d91302d0597c0ca1d1" + version = "v1.15.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + input-imports = [ + "github.com/golang/protobuf/proto", + "github.com/golang/protobuf/protoc-gen-go", + "github.com/golang/protobuf/ptypes/any", + "github.com/golang/protobuf/ptypes/struct", + "github.com/kazegusuri/grpcurl", + "github.com/square/goprotowrap/cmd/protowrap", + "github.com/stretchr/testify/assert", + "golang.org/x/net/context", + "google.golang.org/grpc", + ] + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/polyglot-clients/gogrpc/Gopkg.toml b/polyglot-clients/gogrpc/Gopkg.toml new file mode 100644 index 0000000000..55ddf0e9e4 --- /dev/null +++ b/polyglot-clients/gogrpc/Gopkg.toml @@ -0,0 
+1,12 @@ +[prune] + go-tests = true + unused-packages = true + non-go = true + +[[constraint]] + name = "google.golang.org/grpc" + version = "1.15.0" + +[[constraint]] + name = "github.com/stretchr/testify" + version = "1.2.1" diff --git a/polyglot-clients/gogrpc/Makefile b/polyglot-clients/gogrpc/Makefile new file mode 100644 index 0000000000..74578d7e65 --- /dev/null +++ b/polyglot-clients/gogrpc/Makefile @@ -0,0 +1,36 @@ +PROTO_SRC = ../../grpc/src/main/proto +PROTO_VERSION = 3.5.1 + +BUILD_DIR = build +BIN_DIR = $(BUILD_DIR)/bin +SERVICES = \ + $(PROTO_SRC)/grpc/event_service.pb.go \ + $(PROTO_SRC)/grpc/metadata_service.pb.go \ + $(PROTO_SRC)/grpc/search.pb.go \ + $(PROTO_SRC)/grpc/task_service.pb.go \ + $(PROTO_SRC)/grpc/workflow_service.pb.go + +USER_ID := $(shell id -u) +GROUP_ID := $(shell id -g) +CONDUCTOR_ROOT = /go/src/github.com/netflix/conductor +PROTOC = docker run --rm -it \ + --user $(USER_ID):$(GROUP_ID) \ + -v '$(PWD)/../..':'$(CONDUCTOR_ROOT)' \ + -w $(CONDUCTOR_ROOT)/client/gogrpc \ + znly/protoc:0.3.0 + +proto: models $(SERVICES) + +build: + go fmt ./... + go build ./... + +test: + go test ./... + +# Helpers +$(SERVICES): %.pb.go: %.proto + $(PROTOC) -I $(PROTO_SRC) $< --go_out=plugins=grpc:/go/src + +models: + $(PROTOC) -I $(PROTO_SRC) $(PROTO_SRC)/model/*.proto --go_out=/go/src \ No newline at end of file diff --git a/polyglot-clients/gogrpc/README.md b/polyglot-clients/gogrpc/README.md new file mode 100644 index 0000000000..e57b61235d --- /dev/null +++ b/polyglot-clients/gogrpc/README.md @@ -0,0 +1,7 @@ +## Conductor: gRPC Go client generation +At the moment, the generation of the go client is manual. +In order to generate the Go gRPC client, run: +``` +make proto +``` +This should update the folder `client/gogrpc/conductor` if any changes. 
diff --git a/polyglot-clients/gogrpc/conductor/client.go b/polyglot-clients/gogrpc/conductor/client.go new file mode 100644 index 0000000000..264de7a861 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/client.go @@ -0,0 +1,77 @@ +package conductor + +import ( + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" + grpc "google.golang.org/grpc" +) + +// TasksClient is a Conductor client that exposes the Conductor +// Tasks API. +type TasksClient interface { + Tasks() tasks.TaskServiceClient + Shutdown() +} + +// MetadataClient is a Conductor client that exposes the Conductor +// Metadata API. +type MetadataClient interface { + Metadata() metadata.MetadataServiceClient + Shutdown() +} + +// WorkflowsClient is a Conductor client that exposes the Conductor +// Workflows API. +type WorkflowsClient interface { + Workflows() workflows.WorkflowServiceClient + Shutdown() +} + +// Client encapsulates a GRPC connection to a Conductor server and +// the different services it exposes. +type Client struct { + conn *grpc.ClientConn + tasks tasks.TaskServiceClient + metadata metadata.MetadataServiceClient + workflows workflows.WorkflowServiceClient +} + +// NewClient returns a new Client with a GRPC connection to the given address, +// and any optional grpc.Dialoption settings. +func NewClient(address string, options ...grpc.DialOption) (*Client, error) { + conn, err := grpc.Dial(address, options...) + if err != nil { + return nil, err + } + return &Client{conn: conn}, nil +} + +// Shutdown closes the underlying GRPC connection for this client. 
+func (client *Client) Shutdown() { + client.conn.Close() +} + +// Tasks returns the Tasks service for this client +func (client *Client) Tasks() tasks.TaskServiceClient { + if client.tasks == nil { + client.tasks = tasks.NewTaskServiceClient(client.conn) + } + return client.tasks +} + +// Metadata returns the Metadata service for this client +func (client *Client) Metadata() metadata.MetadataServiceClient { + if client.metadata == nil { + client.metadata = metadata.NewMetadataServiceClient(client.conn) + } + return client.metadata +} + +// Workflows returns the workflows service for this client +func (client *Client) Workflows() workflows.WorkflowServiceClient { + if client.workflows == nil { + client.workflows = workflows.NewWorkflowServiceClient(client.conn) + } + return client.workflows +} diff --git a/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go new file mode 100644 index 0000000000..6972471586 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/grpc/events/event_service.pb.go @@ -0,0 +1,991 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/event_service.proto + +package events // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/events" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AddEventHandlerRequest struct { + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddEventHandlerRequest) Reset() { *m = AddEventHandlerRequest{} } +func (m *AddEventHandlerRequest) String() string { return proto.CompactTextString(m) } +func (*AddEventHandlerRequest) ProtoMessage() {} +func (*AddEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{0} +} +func (m *AddEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddEventHandlerRequest.Unmarshal(m, b) +} +func (m *AddEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *AddEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddEventHandlerRequest.Merge(dst, src) +} +func (m *AddEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_AddEventHandlerRequest.Size(m) +} +func (m *AddEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddEventHandlerRequest proto.InternalMessageInfo + +func (m *AddEventHandlerRequest) GetHandler() *model.EventHandler { + if m != nil { + return m.Handler + } + return nil +} + +type AddEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddEventHandlerResponse) Reset() { *m = AddEventHandlerResponse{} } +func (m *AddEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*AddEventHandlerResponse) ProtoMessage() {} +func (*AddEventHandlerResponse) Descriptor() ([]byte, []int) { + return 
fileDescriptor_event_service_913a1fde08d4f277, []int{1} +} +func (m *AddEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddEventHandlerResponse.Unmarshal(m, b) +} +func (m *AddEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *AddEventHandlerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddEventHandlerResponse.Merge(dst, src) +} +func (m *AddEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_AddEventHandlerResponse.Size(m) +} +func (m *AddEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddEventHandlerResponse proto.InternalMessageInfo + +type UpdateEventHandlerRequest struct { + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEventHandlerRequest) Reset() { *m = UpdateEventHandlerRequest{} } +func (m *UpdateEventHandlerRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateEventHandlerRequest) ProtoMessage() {} +func (*UpdateEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{2} +} +func (m *UpdateEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEventHandlerRequest.Unmarshal(m, b) +} +func (m *UpdateEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEventHandlerRequest.Merge(dst, src) +} +func (m *UpdateEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_UpdateEventHandlerRequest.Size(m) +} +func 
(m *UpdateEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEventHandlerRequest proto.InternalMessageInfo + +func (m *UpdateEventHandlerRequest) GetHandler() *model.EventHandler { + if m != nil { + return m.Handler + } + return nil +} + +type UpdateEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEventHandlerResponse) Reset() { *m = UpdateEventHandlerResponse{} } +func (m *UpdateEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateEventHandlerResponse) ProtoMessage() {} +func (*UpdateEventHandlerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{3} +} +func (m *UpdateEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEventHandlerResponse.Unmarshal(m, b) +} +func (m *UpdateEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateEventHandlerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEventHandlerResponse.Merge(dst, src) +} +func (m *UpdateEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_UpdateEventHandlerResponse.Size(m) +} +func (m *UpdateEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEventHandlerResponse proto.InternalMessageInfo + +type RemoveEventHandlerRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveEventHandlerRequest) Reset() { *m = RemoveEventHandlerRequest{} } +func (m *RemoveEventHandlerRequest) String() 
string { return proto.CompactTextString(m) } +func (*RemoveEventHandlerRequest) ProtoMessage() {} +func (*RemoveEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{4} +} +func (m *RemoveEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveEventHandlerRequest.Unmarshal(m, b) +} +func (m *RemoveEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveEventHandlerRequest.Merge(dst, src) +} +func (m *RemoveEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_RemoveEventHandlerRequest.Size(m) +} +func (m *RemoveEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveEventHandlerRequest proto.InternalMessageInfo + +func (m *RemoveEventHandlerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type RemoveEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveEventHandlerResponse) Reset() { *m = RemoveEventHandlerResponse{} } +func (m *RemoveEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveEventHandlerResponse) ProtoMessage() {} +func (*RemoveEventHandlerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{5} +} +func (m *RemoveEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveEventHandlerResponse.Unmarshal(m, b) +} +func (m *RemoveEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveEventHandlerResponse) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveEventHandlerResponse.Merge(dst, src) +} +func (m *RemoveEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_RemoveEventHandlerResponse.Size(m) +} +func (m *RemoveEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveEventHandlerResponse proto.InternalMessageInfo + +type GetEventHandlersRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetEventHandlersRequest) Reset() { *m = GetEventHandlersRequest{} } +func (m *GetEventHandlersRequest) String() string { return proto.CompactTextString(m) } +func (*GetEventHandlersRequest) ProtoMessage() {} +func (*GetEventHandlersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{6} +} +func (m *GetEventHandlersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEventHandlersRequest.Unmarshal(m, b) +} +func (m *GetEventHandlersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEventHandlersRequest.Marshal(b, m, deterministic) +} +func (dst *GetEventHandlersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEventHandlersRequest.Merge(dst, src) +} +func (m *GetEventHandlersRequest) XXX_Size() int { + return xxx_messageInfo_GetEventHandlersRequest.Size(m) +} +func (m *GetEventHandlersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEventHandlersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEventHandlersRequest proto.InternalMessageInfo + +type GetEventHandlersForEventRequest struct { + Event string `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"` + ActiveOnly bool `protobuf:"varint,2,opt,name=active_only,json=activeOnly,proto3" json:"active_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + 
XXX_sizecache int32 `json:"-"` +} + +func (m *GetEventHandlersForEventRequest) Reset() { *m = GetEventHandlersForEventRequest{} } +func (m *GetEventHandlersForEventRequest) String() string { return proto.CompactTextString(m) } +func (*GetEventHandlersForEventRequest) ProtoMessage() {} +func (*GetEventHandlersForEventRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{7} +} +func (m *GetEventHandlersForEventRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEventHandlersForEventRequest.Unmarshal(m, b) +} +func (m *GetEventHandlersForEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEventHandlersForEventRequest.Marshal(b, m, deterministic) +} +func (dst *GetEventHandlersForEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEventHandlersForEventRequest.Merge(dst, src) +} +func (m *GetEventHandlersForEventRequest) XXX_Size() int { + return xxx_messageInfo_GetEventHandlersForEventRequest.Size(m) +} +func (m *GetEventHandlersForEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEventHandlersForEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEventHandlersForEventRequest proto.InternalMessageInfo + +func (m *GetEventHandlersForEventRequest) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *GetEventHandlersForEventRequest) GetActiveOnly() bool { + if m != nil { + return m.ActiveOnly + } + return false +} + +type GetQueuesRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueuesRequest) Reset() { *m = GetQueuesRequest{} } +func (m *GetQueuesRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueuesRequest) ProtoMessage() {} +func (*GetQueuesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{8} +} +func (m *GetQueuesRequest) 
XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueuesRequest.Unmarshal(m, b) +} +func (m *GetQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueuesRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueuesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueuesRequest.Merge(dst, src) +} +func (m *GetQueuesRequest) XXX_Size() int { + return xxx_messageInfo_GetQueuesRequest.Size(m) +} +func (m *GetQueuesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueuesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueuesRequest proto.InternalMessageInfo + +type GetQueuesResponse struct { + EventToQueueUri map[string]string `protobuf:"bytes,1,rep,name=event_to_queue_uri,json=eventToQueueUri,proto3" json:"event_to_queue_uri,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueuesResponse) Reset() { *m = GetQueuesResponse{} } +func (m *GetQueuesResponse) String() string { return proto.CompactTextString(m) } +func (*GetQueuesResponse) ProtoMessage() {} +func (*GetQueuesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{9} +} +func (m *GetQueuesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueuesResponse.Unmarshal(m, b) +} +func (m *GetQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueuesResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueuesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueuesResponse.Merge(dst, src) +} +func (m *GetQueuesResponse) XXX_Size() int { + return xxx_messageInfo_GetQueuesResponse.Size(m) +} +func (m *GetQueuesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueuesResponse.DiscardUnknown(m) +} + +var 
xxx_messageInfo_GetQueuesResponse proto.InternalMessageInfo + +func (m *GetQueuesResponse) GetEventToQueueUri() map[string]string { + if m != nil { + return m.EventToQueueUri + } + return nil +} + +type GetQueueSizesRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesRequest) Reset() { *m = GetQueueSizesRequest{} } +func (m *GetQueueSizesRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueSizesRequest) ProtoMessage() {} +func (*GetQueueSizesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{10} +} +func (m *GetQueueSizesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesRequest.Unmarshal(m, b) +} +func (m *GetQueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesRequest.Merge(dst, src) +} +func (m *GetQueueSizesRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesRequest.Size(m) +} +func (m *GetQueueSizesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesRequest proto.InternalMessageInfo + +type GetQueueSizesResponse struct { + EventToQueueInfo map[string]*GetQueueSizesResponse_QueueInfo `protobuf:"bytes,2,rep,name=event_to_queue_info,json=eventToQueueInfo,proto3" json:"event_to_queue_info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesResponse) Reset() { *m = GetQueueSizesResponse{} } +func (m *GetQueueSizesResponse) String() string { return proto.CompactTextString(m) } +func 
(*GetQueueSizesResponse) ProtoMessage() {} +func (*GetQueueSizesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{11} +} +func (m *GetQueueSizesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesResponse.Unmarshal(m, b) +} +func (m *GetQueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesResponse.Merge(dst, src) +} +func (m *GetQueueSizesResponse) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesResponse.Size(m) +} +func (m *GetQueueSizesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesResponse proto.InternalMessageInfo + +func (m *GetQueueSizesResponse) GetEventToQueueInfo() map[string]*GetQueueSizesResponse_QueueInfo { + if m != nil { + return m.EventToQueueInfo + } + return nil +} + +type GetQueueSizesResponse_QueueInfo struct { + QueueSizes map[string]int64 `protobuf:"bytes,1,rep,name=queue_sizes,json=queueSizes,proto3" json:"queue_sizes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesResponse_QueueInfo) Reset() { *m = GetQueueSizesResponse_QueueInfo{} } +func (m *GetQueueSizesResponse_QueueInfo) String() string { return proto.CompactTextString(m) } +func (*GetQueueSizesResponse_QueueInfo) ProtoMessage() {} +func (*GetQueueSizesResponse_QueueInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{11, 0} +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Unmarshal(m, b) +} +func (m 
*GetQueueSizesResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesResponse_QueueInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Merge(dst, src) +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Size(m) +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesResponse_QueueInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesResponse_QueueInfo proto.InternalMessageInfo + +func (m *GetQueueSizesResponse_QueueInfo) GetQueueSizes() map[string]int64 { + if m != nil { + return m.QueueSizes + } + return nil +} + +type GetQueueProvidersRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueProvidersRequest) Reset() { *m = GetQueueProvidersRequest{} } +func (m *GetQueueProvidersRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueProvidersRequest) ProtoMessage() {} +func (*GetQueueProvidersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{12} +} +func (m *GetQueueProvidersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueProvidersRequest.Unmarshal(m, b) +} +func (m *GetQueueProvidersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueProvidersRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueProvidersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueProvidersRequest.Merge(dst, src) +} +func (m *GetQueueProvidersRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueProvidersRequest.Size(m) +} +func (m *GetQueueProvidersRequest) XXX_DiscardUnknown() { + 
xxx_messageInfo_GetQueueProvidersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueProvidersRequest proto.InternalMessageInfo + +type GetQueueProvidersResponse struct { + Providers []string `protobuf:"bytes,1,rep,name=providers,proto3" json:"providers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueProvidersResponse) Reset() { *m = GetQueueProvidersResponse{} } +func (m *GetQueueProvidersResponse) String() string { return proto.CompactTextString(m) } +func (*GetQueueProvidersResponse) ProtoMessage() {} +func (*GetQueueProvidersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{13} +} +func (m *GetQueueProvidersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueProvidersResponse.Unmarshal(m, b) +} +func (m *GetQueueProvidersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueProvidersResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueueProvidersResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueProvidersResponse.Merge(dst, src) +} +func (m *GetQueueProvidersResponse) XXX_Size() int { + return xxx_messageInfo_GetQueueProvidersResponse.Size(m) +} +func (m *GetQueueProvidersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueProvidersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueProvidersResponse proto.InternalMessageInfo + +func (m *GetQueueProvidersResponse) GetProviders() []string { + if m != nil { + return m.Providers + } + return nil +} + +func init() { + proto.RegisterType((*AddEventHandlerRequest)(nil), "conductor.grpc.events.AddEventHandlerRequest") + proto.RegisterType((*AddEventHandlerResponse)(nil), "conductor.grpc.events.AddEventHandlerResponse") + proto.RegisterType((*UpdateEventHandlerRequest)(nil), "conductor.grpc.events.UpdateEventHandlerRequest") + 
proto.RegisterType((*UpdateEventHandlerResponse)(nil), "conductor.grpc.events.UpdateEventHandlerResponse") + proto.RegisterType((*RemoveEventHandlerRequest)(nil), "conductor.grpc.events.RemoveEventHandlerRequest") + proto.RegisterType((*RemoveEventHandlerResponse)(nil), "conductor.grpc.events.RemoveEventHandlerResponse") + proto.RegisterType((*GetEventHandlersRequest)(nil), "conductor.grpc.events.GetEventHandlersRequest") + proto.RegisterType((*GetEventHandlersForEventRequest)(nil), "conductor.grpc.events.GetEventHandlersForEventRequest") + proto.RegisterType((*GetQueuesRequest)(nil), "conductor.grpc.events.GetQueuesRequest") + proto.RegisterType((*GetQueuesResponse)(nil), "conductor.grpc.events.GetQueuesResponse") + proto.RegisterMapType((map[string]string)(nil), "conductor.grpc.events.GetQueuesResponse.EventToQueueUriEntry") + proto.RegisterType((*GetQueueSizesRequest)(nil), "conductor.grpc.events.GetQueueSizesRequest") + proto.RegisterType((*GetQueueSizesResponse)(nil), "conductor.grpc.events.GetQueueSizesResponse") + proto.RegisterMapType((map[string]*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.EventToQueueInfoEntry") + proto.RegisterType((*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo") + proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo.QueueSizesEntry") + proto.RegisterType((*GetQueueProvidersRequest)(nil), "conductor.grpc.events.GetQueueProvidersRequest") + proto.RegisterType((*GetQueueProvidersResponse)(nil), "conductor.grpc.events.GetQueueProvidersResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// EventServiceClient is the client API for EventService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type EventServiceClient interface { + // POST / + AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) + // PUT / + UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) + // DELETE /{name} + RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) + // GET / + GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) + // GET /{name} + GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) + // GET /queues + GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) + GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) + // GET /queues/providers + GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) +} + +type eventServiceClient struct { + cc *grpc.ClientConn +} + +func NewEventServiceClient(cc *grpc.ClientConn) EventServiceClient { + return &eventServiceClient{cc} +} + +func (c *eventServiceClient) AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) { + out := new(AddEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/AddEventHandler", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) { + out := new(UpdateEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/UpdateEventHandler", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) { + out := new(RemoveEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/RemoveEventHandler", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) { + stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[0], "/conductor.grpc.events.EventService/GetEventHandlers", opts...) 
+ if err != nil { + return nil, err + } + x := &eventServiceGetEventHandlersClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type EventService_GetEventHandlersClient interface { + Recv() (*model.EventHandler, error) + grpc.ClientStream +} + +type eventServiceGetEventHandlersClient struct { + grpc.ClientStream +} + +func (x *eventServiceGetEventHandlersClient) Recv() (*model.EventHandler, error) { + m := new(model.EventHandler) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *eventServiceClient) GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) { + stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[1], "/conductor.grpc.events.EventService/GetEventHandlersForEvent", opts...) + if err != nil { + return nil, err + } + x := &eventServiceGetEventHandlersForEventClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type EventService_GetEventHandlersForEventClient interface { + Recv() (*model.EventHandler, error) + grpc.ClientStream +} + +type eventServiceGetEventHandlersForEventClient struct { + grpc.ClientStream +} + +func (x *eventServiceGetEventHandlersForEventClient) Recv() (*model.EventHandler, error) { + m := new(model.EventHandler) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *eventServiceClient) GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) { + out := new(GetQueuesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueues", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) { + out := new(GetQueueSizesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueSizes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) { + out := new(GetQueueProvidersResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueProviders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EventServiceServer is the server API for EventService service. +type EventServiceServer interface { + // POST / + AddEventHandler(context.Context, *AddEventHandlerRequest) (*AddEventHandlerResponse, error) + // PUT / + UpdateEventHandler(context.Context, *UpdateEventHandlerRequest) (*UpdateEventHandlerResponse, error) + // DELETE /{name} + RemoveEventHandler(context.Context, *RemoveEventHandlerRequest) (*RemoveEventHandlerResponse, error) + // GET / + GetEventHandlers(*GetEventHandlersRequest, EventService_GetEventHandlersServer) error + // GET /{name} + GetEventHandlersForEvent(*GetEventHandlersForEventRequest, EventService_GetEventHandlersForEventServer) error + // GET /queues + GetQueues(context.Context, *GetQueuesRequest) (*GetQueuesResponse, error) + GetQueueSizes(context.Context, *GetQueueSizesRequest) (*GetQueueSizesResponse, error) + // GET /queues/providers + GetQueueProviders(context.Context, *GetQueueProvidersRequest) (*GetQueueProvidersResponse, error) +} + +func RegisterEventServiceServer(s *grpc.Server, srv EventServiceServer) { + s.RegisterService(&_EventService_serviceDesc, srv) +} + +func _EventService_AddEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) 
error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).AddEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/AddEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).AddEventHandler(ctx, req.(*AddEventHandlerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_UpdateEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).UpdateEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/UpdateEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).UpdateEventHandler(ctx, req.(*UpdateEventHandlerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_RemoveEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).RemoveEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/RemoveEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).RemoveEventHandler(ctx, req.(*RemoveEventHandlerRequest)) + } + return interceptor(ctx, in, 
info, handler) +} + +func _EventService_GetEventHandlers_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetEventHandlersRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(EventServiceServer).GetEventHandlers(m, &eventServiceGetEventHandlersServer{stream}) +} + +type EventService_GetEventHandlersServer interface { + Send(*model.EventHandler) error + grpc.ServerStream +} + +type eventServiceGetEventHandlersServer struct { + grpc.ServerStream +} + +func (x *eventServiceGetEventHandlersServer) Send(m *model.EventHandler) error { + return x.ServerStream.SendMsg(m) +} + +func _EventService_GetEventHandlersForEvent_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetEventHandlersForEventRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(EventServiceServer).GetEventHandlersForEvent(m, &eventServiceGetEventHandlersForEventServer{stream}) +} + +type EventService_GetEventHandlersForEventServer interface { + Send(*model.EventHandler) error + grpc.ServerStream +} + +type eventServiceGetEventHandlersForEventServer struct { + grpc.ServerStream +} + +func (x *eventServiceGetEventHandlersForEventServer) Send(m *model.EventHandler) error { + return x.ServerStream.SendMsg(m) +} + +func _EventService_GetQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueuesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueues(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueues", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueues(ctx, req.(*GetQueuesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_GetQueueSizes_Handler(srv 
interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueSizesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueueSizes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueueSizes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueueSizes(ctx, req.(*GetQueueSizesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_GetQueueProviders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueProvidersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueueProviders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueueProviders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueueProviders(ctx, req.(*GetQueueProvidersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EventService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.events.EventService", + HandlerType: (*EventServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AddEventHandler", + Handler: _EventService_AddEventHandler_Handler, + }, + { + MethodName: "UpdateEventHandler", + Handler: _EventService_UpdateEventHandler_Handler, + }, + { + MethodName: "RemoveEventHandler", + Handler: _EventService_RemoveEventHandler_Handler, + }, + { + MethodName: "GetQueues", + Handler: _EventService_GetQueues_Handler, + }, + { + MethodName: "GetQueueSizes", + Handler: _EventService_GetQueueSizes_Handler, + }, + { + MethodName: 
"GetQueueProviders", + Handler: _EventService_GetQueueProviders_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetEventHandlers", + Handler: _EventService_GetEventHandlers_Handler, + ServerStreams: true, + }, + { + StreamName: "GetEventHandlersForEvent", + Handler: _EventService_GetEventHandlersForEvent_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/event_service.proto", +} + +func init() { + proto.RegisterFile("grpc/event_service.proto", fileDescriptor_event_service_913a1fde08d4f277) +} + +var fileDescriptor_event_service_913a1fde08d4f277 = []byte{ + // 687 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5d, 0x6f, 0xd3, 0x3c, + 0x18, 0x55, 0xd6, 0xf7, 0x65, 0xf4, 0x29, 0xb0, 0x61, 0xf6, 0x91, 0x5a, 0x43, 0x9b, 0x7a, 0x43, + 0x25, 0xc0, 0x19, 0x45, 0x1a, 0x1f, 0xd2, 0x24, 0xa8, 0xb4, 0x0d, 0x24, 0x24, 0xb6, 0x6c, 0x93, + 0x10, 0x17, 0x54, 0x5d, 0xe2, 0x75, 0x19, 0xa9, 0xdd, 0x26, 0x4e, 0x44, 0xe1, 0x6f, 0xf0, 0x4f, + 0xb8, 0xe6, 0x8a, 0x3f, 0x86, 0x62, 0xa7, 0x49, 0x9a, 0x26, 0xb4, 0x45, 0xdc, 0xd5, 0xcf, 0xc7, + 0x39, 0x3e, 0xb6, 0x9f, 0xd3, 0x80, 0xde, 0xf3, 0x06, 0x96, 0x41, 0x43, 0xca, 0x44, 0xc7, 0xa7, + 0x5e, 0xe8, 0x58, 0x94, 0x0c, 0x3c, 0x2e, 0x38, 0x5a, 0xb7, 0x38, 0xb3, 0x03, 0x4b, 0x70, 0x8f, + 0x44, 0x35, 0x44, 0xd6, 0xf8, 0x58, 0xef, 0x73, 0x9b, 0xba, 0xaa, 0xe3, 0xaa, 0xcb, 0x6c, 0x97, + 0x7a, 0xaa, 0xa1, 0x71, 0x02, 0x1b, 0xaf, 0x6d, 0xfb, 0x20, 0x4a, 0xbc, 0x51, 0x09, 0x93, 0x0e, + 0x03, 0xea, 0x0b, 0xf4, 0x0c, 0x96, 0xe3, 0x52, 0x5d, 0xdb, 0xd1, 0x9a, 0xb5, 0xd6, 0x7d, 0x92, + 0x82, 0xcb, 0x66, 0x32, 0xd1, 0x36, 0xae, 0x6e, 0xd4, 0x61, 0x73, 0x0a, 0xd2, 0x1f, 0x70, 0xe6, + 0xd3, 0xc6, 0x19, 0xd4, 0xcf, 0x07, 0x76, 0x57, 0xd0, 0x7f, 0x4a, 0xb8, 0x05, 0xb8, 0x08, 0x35, + 0xe6, 0x34, 0xa0, 0x6e, 0xd2, 0x3e, 0x0f, 0x0b, 0x39, 0x11, 0xfc, 0xc7, 0xba, 0x7d, 0x2a, 0x09, + 0xab, 0xa6, 0xfc, 0x1d, 0xc1, 0x15, 0x35, 0xc4, 0x70, 0x75, 0xd8, 0x3c, 0xa2, 0x22, 
0x9b, 0xf2, + 0x63, 0xb0, 0xc6, 0x07, 0xd8, 0xce, 0xa7, 0x0e, 0xb9, 0x27, 0xd7, 0x63, 0xbe, 0x35, 0xf8, 0x5f, + 0x5e, 0x42, 0x4c, 0xa8, 0x16, 0x68, 0x1b, 0x6a, 0x5d, 0x4b, 0x38, 0x21, 0xed, 0x70, 0xe6, 0x8e, + 0xf4, 0xa5, 0x1d, 0xad, 0x79, 0xd3, 0x04, 0x15, 0x7a, 0xcf, 0xdc, 0x51, 0x03, 0xc1, 0xea, 0x11, + 0x15, 0x27, 0x01, 0x0d, 0x68, 0xc2, 0xf6, 0x4b, 0x83, 0xbb, 0x99, 0xa0, 0xda, 0x1e, 0xba, 0x06, + 0xa4, 0xde, 0x85, 0xe0, 0x9d, 0x61, 0x94, 0xea, 0x04, 0x9e, 0xa3, 0x6b, 0x3b, 0x95, 0x66, 0xad, + 0xb5, 0x4f, 0x0a, 0x5f, 0x07, 0x99, 0x42, 0x51, 0xe7, 0x7c, 0xc6, 0x65, 0xf4, 0xdc, 0x73, 0x0e, + 0x98, 0xf0, 0x46, 0xe6, 0x0a, 0x9d, 0x8c, 0xe2, 0x36, 0xac, 0x15, 0x15, 0xa2, 0x55, 0xa8, 0x7c, + 0xa6, 0xa3, 0x58, 0x62, 0xf4, 0x33, 0x92, 0x1d, 0x76, 0xdd, 0x80, 0x4a, 0x69, 0x55, 0x53, 0x2d, + 0x5e, 0x2e, 0x3d, 0xd7, 0x1a, 0x1b, 0xb0, 0x36, 0xa6, 0x3f, 0x75, 0xbe, 0xa6, 0xea, 0x7e, 0x56, + 0x60, 0x3d, 0x97, 0x88, 0x15, 0x0e, 0xe1, 0x5e, 0x4e, 0xa1, 0xc3, 0x2e, 0xb9, 0xbe, 0x24, 0x25, + 0xb6, 0x67, 0x48, 0x9c, 0x80, 0x9a, 0x90, 0xf9, 0x96, 0x5d, 0x72, 0xa5, 0x73, 0x95, 0xe6, 0xc2, + 0xf8, 0x87, 0x06, 0xd5, 0x64, 0x85, 0x7a, 0x50, 0x53, 0xbc, 0x7e, 0x04, 0x16, 0x9f, 0xed, 0xe1, + 0x42, 0xc4, 0x09, 0x18, 0x49, 0x93, 0x8a, 0x1c, 0x86, 0x49, 0x00, 0xef, 0xc3, 0x4a, 0x2e, 0x3d, + 0xeb, 0x68, 0x2b, 0x99, 0xa3, 0xc5, 0xdf, 0x60, 0xbd, 0x50, 0x60, 0x01, 0xc8, 0xbb, 0x2c, 0x48, + 0xad, 0xb5, 0xf7, 0x77, 0x62, 0xb2, 0xf7, 0x8a, 0x41, 0x1f, 0x57, 0x1f, 0x7b, 0x3c, 0x74, 0xec, + 0xcc, 0x9c, 0xbc, 0x80, 0x7a, 0x41, 0x2e, 0xbe, 0xde, 0x2d, 0xa8, 0x0e, 0xc6, 0x41, 0x79, 0xb6, + 0x55, 0x33, 0x0d, 0xb4, 0xbe, 0x2f, 0xc3, 0x2d, 0x29, 0xea, 0x54, 0xd9, 0x1e, 0x1a, 0xc0, 0x4a, + 0xce, 0x6c, 0xd0, 0xe3, 0x92, 0xdd, 0x17, 0xfb, 0x1c, 0x26, 0xf3, 0x96, 0xc7, 0x1b, 0x1c, 0x01, + 0x9a, 0x76, 0x1b, 0xb4, 0x5b, 0x82, 0x52, 0x6a, 0x77, 0xf8, 0xc9, 0x02, 0x1d, 0x29, 0xf5, 0xb4, + 0x33, 0x95, 0x52, 0x97, 0xba, 0x5e, 0x29, 0x75, 0xb9, 0xed, 0x21, 0x4b, 0x3a, 0xd0, 0x84, 0xb7, + 0x21, 0x52, 0xfe, 0x4c, 
0x8a, 0xfc, 0x11, 0xff, 0xd9, 0xcf, 0x77, 0x35, 0xe4, 0xcb, 0x47, 0x53, + 0x68, 0xa0, 0x68, 0x6f, 0x4e, 0xb2, 0x9c, 0xe3, 0xce, 0x26, 0xfd, 0x04, 0xd5, 0xc4, 0x00, 0xd1, + 0x83, 0xd9, 0x16, 0xa9, 0x60, 0x9b, 0xf3, 0x7a, 0x29, 0xba, 0x86, 0xdb, 0x13, 0x73, 0x83, 0x1e, + 0xce, 0x37, 0x5d, 0x8a, 0xe7, 0xd1, 0x22, 0xa3, 0x88, 0xc2, 0xf4, 0x2f, 0x21, 0x99, 0x2c, 0x64, + 0xcc, 0x80, 0xc8, 0xcf, 0x27, 0xde, 0x9d, 0xbf, 0x41, 0xf1, 0xb6, 0x19, 0x60, 0x8b, 0xf7, 0x09, + 0xa3, 0xe2, 0xd2, 0x75, 0xbe, 0xe4, 0xda, 0xdb, 0x77, 0xb2, 0x13, 0x7b, 0x7c, 0xf1, 0xf1, 0x55, + 0xcf, 0x11, 0x57, 0xc1, 0x05, 0xb1, 0x78, 0xdf, 0x88, 0x5b, 0x8c, 0xa4, 0xc5, 0xb0, 0x5c, 0x87, + 0x32, 0x61, 0xf4, 0xb8, 0xfc, 0xc8, 0x49, 0xe3, 0xe9, 0x37, 0x8f, 0x7f, 0x71, 0x43, 0xde, 0xe4, + 0xd3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x65, 0xa5, 0x44, 0x1b, 0x09, 0x09, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go new file mode 100644 index 0000000000..daf353165f --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/grpc/metadata/metadata_service.pb.go @@ -0,0 +1,867 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/metadata_service.proto + +package metadata // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type CreateWorkflowRequest struct { + Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowRequest) Reset() { *m = CreateWorkflowRequest{} } +func (m *CreateWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowRequest) ProtoMessage() {} +func (*CreateWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{0} +} +func (m *CreateWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowRequest.Unmarshal(m, b) +} +func (m *CreateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowRequest.Merge(dst, src) +} +func (m *CreateWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowRequest.Size(m) +} +func (m *CreateWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowRequest proto.InternalMessageInfo + +func (m *CreateWorkflowRequest) GetWorkflow() *model.WorkflowDef { + if m != nil { + return m.Workflow + } + return nil +} + +type CreateWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowResponse) Reset() { *m = CreateWorkflowResponse{} } +func (m *CreateWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowResponse) ProtoMessage() {} +func (*CreateWorkflowResponse) Descriptor() ([]byte, []int) { + return 
fileDescriptor_metadata_service_4778cc9d199e5aef, []int{1} +} +func (m *CreateWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowResponse.Unmarshal(m, b) +} +func (m *CreateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowResponse.Merge(dst, src) +} +func (m *CreateWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowResponse.Size(m) +} +func (m *CreateWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowResponse proto.InternalMessageInfo + +type UpdateWorkflowsRequest struct { + Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} } +func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowsRequest) ProtoMessage() {} +func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{2} +} +func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) +} +func (m *UpdateWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowsRequest.Merge(dst, src) +} +func (m *UpdateWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowsRequest.Size(m) +} +func (m *UpdateWorkflowsRequest) XXX_DiscardUnknown() { + 
xxx_messageInfo_UpdateWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowsRequest proto.InternalMessageInfo + +func (m *UpdateWorkflowsRequest) GetDefs() []*model.WorkflowDef { + if m != nil { + return m.Defs + } + return nil +} + +type UpdateWorkflowsResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowsResponse) Reset() { *m = UpdateWorkflowsResponse{} } +func (m *UpdateWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowsResponse) ProtoMessage() {} +func (*UpdateWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{3} +} +func (m *UpdateWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowsResponse.Unmarshal(m, b) +} +func (m *UpdateWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowsResponse.Merge(dst, src) +} +func (m *UpdateWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowsResponse.Size(m) +} +func (m *UpdateWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowsResponse proto.InternalMessageInfo + +type GetWorkflowRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowRequest) Reset() { *m = GetWorkflowRequest{} } +func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } +func 
(*GetWorkflowRequest) ProtoMessage() {} +func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{4} +} +func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) +} +func (m *GetWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowRequest.Merge(dst, src) +} +func (m *GetWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowRequest.Size(m) +} +func (m *GetWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowRequest proto.InternalMessageInfo + +func (m *GetWorkflowRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +type GetWorkflowResponse struct { + Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowResponse) Reset() { *m = GetWorkflowResponse{} } +func (m *GetWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowResponse) ProtoMessage() {} +func (*GetWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{5} +} +func (m *GetWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowResponse.Unmarshal(m, b) +} +func (m *GetWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowResponse) 
XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowResponse.Merge(dst, src) +} +func (m *GetWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowResponse.Size(m) +} +func (m *GetWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowResponse proto.InternalMessageInfo + +func (m *GetWorkflowResponse) GetWorkflow() *model.WorkflowDef { + if m != nil { + return m.Workflow + } + return nil +} + +type CreateTasksRequest struct { + Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } +func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTasksRequest) ProtoMessage() {} +func (*CreateTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{6} +} +func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) +} +func (m *CreateTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTasksRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTasksRequest.Merge(dst, src) +} +func (m *CreateTasksRequest) XXX_Size() int { + return xxx_messageInfo_CreateTasksRequest.Size(m) +} +func (m *CreateTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTasksRequest proto.InternalMessageInfo + +func (m *CreateTasksRequest) GetDefs() []*model.TaskDef { + if m != nil { + return m.Defs + } + return nil +} + +type CreateTasksResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + 
XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTasksResponse) Reset() { *m = CreateTasksResponse{} } +func (m *CreateTasksResponse) String() string { return proto.CompactTextString(m) } +func (*CreateTasksResponse) ProtoMessage() {} +func (*CreateTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{7} +} +func (m *CreateTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTasksResponse.Unmarshal(m, b) +} +func (m *CreateTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTasksResponse.Marshal(b, m, deterministic) +} +func (dst *CreateTasksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTasksResponse.Merge(dst, src) +} +func (m *CreateTasksResponse) XXX_Size() int { + return xxx_messageInfo_CreateTasksResponse.Size(m) +} +func (m *CreateTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTasksResponse proto.InternalMessageInfo + +type UpdateTaskRequest struct { + Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } +func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskRequest) ProtoMessage() {} +func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{8} +} +func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) +} +func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) +} +func (m *UpdateTaskRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTaskRequest.Size(m) +} +func (m *UpdateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo + +func (m *UpdateTaskRequest) GetTask() *model.TaskDef { + if m != nil { + return m.Task + } + return nil +} + +type UpdateTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } +func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskResponse) ProtoMessage() {} +func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{9} +} +func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) +} +func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) +} +func (m *UpdateTaskResponse) XXX_Size() int { + return xxx_messageInfo_UpdateTaskResponse.Size(m) +} +func (m *UpdateTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo + +type GetTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return 
proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{10} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +type GetTaskResponse struct { + Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } +func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskResponse) ProtoMessage() {} +func (*GetTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{11} +} +func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) +} +func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskResponse.Merge(dst, src) +} +func (m *GetTaskResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskResponse.Size(m) +} +func (m 
*GetTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo + +func (m *GetTaskResponse) GetTask() *model.TaskDef { + if m != nil { + return m.Task + } + return nil +} + +type DeleteTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } +func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskRequest) ProtoMessage() {} +func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{12} +} +func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) +} +func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) +} +func (m *DeleteTaskRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTaskRequest.Size(m) +} +func (m *DeleteTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo + +func (m *DeleteTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +type DeleteTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskResponse) Reset() { *m = DeleteTaskResponse{} } +func (m *DeleteTaskResponse) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskResponse) ProtoMessage() {} +func 
(*DeleteTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{13} +} +func (m *DeleteTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskResponse.Unmarshal(m, b) +} +func (m *DeleteTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskResponse.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskResponse.Merge(dst, src) +} +func (m *DeleteTaskResponse) XXX_Size() int { + return xxx_messageInfo_DeleteTaskResponse.Size(m) +} +func (m *DeleteTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CreateWorkflowRequest)(nil), "conductor.grpc.metadata.CreateWorkflowRequest") + proto.RegisterType((*CreateWorkflowResponse)(nil), "conductor.grpc.metadata.CreateWorkflowResponse") + proto.RegisterType((*UpdateWorkflowsRequest)(nil), "conductor.grpc.metadata.UpdateWorkflowsRequest") + proto.RegisterType((*UpdateWorkflowsResponse)(nil), "conductor.grpc.metadata.UpdateWorkflowsResponse") + proto.RegisterType((*GetWorkflowRequest)(nil), "conductor.grpc.metadata.GetWorkflowRequest") + proto.RegisterType((*GetWorkflowResponse)(nil), "conductor.grpc.metadata.GetWorkflowResponse") + proto.RegisterType((*CreateTasksRequest)(nil), "conductor.grpc.metadata.CreateTasksRequest") + proto.RegisterType((*CreateTasksResponse)(nil), "conductor.grpc.metadata.CreateTasksResponse") + proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.metadata.UpdateTaskRequest") + proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.metadata.UpdateTaskResponse") + proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.metadata.GetTaskRequest") + proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.metadata.GetTaskResponse") + 
proto.RegisterType((*DeleteTaskRequest)(nil), "conductor.grpc.metadata.DeleteTaskRequest") + proto.RegisterType((*DeleteTaskResponse)(nil), "conductor.grpc.metadata.DeleteTaskResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetadataServiceClient is the client API for MetadataService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type MetadataServiceClient interface { + // POST /workflow + CreateWorkflow(ctx context.Context, in *CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) + // PUT /workflow + UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) + // GET /workflow/{name} + GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) + // POST /taskdefs + CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) + // PUT /taskdefs + UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) + // GET /taskdefs/{tasktype} + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) + // DELETE /taskdefs/{tasktype} + DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) +} + +type metadataServiceClient struct { + cc *grpc.ClientConn +} + +func NewMetadataServiceClient(cc *grpc.ClientConn) MetadataServiceClient { + return &metadataServiceClient{cc} +} + +func (c *metadataServiceClient) CreateWorkflow(ctx context.Context, in 
*CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) { + out := new(CreateWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) { + out := new(UpdateWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) { + out := new(GetWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) { + out := new(CreateTasksResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { + out := new(UpdateTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { + out := new(GetTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetTask", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) { + out := new(DeleteTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/DeleteTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MetadataServiceServer is the server API for MetadataService service. +type MetadataServiceServer interface { + // POST /workflow + CreateWorkflow(context.Context, *CreateWorkflowRequest) (*CreateWorkflowResponse, error) + // PUT /workflow + UpdateWorkflows(context.Context, *UpdateWorkflowsRequest) (*UpdateWorkflowsResponse, error) + // GET /workflow/{name} + GetWorkflow(context.Context, *GetWorkflowRequest) (*GetWorkflowResponse, error) + // POST /taskdefs + CreateTasks(context.Context, *CreateTasksRequest) (*CreateTasksResponse, error) + // PUT /taskdefs + UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) + // GET /taskdefs/{tasktype} + GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) + // DELETE /taskdefs/{tasktype} + DeleteTask(context.Context, *DeleteTaskRequest) (*DeleteTaskResponse, error) +} + +func RegisterMetadataServiceServer(s *grpc.Server, srv MetadataServiceServer) { + s.RegisterService(&_MetadataService_serviceDesc, srv) +} + +func _MetadataService_CreateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/CreateWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(MetadataServiceServer).CreateWorkflow(ctx, req.(*CreateWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, req.(*UpdateWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/GetWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetWorkflow(ctx, req.(*GetWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_CreateTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/CreateTasks", + } 
+ handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).CreateTasks(ctx, req.(*CreateTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/conductor.grpc.metadata.MetadataService/DeleteTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetadataService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.metadata.MetadataService", + HandlerType: (*MetadataServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateWorkflow", + Handler: _MetadataService_CreateWorkflow_Handler, + }, + { + MethodName: "UpdateWorkflows", + Handler: _MetadataService_UpdateWorkflows_Handler, + }, + { + MethodName: "GetWorkflow", + Handler: _MetadataService_GetWorkflow_Handler, + }, + { + MethodName: "CreateTasks", + Handler: _MetadataService_CreateTasks_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _MetadataService_UpdateTask_Handler, + }, + { + MethodName: "GetTask", + Handler: _MetadataService_GetTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _MetadataService_DeleteTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/metadata_service.proto", +} + +func init() { + proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_4778cc9d199e5aef) +} + +var fileDescriptor_metadata_service_4778cc9d199e5aef = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdf, 0x6b, 0xd3, 0x50, + 0x18, 0xa5, 0xba, 0xb9, 0xed, 0x1b, 0xac, 0xf4, 0x76, 0x5b, 0x63, 0xe6, 0x43, 0xc9, 0x8b, 0xc5, + 0xcd, 0x9b, 0x32, 0x5f, 0x7c, 0x13, 0xe3, 0x60, 0x20, 0x88, 0x5a, 0x27, 0x82, 0x08, 0x23, 0x4d, + 0xbe, 0x74, 0xa1, 0x49, 0x6e, 0x96, 0x7b, 0xbb, 0xd9, 0x7f, 0xdd, 0x27, 0x49, 0x72, 0xf3, 0xbb, + 0x4d, 0x2b, 0xbe, 0xb5, 0xf7, 0x9e, 0xef, 0x9c, 0x9c, 0x8f, 0x73, 0xb8, 0x70, 0x36, 0x8b, 0x42, + 0x4b, 0xf7, 0x51, 0x98, 0xb6, 0x29, 0xcc, 0x5b, 0x8e, 0xd1, 0x83, 0x6b, 0x21, 0x0d, 0x23, 0x26, + 0x18, 0x19, 0x58, 
0x2c, 0xb0, 0x17, 0x96, 0x60, 0x11, 0x8d, 0x61, 0x34, 0x83, 0xa9, 0x7d, 0x9f, + 0xd9, 0xe8, 0xe9, 0xc2, 0xe4, 0x73, 0x1b, 0x9d, 0x14, 0xad, 0x0e, 0xd2, 0xc3, 0x47, 0x16, 0xcd, + 0x1d, 0x8f, 0x3d, 0xe6, 0x17, 0xda, 0x57, 0x38, 0xf9, 0x10, 0xa1, 0x29, 0xf0, 0x87, 0xbc, 0x9a, + 0xe0, 0xfd, 0x02, 0xb9, 0x20, 0x6f, 0x61, 0x3f, 0x43, 0x2b, 0x9d, 0x61, 0x67, 0x74, 0x78, 0xf9, + 0x82, 0x16, 0x92, 0xc9, 0x30, 0xcd, 0x66, 0xae, 0xd0, 0x99, 0xe4, 0x68, 0x4d, 0x81, 0xd3, 0x3a, + 0x25, 0x0f, 0x59, 0xc0, 0x51, 0xfb, 0x08, 0xa7, 0xdf, 0x43, 0xbb, 0x74, 0xc3, 0x33, 0xb5, 0x31, + 0xec, 0xd8, 0xe8, 0x70, 0xa5, 0x33, 0x7c, 0xba, 0x51, 0x29, 0x41, 0x6a, 0xcf, 0x61, 0xd0, 0xe0, + 0x92, 0x32, 0x06, 0x90, 0x6b, 0x14, 0x75, 0x43, 0x04, 0x76, 0x02, 0xd3, 0xc7, 0xc4, 0xcc, 0xc1, + 0x24, 0xf9, 0x4d, 0x14, 0xd8, 0x7b, 0xc0, 0x88, 0xbb, 0x2c, 0x50, 0x9e, 0x0c, 0x3b, 0xa3, 0xdd, + 0x49, 0xf6, 0x57, 0xfb, 0x0c, 0xfd, 0x0a, 0x47, 0x4a, 0xfd, 0x1f, 0x5b, 0x31, 0x80, 0xa4, 0x5b, + 0xb9, 0x31, 0xf9, 0x3c, 0xf7, 0x7d, 0x51, 0xf1, 0xad, 0x34, 0xb8, 0x62, 0x70, 0xe1, 0xf9, 0x04, + 0xfa, 0x15, 0x0e, 0xe9, 0xf7, 0x3d, 0xf4, 0xd2, 0x55, 0xc4, 0xc7, 0x25, 0xe6, 0x38, 0x02, 0xf2, + 0x2b, 0x5b, 0x98, 0x63, 0x94, 0x76, 0x0c, 0xa4, 0x4c, 0x21, 0x89, 0x5f, 0xc3, 0xd1, 0x35, 0x8a, + 0x32, 0xeb, 0x19, 0x1c, 0xc4, 0xf8, 0x5b, 0xb1, 0x0c, 0xb3, 0x4d, 0xee, 0xc7, 0x07, 0x37, 0xcb, + 0x10, 0xb5, 0x77, 0xd0, 0xcd, 0xe1, 0x72, 0x5f, 0xff, 0xf6, 0x15, 0x63, 0xe8, 0x5d, 0xa1, 0x87, + 0x55, 0x23, 0xad, 0x92, 0xc7, 0x40, 0xca, 0x13, 0xa9, 0xea, 0xe5, 0x9f, 0x5d, 0xe8, 0x7e, 0x92, + 0x7d, 0xf8, 0x96, 0xb6, 0x86, 0xdc, 0xc3, 0x51, 0x35, 0x95, 0x84, 0xd2, 0x35, 0x15, 0xa2, 0x2b, + 0x1b, 0xa1, 0xea, 0x5b, 0xe3, 0xa5, 0x79, 0x01, 0xdd, 0x5a, 0x44, 0xc9, 0x7a, 0x8e, 0xd5, 0xc5, + 0x50, 0xc7, 0xdb, 0x0f, 0x48, 0xd5, 0x3b, 0x38, 0x2c, 0x25, 0x97, 0x9c, 0xaf, 0x25, 0x68, 0x76, + 0x44, 0xbd, 0xd8, 0x0e, 0x5c, 0x28, 0x95, 0xe2, 0xd8, 0xa2, 0xd4, 0x0c, 0x7e, 0x8b, 0xd2, 0x8a, + 0x84, 0x13, 0x04, 0x28, 0xe2, 0x49, 0x5e, 0x6d, 0xd8, 
0x49, 0x29, 0x3d, 0xea, 0xf9, 0x56, 0x58, + 0x29, 0xf3, 0x0b, 0xf6, 0x64, 0x80, 0xc9, 0xcb, 0xb6, 0x4d, 0x94, 0x05, 0x46, 0x9b, 0x81, 0x85, + 0x89, 0x22, 0xab, 0x2d, 0x26, 0x1a, 0x15, 0x68, 0x31, 0xd1, 0x0c, 0xbf, 0xc1, 0x41, 0xb5, 0x98, + 0x4f, 0x03, 0x14, 0x8e, 0xe7, 0xfe, 0xae, 0x4d, 0x1a, 0xbd, 0x5a, 0x2f, 0xbe, 0x4c, 0x7f, 0x1a, + 0x33, 0x57, 0xdc, 0x2d, 0xa6, 0xd4, 0x62, 0xbe, 0x2e, 0xa7, 0xf4, 0x7c, 0x4a, 0xb7, 0x3c, 0x17, + 0x03, 0xa1, 0xcf, 0x58, 0xf2, 0x18, 0x15, 0xe7, 0x95, 0xb7, 0x69, 0xfa, 0x2c, 0xa9, 0xf3, 0x9b, + 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5f, 0x81, 0xa1, 0x07, 0xb3, 0x06, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go b/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go new file mode 100644 index 0000000000..2a5710156d --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/grpc/search/search.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/search.proto + +package search // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Request struct { + Start int32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"` + Size int32 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"` + Sort string `protobuf:"bytes,3,opt,name=sort,proto3" json:"sort,omitempty"` + FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText,proto3" json:"free_text,omitempty"` + Query string `protobuf:"bytes,5,opt,name=query,proto3" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_search_855a611014aa2143, []int{0} +} +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (dst *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(dst, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo + +func (m *Request) GetStart() int32 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Request) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *Request) GetSort() string { + if m != nil { + return m.Sort + } + return "" +} + +func (m *Request) GetFreeText() string { + if m != nil { + return m.FreeText + } + return "" +} + +func (m *Request) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func init() { + proto.RegisterType((*Request)(nil), 
"conductor.grpc.search.Request") +} + +func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_855a611014aa2143) } + +var fileDescriptor_search_855a611014aa2143 = []byte{ + // 212 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0xa9, 0x6e, 0x75, 0x37, 0x37, 0x83, 0x42, 0xd0, 0xcb, 0xe2, 0x69, 0x4f, 0xc9, 0xc1, + 0x17, 0x90, 0x7d, 0x02, 0xa9, 0x9e, 0xbc, 0xc8, 0x36, 0x4e, 0xdb, 0x40, 0xdb, 0x69, 0x27, 0x13, + 0xa8, 0x7d, 0x7a, 0xe9, 0x54, 0xd4, 0xbd, 0xcd, 0x7c, 0xdf, 0x10, 0xfe, 0xfc, 0xea, 0xa6, 0xa6, + 0xc1, 0xbb, 0x08, 0x27, 0xf2, 0x8d, 0x1d, 0x08, 0x19, 0xf5, 0x9d, 0xc7, 0xfe, 0x33, 0x79, 0x46, + 0xb2, 0x8b, 0xb4, 0xab, 0x7c, 0x9c, 0xd5, 0x75, 0x01, 0x63, 0x82, 0xc8, 0xfa, 0x56, 0xe5, 0x91, + 0x4f, 0xc4, 0x26, 0xdb, 0x67, 0x87, 0xbc, 0x58, 0x17, 0xad, 0xd5, 0x26, 0x86, 0x19, 0xcc, 0x85, + 0x40, 0x99, 0x85, 0x21, 0xb1, 0xb9, 0xdc, 0x67, 0x87, 0x5d, 0x21, 0xb3, 0x7e, 0x50, 0xbb, 0x8a, + 0x00, 0x3e, 0x18, 0x26, 0x36, 0x1b, 0x11, 0xdb, 0x05, 0xbc, 0xc1, 0x24, 0x4f, 0x8f, 0x09, 0xe8, + 0xcb, 0xe4, 0x22, 0xd6, 0xe5, 0xd8, 0xa8, 0x7b, 0x8f, 0x9d, 0xed, 0x81, 0xab, 0x36, 0x4c, 0xf6, + 0x3c, 0xe0, 0x71, 0xfb, 0x2a, 0x09, 0x5f, 0xca, 0xf7, 0xe7, 0x3a, 0x70, 0x93, 0x4a, 0xeb, 0xb1, + 0x73, 0x3f, 0xc7, 0xee, 0xf7, 0xd8, 0xf9, 0x36, 0x40, 0xcf, 0xae, 0x46, 0xf9, 0xf3, 0x1f, 0xff, + 0x57, 0x41, 0x79, 0x25, 0x1d, 0x3c, 0x7d, 0x07, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x4d, 0x39, 0xe7, + 0x18, 0x01, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go new file mode 100644 index 0000000000..eabcf3ae1a --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/grpc/tasks/task_service.pb.go @@ -0,0 +1,1757 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: grpc/task_service.proto + +package tasks // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type PollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollRequest) Reset() { *m = PollRequest{} } +func (m *PollRequest) String() string { return proto.CompactTextString(m) } +func (*PollRequest) ProtoMessage() {} +func (*PollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{0} +} +func (m *PollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollRequest.Unmarshal(m, b) +} +func (m *PollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollRequest.Marshal(b, m, deterministic) +} +func (dst *PollRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollRequest.Merge(dst, src) +} +func (m *PollRequest) XXX_Size() int { + return 
xxx_messageInfo_PollRequest.Size(m) +} +func (m *PollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PollRequest proto.InternalMessageInfo + +func (m *PollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *PollRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *PollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +type PollResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollResponse) Reset() { *m = PollResponse{} } +func (m *PollResponse) String() string { return proto.CompactTextString(m) } +func (*PollResponse) ProtoMessage() {} +func (*PollResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{1} +} +func (m *PollResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollResponse.Unmarshal(m, b) +} +func (m *PollResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollResponse.Marshal(b, m, deterministic) +} +func (dst *PollResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollResponse.Merge(dst, src) +} +func (m *PollResponse) XXX_Size() int { + return xxx_messageInfo_PollResponse.Size(m) +} +func (m *PollResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PollResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PollResponse proto.InternalMessageInfo + +func (m *PollResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type BatchPollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` 
+ Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"` + Count int32 `protobuf:"varint,4,opt,name=count,proto3" json:"count,omitempty"` + Timeout int32 `protobuf:"varint,5,opt,name=timeout,proto3" json:"timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPollRequest) Reset() { *m = BatchPollRequest{} } +func (m *BatchPollRequest) String() string { return proto.CompactTextString(m) } +func (*BatchPollRequest) ProtoMessage() {} +func (*BatchPollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{2} +} +func (m *BatchPollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPollRequest.Unmarshal(m, b) +} +func (m *BatchPollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPollRequest.Marshal(b, m, deterministic) +} +func (dst *BatchPollRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPollRequest.Merge(dst, src) +} +func (m *BatchPollRequest) XXX_Size() int { + return xxx_messageInfo_BatchPollRequest.Size(m) +} +func (m *BatchPollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPollRequest proto.InternalMessageInfo + +func (m *BatchPollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *BatchPollRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *BatchPollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *BatchPollRequest) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *BatchPollRequest) GetTimeout() int32 { + if m != nil { + return m.Timeout + } + return 0 +} + +type TasksInProgressRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" 
json:"task_type,omitempty"` + StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"` + Count int32 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} } +func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressRequest) ProtoMessage() {} +func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{3} +} +func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) +} +func (m *TasksInProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressRequest.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressRequest.Merge(dst, src) +} +func (m *TasksInProgressRequest) XXX_Size() int { + return xxx_messageInfo_TasksInProgressRequest.Size(m) +} +func (m *TasksInProgressRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressRequest proto.InternalMessageInfo + +func (m *TasksInProgressRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *TasksInProgressRequest) GetStartKey() string { + if m != nil { + return m.StartKey + } + return "" +} + +func (m *TasksInProgressRequest) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +type TasksInProgressResponse struct { + Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func 
(m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse{} } +func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressResponse) ProtoMessage() {} +func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{4} +} +func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) +} +func (m *TasksInProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressResponse.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressResponse.Merge(dst, src) +} +func (m *TasksInProgressResponse) XXX_Size() int { + return xxx_messageInfo_TasksInProgressResponse.Size(m) +} +func (m *TasksInProgressResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressResponse proto.InternalMessageInfo + +func (m *TasksInProgressResponse) GetTasks() []*model.Task { + if m != nil { + return m.Tasks + } + return nil +} + +type PendingTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } +func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } +func (*PendingTaskRequest) ProtoMessage() {} +func (*PendingTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{5} +} +func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) +} +func (m *PendingTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PendingTaskRequest.Marshal(b, m, deterministic) +} +func (dst *PendingTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingTaskRequest.Merge(dst, src) +} +func (m *PendingTaskRequest) XXX_Size() int { + return xxx_messageInfo_PendingTaskRequest.Size(m) +} +func (m *PendingTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PendingTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingTaskRequest proto.InternalMessageInfo + +func (m *PendingTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *PendingTaskRequest) GetTaskRefName() string { + if m != nil { + return m.TaskRefName + } + return "" +} + +type PendingTaskResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PendingTaskResponse) Reset() { *m = PendingTaskResponse{} } +func (m *PendingTaskResponse) String() string { return proto.CompactTextString(m) } +func (*PendingTaskResponse) ProtoMessage() {} +func (*PendingTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{6} +} +func (m *PendingTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PendingTaskResponse.Unmarshal(m, b) +} +func (m *PendingTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PendingTaskResponse.Marshal(b, m, deterministic) +} +func (dst *PendingTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingTaskResponse.Merge(dst, src) +} +func (m *PendingTaskResponse) XXX_Size() int { + return xxx_messageInfo_PendingTaskResponse.Size(m) +} +func (m *PendingTaskResponse) XXX_DiscardUnknown() { + 
xxx_messageInfo_PendingTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingTaskResponse proto.InternalMessageInfo + +func (m *PendingTaskResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type UpdateTaskRequest struct { + Result *model.TaskResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } +func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskRequest) ProtoMessage() {} +func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{7} +} +func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) +} +func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) +} +func (m *UpdateTaskRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTaskRequest.Size(m) +} +func (m *UpdateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo + +func (m *UpdateTaskRequest) GetResult() *model.TaskResult { + if m != nil { + return m.Result + } + return nil +} + +type UpdateTaskResponse struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } +func (m *UpdateTaskResponse) String() string { return 
proto.CompactTextString(m) } +func (*UpdateTaskResponse) ProtoMessage() {} +func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{8} +} +func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) +} +func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) +} +func (m *UpdateTaskResponse) XXX_Size() int { + return xxx_messageInfo_UpdateTaskResponse.Size(m) +} +func (m *UpdateTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo + +func (m *UpdateTaskResponse) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type AckTaskRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } +func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) } +func (*AckTaskRequest) ProtoMessage() {} +func (*AckTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{9} +} +func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) +} +func (m *AckTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AckTaskRequest.Marshal(b, m, deterministic) +} +func (dst *AckTaskRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_AckTaskRequest.Merge(dst, src) +} +func (m *AckTaskRequest) XXX_Size() int { + return xxx_messageInfo_AckTaskRequest.Size(m) +} +func (m *AckTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskRequest proto.InternalMessageInfo + +func (m *AckTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AckTaskRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +type AckTaskResponse struct { + Ack bool `protobuf:"varint,1,opt,name=ack,proto3" json:"ack,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } +func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } +func (*AckTaskResponse) ProtoMessage() {} +func (*AckTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{10} +} +func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) +} +func (m *AckTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AckTaskResponse.Marshal(b, m, deterministic) +} +func (dst *AckTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AckTaskResponse.Merge(dst, src) +} +func (m *AckTaskResponse) XXX_Size() int { + return xxx_messageInfo_AckTaskResponse.Size(m) +} +func (m *AckTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskResponse proto.InternalMessageInfo + +func (m *AckTaskResponse) GetAck() bool { + if m != nil { + return m.Ack + } + return false +} + +type AddLogRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + Log string `protobuf:"bytes,2,opt,name=log,proto3" 
json:"log,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } +func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } +func (*AddLogRequest) ProtoMessage() {} +func (*AddLogRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{11} +} +func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) +} +func (m *AddLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddLogRequest.Marshal(b, m, deterministic) +} +func (dst *AddLogRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddLogRequest.Merge(dst, src) +} +func (m *AddLogRequest) XXX_Size() int { + return xxx_messageInfo_AddLogRequest.Size(m) +} +func (m *AddLogRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddLogRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddLogRequest proto.InternalMessageInfo + +func (m *AddLogRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AddLogRequest) GetLog() string { + if m != nil { + return m.Log + } + return "" +} + +type AddLogResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddLogResponse) Reset() { *m = AddLogResponse{} } +func (m *AddLogResponse) String() string { return proto.CompactTextString(m) } +func (*AddLogResponse) ProtoMessage() {} +func (*AddLogResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{12} +} +func (m *AddLogResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddLogResponse.Unmarshal(m, b) +} +func (m *AddLogResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddLogResponse.Marshal(b, m, deterministic) 
+} +func (dst *AddLogResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddLogResponse.Merge(dst, src) +} +func (m *AddLogResponse) XXX_Size() int { + return xxx_messageInfo_AddLogResponse.Size(m) +} +func (m *AddLogResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddLogResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddLogResponse proto.InternalMessageInfo + +type GetTaskLogsRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskLogsRequest) Reset() { *m = GetTaskLogsRequest{} } +func (m *GetTaskLogsRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsRequest) ProtoMessage() {} +func (*GetTaskLogsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{13} +} +func (m *GetTaskLogsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsRequest.Unmarshal(m, b) +} +func (m *GetTaskLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskLogsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsRequest.Merge(dst, src) +} +func (m *GetTaskLogsRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsRequest.Size(m) +} +func (m *GetTaskLogsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsRequest proto.InternalMessageInfo + +func (m *GetTaskLogsRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type GetTaskLogsResponse struct { + Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs,proto3" json:"logs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + 
+func (m *GetTaskLogsResponse) Reset() { *m = GetTaskLogsResponse{} } +func (m *GetTaskLogsResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsResponse) ProtoMessage() {} +func (*GetTaskLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{14} +} +func (m *GetTaskLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsResponse.Unmarshal(m, b) +} +func (m *GetTaskLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsResponse.Merge(dst, src) +} +func (m *GetTaskLogsResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsResponse.Size(m) +} +func (m *GetTaskLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsResponse proto.InternalMessageInfo + +func (m *GetTaskLogsResponse) GetLogs() []*model.TaskExecLog { + if m != nil { + return m.Logs + } + return nil +} + +type GetTaskRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{15} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst 
*GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type GetTaskResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } +func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskResponse) ProtoMessage() {} +func (*GetTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{16} +} +func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) +} +func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskResponse.Merge(dst, src) +} +func (m *GetTaskResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskResponse.Size(m) +} +func (m *GetTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo + +func (m *GetTaskResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type RemoveTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" 
json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveTaskRequest) Reset() { *m = RemoveTaskRequest{} } +func (m *RemoveTaskRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveTaskRequest) ProtoMessage() {} +func (*RemoveTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{17} +} +func (m *RemoveTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveTaskRequest.Unmarshal(m, b) +} +func (m *RemoveTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveTaskRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveTaskRequest.Merge(dst, src) +} +func (m *RemoveTaskRequest) XXX_Size() int { + return xxx_messageInfo_RemoveTaskRequest.Size(m) +} +func (m *RemoveTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveTaskRequest proto.InternalMessageInfo + +func (m *RemoveTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *RemoveTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type RemoveTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveTaskResponse) Reset() { *m = RemoveTaskResponse{} } +func (m *RemoveTaskResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveTaskResponse) ProtoMessage() {} +func (*RemoveTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{18} +} +func (m *RemoveTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveTaskResponse.Unmarshal(m, b) +} +func (m *RemoveTaskResponse) 
XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveTaskResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveTaskResponse.Merge(dst, src) +} +func (m *RemoveTaskResponse) XXX_Size() int { + return xxx_messageInfo_RemoveTaskResponse.Size(m) +} +func (m *RemoveTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveTaskResponse proto.InternalMessageInfo + +type QueueSizesRequest struct { + TaskTypes []string `protobuf:"bytes,1,rep,name=task_types,json=taskTypes,proto3" json:"task_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueSizesRequest) Reset() { *m = QueueSizesRequest{} } +func (m *QueueSizesRequest) String() string { return proto.CompactTextString(m) } +func (*QueueSizesRequest) ProtoMessage() {} +func (*QueueSizesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{19} +} +func (m *QueueSizesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueSizesRequest.Unmarshal(m, b) +} +func (m *QueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueSizesRequest.Marshal(b, m, deterministic) +} +func (dst *QueueSizesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueSizesRequest.Merge(dst, src) +} +func (m *QueueSizesRequest) XXX_Size() int { + return xxx_messageInfo_QueueSizesRequest.Size(m) +} +func (m *QueueSizesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueSizesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueSizesRequest proto.InternalMessageInfo + +func (m *QueueSizesRequest) GetTaskTypes() []string { + if m != nil { + return m.TaskTypes + } + return nil +} + +type QueueSizesResponse struct { + QueueForTask map[string]int32 
`protobuf:"bytes,1,rep,name=queue_for_task,json=queueForTask,proto3" json:"queue_for_task,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueSizesResponse) Reset() { *m = QueueSizesResponse{} } +func (m *QueueSizesResponse) String() string { return proto.CompactTextString(m) } +func (*QueueSizesResponse) ProtoMessage() {} +func (*QueueSizesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{20} +} +func (m *QueueSizesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueSizesResponse.Unmarshal(m, b) +} +func (m *QueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueSizesResponse.Marshal(b, m, deterministic) +} +func (dst *QueueSizesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueSizesResponse.Merge(dst, src) +} +func (m *QueueSizesResponse) XXX_Size() int { + return xxx_messageInfo_QueueSizesResponse.Size(m) +} +func (m *QueueSizesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueSizesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueSizesResponse proto.InternalMessageInfo + +func (m *QueueSizesResponse) GetQueueForTask() map[string]int32 { + if m != nil { + return m.QueueForTask + } + return nil +} + +type QueueInfoRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueInfoRequest) Reset() { *m = QueueInfoRequest{} } +func (m *QueueInfoRequest) String() string { return proto.CompactTextString(m) } +func (*QueueInfoRequest) ProtoMessage() {} +func (*QueueInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{21} +} +func (m *QueueInfoRequest) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_QueueInfoRequest.Unmarshal(m, b) +} +func (m *QueueInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueInfoRequest.Marshal(b, m, deterministic) +} +func (dst *QueueInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueInfoRequest.Merge(dst, src) +} +func (m *QueueInfoRequest) XXX_Size() int { + return xxx_messageInfo_QueueInfoRequest.Size(m) +} +func (m *QueueInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueInfoRequest proto.InternalMessageInfo + +type QueueInfoResponse struct { + Queues map[string]int64 `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueInfoResponse) Reset() { *m = QueueInfoResponse{} } +func (m *QueueInfoResponse) String() string { return proto.CompactTextString(m) } +func (*QueueInfoResponse) ProtoMessage() {} +func (*QueueInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{22} +} +func (m *QueueInfoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueInfoResponse.Unmarshal(m, b) +} +func (m *QueueInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueInfoResponse.Marshal(b, m, deterministic) +} +func (dst *QueueInfoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueInfoResponse.Merge(dst, src) +} +func (m *QueueInfoResponse) XXX_Size() int { + return xxx_messageInfo_QueueInfoResponse.Size(m) +} +func (m *QueueInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueInfoResponse proto.InternalMessageInfo + +func (m *QueueInfoResponse) GetQueues() map[string]int64 { + 
if m != nil { + return m.Queues + } + return nil +} + +type QueueAllInfoRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoRequest) Reset() { *m = QueueAllInfoRequest{} } +func (m *QueueAllInfoRequest) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoRequest) ProtoMessage() {} +func (*QueueAllInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{23} +} +func (m *QueueAllInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoRequest.Unmarshal(m, b) +} +func (m *QueueAllInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoRequest.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoRequest.Merge(dst, src) +} +func (m *QueueAllInfoRequest) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoRequest.Size(m) +} +func (m *QueueAllInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoRequest proto.InternalMessageInfo + +type QueueAllInfoResponse struct { + Queues map[string]*QueueAllInfoResponse_QueueInfo `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse) Reset() { *m = QueueAllInfoResponse{} } +func (m *QueueAllInfoResponse) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse) ProtoMessage() {} +func (*QueueAllInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24} +} +func (m *QueueAllInfoResponse) XXX_Unmarshal(b []byte) error 
{ + return xxx_messageInfo_QueueAllInfoResponse.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoResponse.Merge(dst, src) +} +func (m *QueueAllInfoResponse) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoResponse.Size(m) +} +func (m *QueueAllInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse proto.InternalMessageInfo + +func (m *QueueAllInfoResponse) GetQueues() map[string]*QueueAllInfoResponse_QueueInfo { + if m != nil { + return m.Queues + } + return nil +} + +type QueueAllInfoResponse_ShardInfo struct { + Size int64 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"` + Uacked int64 `protobuf:"varint,2,opt,name=uacked,proto3" json:"uacked,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse_ShardInfo) Reset() { *m = QueueAllInfoResponse_ShardInfo{} } +func (m *QueueAllInfoResponse_ShardInfo) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse_ShardInfo) ProtoMessage() {} +func (*QueueAllInfoResponse_ShardInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 0} +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse_ShardInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Merge(dst, src) +} +func 
(m *QueueAllInfoResponse_ShardInfo) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Size(m) +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoResponse_ShardInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse_ShardInfo proto.InternalMessageInfo + +func (m *QueueAllInfoResponse_ShardInfo) GetSize() int64 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *QueueAllInfoResponse_ShardInfo) GetUacked() int64 { + if m != nil { + return m.Uacked + } + return 0 +} + +type QueueAllInfoResponse_QueueInfo struct { + Shards map[string]*QueueAllInfoResponse_ShardInfo `protobuf:"bytes,1,rep,name=shards,proto3" json:"shards,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse_QueueInfo) Reset() { *m = QueueAllInfoResponse_QueueInfo{} } +func (m *QueueAllInfoResponse_QueueInfo) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse_QueueInfo) ProtoMessage() {} +func (*QueueAllInfoResponse_QueueInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 1} +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse_QueueInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Merge(dst, src) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Size(m) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_DiscardUnknown() { + 
xxx_messageInfo_QueueAllInfoResponse_QueueInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse_QueueInfo proto.InternalMessageInfo + +func (m *QueueAllInfoResponse_QueueInfo) GetShards() map[string]*QueueAllInfoResponse_ShardInfo { + if m != nil { + return m.Shards + } + return nil +} + +func init() { + proto.RegisterType((*PollRequest)(nil), "conductor.grpc.tasks.PollRequest") + proto.RegisterType((*PollResponse)(nil), "conductor.grpc.tasks.PollResponse") + proto.RegisterType((*BatchPollRequest)(nil), "conductor.grpc.tasks.BatchPollRequest") + proto.RegisterType((*TasksInProgressRequest)(nil), "conductor.grpc.tasks.TasksInProgressRequest") + proto.RegisterType((*TasksInProgressResponse)(nil), "conductor.grpc.tasks.TasksInProgressResponse") + proto.RegisterType((*PendingTaskRequest)(nil), "conductor.grpc.tasks.PendingTaskRequest") + proto.RegisterType((*PendingTaskResponse)(nil), "conductor.grpc.tasks.PendingTaskResponse") + proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.tasks.UpdateTaskRequest") + proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.tasks.UpdateTaskResponse") + proto.RegisterType((*AckTaskRequest)(nil), "conductor.grpc.tasks.AckTaskRequest") + proto.RegisterType((*AckTaskResponse)(nil), "conductor.grpc.tasks.AckTaskResponse") + proto.RegisterType((*AddLogRequest)(nil), "conductor.grpc.tasks.AddLogRequest") + proto.RegisterType((*AddLogResponse)(nil), "conductor.grpc.tasks.AddLogResponse") + proto.RegisterType((*GetTaskLogsRequest)(nil), "conductor.grpc.tasks.GetTaskLogsRequest") + proto.RegisterType((*GetTaskLogsResponse)(nil), "conductor.grpc.tasks.GetTaskLogsResponse") + proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.tasks.GetTaskRequest") + proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.tasks.GetTaskResponse") + proto.RegisterType((*RemoveTaskRequest)(nil), "conductor.grpc.tasks.RemoveTaskRequest") + proto.RegisterType((*RemoveTaskResponse)(nil), 
"conductor.grpc.tasks.RemoveTaskResponse") + proto.RegisterType((*QueueSizesRequest)(nil), "conductor.grpc.tasks.QueueSizesRequest") + proto.RegisterType((*QueueSizesResponse)(nil), "conductor.grpc.tasks.QueueSizesResponse") + proto.RegisterMapType((map[string]int32)(nil), "conductor.grpc.tasks.QueueSizesResponse.QueueForTaskEntry") + proto.RegisterType((*QueueInfoRequest)(nil), "conductor.grpc.tasks.QueueInfoRequest") + proto.RegisterType((*QueueInfoResponse)(nil), "conductor.grpc.tasks.QueueInfoResponse") + proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.tasks.QueueInfoResponse.QueuesEntry") + proto.RegisterType((*QueueAllInfoRequest)(nil), "conductor.grpc.tasks.QueueAllInfoRequest") + proto.RegisterType((*QueueAllInfoResponse)(nil), "conductor.grpc.tasks.QueueAllInfoResponse") + proto.RegisterMapType((map[string]*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueuesEntry") + proto.RegisterType((*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.ShardInfo") + proto.RegisterType((*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo") + proto.RegisterMapType((map[string]*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo.ShardsEntry") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TaskServiceClient is the client API for TaskService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
+type TaskServiceClient interface { + // GET /poll/{tasktype} + Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) + // /poll/batch/{tasktype} + BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) + // GET /in_progress/{tasktype} + GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) + // GET /in_progress/{workflowId}/{taskRefName} + GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) + // POST / + UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) + // POST /{taskId}/ack + AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) + // POST /{taskId}/log + AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) + // GET {taskId}/log + GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) + // GET /{taskId} + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) + // DELETE /queue/{taskType}/{taskId} + RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) + // GET /queue/sizes + GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) + // GET /queue/all + GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) + // GET /queue/all/verbose + GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) +} + +type taskServiceClient struct { + cc *grpc.ClientConn +} + +func NewTaskServiceClient(cc *grpc.ClientConn) TaskServiceClient { + return 
&taskServiceClient{cc} +} + +func (c *taskServiceClient) Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) { + out := new(PollResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/Poll", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) { + stream, err := c.cc.NewStream(ctx, &_TaskService_serviceDesc.Streams[0], "/conductor.grpc.tasks.TaskService/BatchPoll", opts...) + if err != nil { + return nil, err + } + x := &taskServiceBatchPollClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type TaskService_BatchPollClient interface { + Recv() (*model.Task, error) + grpc.ClientStream +} + +type taskServiceBatchPollClient struct { + grpc.ClientStream +} + +func (x *taskServiceBatchPollClient) Recv() (*model.Task, error) { + m := new(model.Task) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *taskServiceClient) GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) { + out := new(TasksInProgressResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTasksInProgress", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) { + out := new(PendingTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { + out := new(UpdateTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/UpdateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) { + out := new(AckTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AckTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) { + out := new(AddLogResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AddLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) { + out := new(GetTaskLogsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTaskLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { + out := new(GetTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) { + out := new(RemoveTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) { + out := new(QueueSizesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) { + out := new(QueueInfoResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) { + out := new(QueueAllInfoResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TaskServiceServer is the server API for TaskService service. 
+type TaskServiceServer interface { + // GET /poll/{tasktype} + Poll(context.Context, *PollRequest) (*PollResponse, error) + // /poll/batch/{tasktype} + BatchPoll(*BatchPollRequest, TaskService_BatchPollServer) error + // GET /in_progress/{tasktype} + GetTasksInProgress(context.Context, *TasksInProgressRequest) (*TasksInProgressResponse, error) + // GET /in_progress/{workflowId}/{taskRefName} + GetPendingTaskForWorkflow(context.Context, *PendingTaskRequest) (*PendingTaskResponse, error) + // POST / + UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) + // POST /{taskId}/ack + AckTask(context.Context, *AckTaskRequest) (*AckTaskResponse, error) + // POST /{taskId}/log + AddLog(context.Context, *AddLogRequest) (*AddLogResponse, error) + // GET {taskId}/log + GetTaskLogs(context.Context, *GetTaskLogsRequest) (*GetTaskLogsResponse, error) + // GET /{taskId} + GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) + // DELETE /queue/{taskType}/{taskId} + RemoveTaskFromQueue(context.Context, *RemoveTaskRequest) (*RemoveTaskResponse, error) + // GET /queue/sizes + GetQueueSizesForTasks(context.Context, *QueueSizesRequest) (*QueueSizesResponse, error) + // GET /queue/all + GetQueueInfo(context.Context, *QueueInfoRequest) (*QueueInfoResponse, error) + // GET /queue/all/verbose + GetQueueAllInfo(context.Context, *QueueAllInfoRequest) (*QueueAllInfoResponse, error) +} + +func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { + s.RegisterService(&_TaskService_serviceDesc, srv) +} + +func _TaskService_Poll_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PollRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).Poll(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/Poll", + } + handler := 
func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).Poll(ctx, req.(*PollRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_BatchPoll_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BatchPollRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TaskServiceServer).BatchPoll(m, &taskServiceBatchPollServer{stream}) +} + +type TaskService_BatchPollServer interface { + Send(*model.Task) error + grpc.ServerStream +} + +type taskServiceBatchPollServer struct { + grpc.ServerStream +} + +func (x *taskServiceBatchPollServer) Send(m *model.Task) error { + return x.ServerStream.SendMsg(m) +} + +func _TaskService_GetTasksInProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TasksInProgressRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTasksInProgress", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, req.(*TasksInProgressRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetPendingTaskForWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PendingTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { 
+ return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, req.(*PendingTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AckTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AckTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AckTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/AckTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).AckTask(ctx, req.(*AckTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AddLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AddLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/AddLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(TaskServiceServer).AddLog(ctx, req.(*AddLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetTaskLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskLogsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTaskLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTaskLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTaskLogs(ctx, req.(*GetTaskLogsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_RemoveTaskFromQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return 
srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, req.(*RemoveTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetQueueSizesForTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueSizesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, req.(*QueueSizesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetQueueInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueInfo(ctx, req.(*QueueInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetQueueAllInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueAllInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueAllInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueAllInfo(ctx, req.(*QueueAllInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TaskService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.tasks.TaskService", + HandlerType: (*TaskServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Poll", + Handler: _TaskService_Poll_Handler, + }, + { + MethodName: "GetTasksInProgress", + Handler: _TaskService_GetTasksInProgress_Handler, + }, + { + MethodName: "GetPendingTaskForWorkflow", + Handler: _TaskService_GetPendingTaskForWorkflow_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _TaskService_UpdateTask_Handler, + }, + { + MethodName: "AckTask", + Handler: _TaskService_AckTask_Handler, + }, + { + MethodName: "AddLog", + Handler: _TaskService_AddLog_Handler, + }, + { + MethodName: "GetTaskLogs", + Handler: _TaskService_GetTaskLogs_Handler, + }, + { + MethodName: "GetTask", + Handler: _TaskService_GetTask_Handler, + }, + { + MethodName: "RemoveTaskFromQueue", + Handler: _TaskService_RemoveTaskFromQueue_Handler, + }, + { + MethodName: "GetQueueSizesForTasks", + Handler: _TaskService_GetQueueSizesForTasks_Handler, + }, + { + MethodName: "GetQueueInfo", + Handler: _TaskService_GetQueueInfo_Handler, + }, + { + MethodName: "GetQueueAllInfo", + Handler: _TaskService_GetQueueAllInfo_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "BatchPoll", + Handler: _TaskService_BatchPoll_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/task_service.proto", +} + +func init() { + proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_2cd893b942ad08bb) +} + +var fileDescriptor_task_service_2cd893b942ad08bb = []byte{ + // 1114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xdd, 0x72, 0xdb, 0x54, + 0x10, 0x1e, 0xc5, 0x89, 0x53, 0xaf, 0x93, 0xd4, 0x39, 0xf9, 0x33, 0x2a, 0x0c, 
0x41, 0x2d, 0x6d, + 0x02, 0x54, 0xe9, 0x24, 0x0c, 0xd0, 0x0c, 0x33, 0x6d, 0x32, 0x43, 0x82, 0x69, 0xe8, 0x04, 0xa5, + 0x94, 0x9f, 0x1b, 0xa3, 0x48, 0xc7, 0xb2, 0xb0, 0xac, 0xe3, 0x48, 0x47, 0x69, 0xd2, 0xe7, 0xe0, + 0x8e, 0x07, 0xe1, 0x4d, 0x78, 0x07, 0x5e, 0x81, 0x2b, 0xe6, 0xfc, 0x48, 0x3a, 0x8e, 0x25, 0xdb, + 0x19, 0xa6, 0x77, 0x3a, 0xab, 0xdd, 0xfd, 0xbe, 0x5d, 0xad, 0xf7, 0x3b, 0x86, 0x0d, 0x2f, 0x1a, + 0x38, 0x3b, 0xd4, 0x8e, 0x7b, 0xed, 0x18, 0x47, 0x97, 0xbe, 0x83, 0xcd, 0x41, 0x44, 0x28, 0x41, + 0xab, 0x0e, 0x09, 0xdd, 0xc4, 0xa1, 0x24, 0x32, 0x99, 0x8b, 0xc9, 0x5c, 0x62, 0x7d, 0xa3, 0x4f, + 0x5c, 0x1c, 0x70, 0x7f, 0x7c, 0x85, 0x9d, 0x80, 0x78, 0xc2, 0x5d, 0x5f, 0xcf, 0x5f, 0x44, 0x38, + 0x4e, 0x02, 0x2a, 0xed, 0x8d, 0xdc, 0x2e, 0x2c, 0x46, 0x1b, 0xea, 0xa7, 0x24, 0x08, 0x2c, 0x7c, + 0x91, 0xe0, 0x98, 0xa2, 0x7b, 0x50, 0xe3, 0xe8, 0xf4, 0x7a, 0x80, 0x9b, 0xda, 0xa6, 0xb6, 0x55, + 0xb3, 0xee, 0x30, 0xc3, 0xab, 0xeb, 0x01, 0x66, 0x2f, 0xdf, 0x90, 0xa8, 0x87, 0xa3, 0xb6, 0xef, + 0x36, 0x67, 0xc4, 0x4b, 0x61, 0x68, 0xb9, 0x68, 0x1d, 0xaa, 0x2e, 0xe9, 0xdb, 0x7e, 0xd8, 0xac, + 0xf0, 0x37, 0xf2, 0x64, 0x3c, 0x85, 0x05, 0x01, 0x10, 0x0f, 0x48, 0x18, 0x63, 0xb4, 0x0d, 0xb3, + 0x2c, 0x21, 0x4f, 0x5e, 0xdf, 0x5d, 0x33, 0xf3, 0xc2, 0x38, 0x21, 0xf3, 0x95, 0x1d, 0xf7, 0x2c, + 0xee, 0x62, 0xfc, 0xa1, 0x41, 0xe3, 0xd0, 0xa6, 0x4e, 0xf7, 0x9d, 0x32, 0x44, 0xab, 0x30, 0xe7, + 0x90, 0x24, 0xa4, 0xcd, 0xd9, 0x4d, 0x6d, 0x6b, 0xce, 0x12, 0x07, 0xd4, 0x84, 0x79, 0xea, 0xf7, + 0x31, 0x49, 0x68, 0x73, 0x8e, 0xdb, 0xd3, 0xa3, 0xd1, 0x85, 0x75, 0x46, 0x32, 0x6e, 0x85, 0xa7, + 0x11, 0xf1, 0x22, 0x1c, 0xc7, 0xd3, 0x72, 0x8b, 0xa9, 0x1d, 0xd1, 0x76, 0x0f, 0x5f, 0xa7, 0xdc, + 0xb8, 0xe1, 0x05, 0xbe, 0xce, 0x39, 0x54, 0x14, 0x0e, 0xc6, 0x11, 0x6c, 0x8c, 0x20, 0xc9, 0x36, + 0x7e, 0x0a, 0x73, 0x7c, 0x06, 0x9a, 0xda, 0x66, 0xa5, 0xbc, 0x8f, 0xc2, 0xc7, 0xf8, 0x05, 0xd0, + 0x29, 0x0e, 0x5d, 0x3f, 0xf4, 0xb8, 0x55, 0xb2, 0xfd, 0x10, 0xea, 0xac, 0x37, 0x9d, 0x80, 0xbc, + 0x61, 0xed, 0x12, 
0x7c, 0x21, 0x35, 0xb5, 0x5c, 0x64, 0xc0, 0x22, 0x2f, 0x27, 0xc2, 0x9d, 0x76, + 0x68, 0xf7, 0xb1, 0x64, 0x5d, 0xa7, 0x3c, 0x49, 0xe7, 0xa5, 0xdd, 0xc7, 0xc6, 0x73, 0x58, 0x19, + 0x4a, 0x7d, 0xfb, 0xaf, 0xfc, 0x2d, 0x2c, 0xff, 0x38, 0x70, 0x6d, 0x8a, 0x55, 0x6e, 0x7b, 0x50, + 0x15, 0x83, 0x2b, 0x33, 0xdc, 0x2b, 0xce, 0xc0, 0x5d, 0x2c, 0xe9, 0x6a, 0x3c, 0x06, 0xa4, 0x66, + 0x92, 0x54, 0x36, 0x60, 0x9e, 0x57, 0x91, 0x95, 0x58, 0x65, 0xc7, 0x96, 0x6b, 0x1c, 0xc1, 0xd2, + 0x81, 0xd3, 0x53, 0x51, 0xcb, 0x5c, 0xc7, 0xce, 0x95, 0x71, 0x1f, 0xee, 0x66, 0x79, 0x24, 0x66, + 0x03, 0x2a, 0xb6, 0x23, 0xaa, 0xbf, 0x63, 0xb1, 0x47, 0x63, 0x1f, 0x16, 0x0f, 0x5c, 0xf7, 0x84, + 0x78, 0x13, 0xb1, 0x1a, 0x50, 0x09, 0x88, 0x27, 0x51, 0xd8, 0xa3, 0xd1, 0x80, 0xa5, 0x34, 0x56, + 0xe4, 0x67, 0x95, 0x1e, 0x63, 0xca, 0x20, 0x4f, 0x88, 0x17, 0x4f, 0x4a, 0x69, 0x1c, 0xc3, 0xca, + 0x90, 0xbb, 0x64, 0xf9, 0x04, 0x66, 0x03, 0xe2, 0xa5, 0x23, 0xf4, 0x7e, 0x61, 0x8b, 0xbf, 0xb9, + 0xc2, 0x0e, 0x43, 0xe6, 0x9e, 0xc6, 0x36, 0x2c, 0xc9, 0x44, 0x13, 0x31, 0xbf, 0x86, 0xbb, 0x99, + 0xeb, 0xed, 0x87, 0xa2, 0x05, 0xcb, 0x16, 0xee, 0x93, 0xcb, 0xa1, 0xa1, 0x18, 0xfb, 0xf3, 0x52, + 0x88, 0xcc, 0x0c, 0x11, 0x59, 0x05, 0xa4, 0xa6, 0x92, 0x1d, 0xdc, 0x85, 0xe5, 0x1f, 0x12, 0x9c, + 0xe0, 0x33, 0xff, 0x2d, 0xce, 0x1a, 0xf8, 0x01, 0x40, 0x06, 0x20, 0xda, 0x52, 0xb3, 0x6a, 0x29, + 0x42, 0x6c, 0xfc, 0xa5, 0x01, 0x52, 0x83, 0x64, 0x59, 0xbf, 0xc1, 0xd2, 0x05, 0xb3, 0xb6, 0x3b, + 0x24, 0x6a, 0xcb, 0x02, 0x59, 0x43, 0xf7, 0xcd, 0xa2, 0xa5, 0x6d, 0x8e, 0x66, 0x10, 0xa6, 0x23, + 0x12, 0xf1, 0x86, 0x87, 0x34, 0xba, 0xb6, 0x16, 0x2e, 0x14, 0x93, 0xfe, 0x4c, 0x92, 0x55, 0x5d, + 0xd8, 0x9c, 0xb0, 0x4d, 0x22, 0xfa, 0xc0, 0x1e, 0xd9, 0x12, 0xb9, 0xb4, 0x83, 0x44, 0xfc, 0x4e, + 0xe7, 0x2c, 0x71, 0xd8, 0x9f, 0xf9, 0x4a, 0x33, 0x10, 0x34, 0x78, 0x82, 0x56, 0xd8, 0x21, 0xb2, + 0x58, 0xe3, 0x4f, 0x4d, 0x66, 0x15, 0x46, 0x59, 0xcc, 0x0b, 0xa8, 0x72, 0xe8, 0x74, 0x2a, 0xf6, + 0xc6, 0x14, 0xa1, 0x06, 0x0a, 0x4b, 0x2c, 0xd8, 0xcb, 
0x14, 0xfa, 0x53, 0xa8, 0x2b, 0xe6, 0x49, + 0x8c, 0x2b, 0x2a, 0xe3, 0x35, 0x58, 0xe1, 0xa1, 0x07, 0x41, 0xa0, 0x92, 0xfe, 0xa7, 0x02, 0xab, + 0xc3, 0x76, 0xc9, 0xfb, 0xe5, 0x0d, 0xde, 0x5f, 0x8c, 0xe1, 0x7d, 0x23, 0xb6, 0x90, 0xfa, 0x97, + 0x50, 0x3b, 0xeb, 0xda, 0x91, 0xcb, 0x1c, 0x11, 0x82, 0xd9, 0xd8, 0x7f, 0x2b, 0x66, 0xae, 0x62, + 0xf1, 0x67, 0xa6, 0x26, 0x89, 0xed, 0xf4, 0xb0, 0x2b, 0xb9, 0xcb, 0x93, 0xfe, 0xb7, 0x06, 0xb5, + 0xac, 0x3b, 0xe8, 0x67, 0xa8, 0xc6, 0x2c, 0x4d, 0x4a, 0xeb, 0xf9, 0x6d, 0x69, 0x31, 0x8b, 0xc9, + 0x99, 0xa4, 0x04, 0x45, 0x3e, 0x9d, 0x40, 0x5d, 0x31, 0x17, 0xf4, 0xf6, 0x3b, 0xb5, 0xb7, 0xf5, + 0xdd, 0xcf, 0x6f, 0x81, 0x9c, 0x55, 0xae, 0x7c, 0x11, 0x06, 0x38, 0xfe, 0x63, 0xfe, 0x0f, 0xc0, + 0x7c, 0x9c, 0x72, 0xc0, 0xdd, 0x7f, 0x6b, 0x50, 0x67, 0xe3, 0x7e, 0x26, 0x6e, 0x42, 0xe8, 0x7b, + 0x98, 0x65, 0x17, 0x01, 0xf4, 0x51, 0x71, 0x62, 0xe5, 0x92, 0xa0, 0x1b, 0xe3, 0x5c, 0xe4, 0xc4, + 0x9c, 0x40, 0x2d, 0xbb, 0x5c, 0xa0, 0x87, 0xc5, 0x01, 0x37, 0x6f, 0x1f, 0x7a, 0xf1, 0xd2, 0x7a, + 0xa2, 0xa1, 0x8b, 0x6c, 0x23, 0x2b, 0x6a, 0x8d, 0x3e, 0x2b, 0x4e, 0x5b, 0x7c, 0x7d, 0xd0, 0x1f, + 0x4f, 0xe9, 0x2d, 0x0b, 0x18, 0xc0, 0x7b, 0xc7, 0x98, 0x2a, 0xea, 0x7b, 0x44, 0xa2, 0x9f, 0xa4, + 0x7a, 0xa3, 0xad, 0x92, 0x0e, 0x8c, 0x5c, 0x03, 0xf4, 0xed, 0x29, 0x3c, 0x25, 0x62, 0x1b, 0x20, + 0x17, 0x58, 0xf4, 0xa8, 0x38, 0x70, 0x44, 0xcc, 0xf5, 0xad, 0xc9, 0x8e, 0x12, 0xe0, 0x35, 0xcc, + 0x4b, 0x29, 0x45, 0x0f, 0x8a, 0x83, 0x86, 0x15, 0x5b, 0xff, 0x78, 0x82, 0x97, 0xcc, 0x7b, 0x06, + 0x55, 0xa1, 0xa0, 0xe8, 0x7e, 0x49, 0x80, 0xaa, 0xcd, 0xfa, 0x83, 0xf1, 0x4e, 0x32, 0xe9, 0x39, + 0xd4, 0x15, 0x55, 0x2d, 0xeb, 0xf8, 0xa8, 0x4e, 0x97, 0x75, 0xbc, 0x48, 0xa2, 0x5f, 0xc3, 0xbc, + 0x34, 0x97, 0x35, 0x64, 0x58, 0x8f, 0xcb, 0x1a, 0x72, 0x53, 0x8a, 0xbb, 0xb0, 0x92, 0x8b, 0xe2, + 0x51, 0x44, 0xfa, 0xfc, 0x17, 0x58, 0xf6, 0x49, 0x47, 0xa4, 0xb8, 0xec, 0x93, 0x8e, 0x0a, 0x2d, + 0xfa, 0x1d, 0xd6, 0x8e, 0x31, 0xcd, 0x45, 0x4f, 0x6a, 0x58, 0x5c, 0x86, 0x35, 0xa2, 0xca, 
0x65, + 0x58, 0x05, 0x4a, 0xdc, 0x86, 0x85, 0x14, 0x8b, 0x6f, 0xdf, 0x87, 0x13, 0xc5, 0x4b, 0x20, 0x3c, + 0x9a, 0x52, 0xe4, 0x50, 0x97, 0x5f, 0x6a, 0xd4, 0x15, 0x86, 0xb6, 0xa7, 0x59, 0x73, 0x02, 0xe6, + 0x93, 0xe9, 0x37, 0xe2, 0x61, 0x00, 0xba, 0x43, 0xfa, 0x66, 0x88, 0x69, 0x27, 0xf0, 0xaf, 0x6e, + 0x04, 0x1e, 0x2e, 0x2a, 0x7b, 0xf1, 0xf4, 0xfc, 0xd7, 0x67, 0x9e, 0x4f, 0xbb, 0xc9, 0xb9, 0xe9, + 0x90, 0xfe, 0x8e, 0x8c, 0xd8, 0xc9, 0x22, 0x76, 0x9c, 0xc0, 0xc7, 0x21, 0xdd, 0xf1, 0x08, 0xff, + 0x73, 0x99, 0xdb, 0xb3, 0xff, 0x9a, 0xf1, 0x79, 0x95, 0xef, 0xb2, 0xbd, 0xff, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xc5, 0xda, 0xa9, 0x5e, 0x80, 0x0e, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go b/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go new file mode 100644 index 0000000000..e73a12be92 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/grpc/workflows/workflow_service.pb.go @@ -0,0 +1,1822 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/workflow_service.proto + +package workflows // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import search "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StartWorkflowResponse struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartWorkflowResponse) Reset() { *m = StartWorkflowResponse{} } +func (m *StartWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*StartWorkflowResponse) ProtoMessage() {} +func (*StartWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{0} +} +func (m *StartWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartWorkflowResponse.Unmarshal(m, b) +} +func (m *StartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *StartWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartWorkflowResponse.Merge(dst, src) +} +func (m *StartWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_StartWorkflowResponse.Size(m) +} +func (m *StartWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StartWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StartWorkflowResponse proto.InternalMessageInfo + +func (m *StartWorkflowResponse) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type GetWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed,proto3" json:"include_closed,omitempty"` + IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks,proto3" 
json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } +func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsRequest) ProtoMessage() {} +func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{1} +} +func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsRequest.Merge(dst, src) +} +func (m *GetWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsRequest.Size(m) +} +func (m *GetWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsRequest proto.InternalMessageInfo + +func (m *GetWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowsRequest) GetCorrelationId() []string { + if m != nil { + return m.CorrelationId + } + return nil +} + +func (m *GetWorkflowsRequest) GetIncludeClosed() bool { + if m != nil { + return m.IncludeClosed + } + return false +} + +func (m *GetWorkflowsRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks + } + return false +} + +type GetWorkflowsResponse struct { + WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById,proto3" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } +func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse) ProtoMessage() {} +func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2} +} +func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse.Merge(dst, src) +} +func (m *GetWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse.Size(m) +} +func (m *GetWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsResponse proto.InternalMessageInfo + +func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsResponse_Workflows { + if m != nil { + return m.WorkflowsById + } + return nil +} + +type GetWorkflowsResponse_Workflows struct { + Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows,proto3" json:"workflows,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResponse_Workflows{} } +func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} +func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2, 0} +} +func (m *GetWorkflowsResponse_Workflows) 
XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsResponse_Workflows) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse_Workflows.Merge(dst, src) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Size(m) +} +func (m *GetWorkflowsResponse_Workflows) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse_Workflows.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsResponse_Workflows proto.InternalMessageInfo + +func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { + if m != nil { + return m.Workflows + } + return nil +} + +type GetWorkflowStatusRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks,proto3" json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusRequest{} } +func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowStatusRequest) ProtoMessage() {} +func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{3} +} +func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) +} +func (m *GetWorkflowStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowStatusRequest.Marshal(b, m, deterministic) +} +func (dst 
*GetWorkflowStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowStatusRequest.Merge(dst, src) +} +func (m *GetWorkflowStatusRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowStatusRequest.Size(m) +} +func (m *GetWorkflowStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowStatusRequest proto.InternalMessageInfo + +func (m *GetWorkflowStatusRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks + } + return false +} + +type GetWorkflowStatusResponse struct { + Workflow *model.Workflow `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowStatusResponse) Reset() { *m = GetWorkflowStatusResponse{} } +func (m *GetWorkflowStatusResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowStatusResponse) ProtoMessage() {} +func (*GetWorkflowStatusResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{4} +} +func (m *GetWorkflowStatusResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowStatusResponse.Unmarshal(m, b) +} +func (m *GetWorkflowStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowStatusResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowStatusResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowStatusResponse.Merge(dst, src) +} +func (m *GetWorkflowStatusResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowStatusResponse.Size(m) +} +func (m *GetWorkflowStatusResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowStatusResponse.DiscardUnknown(m) +} + +var 
xxx_messageInfo_GetWorkflowStatusResponse proto.InternalMessageInfo + +func (m *GetWorkflowStatusResponse) GetWorkflow() *model.Workflow { + if m != nil { + return m.Workflow + } + return nil +} + +type RemoveWorkflowRequest struct { + WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId,proto3" json:"workflod_id,omitempty"` + ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow,proto3" json:"archive_workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } +func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveWorkflowRequest) ProtoMessage() {} +func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{5} +} +func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) +} +func (m *RemoveWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveWorkflowRequest.Merge(dst, src) +} +func (m *RemoveWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RemoveWorkflowRequest.Size(m) +} +func (m *RemoveWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveWorkflowRequest proto.InternalMessageInfo + +func (m *RemoveWorkflowRequest) GetWorkflodId() string { + if m != nil { + return m.WorkflodId + } + return "" +} + +func (m *RemoveWorkflowRequest) GetArchiveWorkflow() bool { + if m != nil { + return m.ArchiveWorkflow + } + return false +} + +type RemoveWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + 
XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveWorkflowResponse) Reset() { *m = RemoveWorkflowResponse{} } +func (m *RemoveWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveWorkflowResponse) ProtoMessage() {} +func (*RemoveWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{6} +} +func (m *RemoveWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveWorkflowResponse.Unmarshal(m, b) +} +func (m *RemoveWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveWorkflowResponse.Merge(dst, src) +} +func (m *RemoveWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RemoveWorkflowResponse.Size(m) +} +func (m *RemoveWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveWorkflowResponse proto.InternalMessageInfo + +type GetRunningWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsRequest{} } +func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsRequest) ProtoMessage() {} +func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) { + 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{7} +} +func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsRequest.Merge(dst, src) +} +func (m *GetRunningWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsRequest.Size(m) +} +func (m *GetRunningWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsRequest proto.InternalMessageInfo + +func (m *GetRunningWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetRunningWorkflowsRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetStartTime() int64 { + if m != nil { + return m.StartTime + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +type GetRunningWorkflowsResponse struct { + WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds,proto3" json:"workflow_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflowsResponse{} } +func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsResponse) ProtoMessage() {} +func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{8} +} +func (m *GetRunningWorkflowsResponse) 
XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsResponse.Merge(dst, src) +} +func (m *GetRunningWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsResponse.Size(m) +} +func (m *GetRunningWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsResponse proto.InternalMessageInfo + +func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string { + if m != nil { + return m.WorkflowIds + } + return nil +} + +type DecideWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DecideWorkflowRequest) Reset() { *m = DecideWorkflowRequest{} } +func (m *DecideWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*DecideWorkflowRequest) ProtoMessage() {} +func (*DecideWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{9} +} +func (m *DecideWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DecideWorkflowRequest.Unmarshal(m, b) +} +func (m *DecideWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DecideWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *DecideWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DecideWorkflowRequest.Merge(dst, src) +} +func (m *DecideWorkflowRequest) XXX_Size() int { + return 
xxx_messageInfo_DecideWorkflowRequest.Size(m) +} +func (m *DecideWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DecideWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DecideWorkflowRequest proto.InternalMessageInfo + +func (m *DecideWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type DecideWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DecideWorkflowResponse) Reset() { *m = DecideWorkflowResponse{} } +func (m *DecideWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*DecideWorkflowResponse) ProtoMessage() {} +func (*DecideWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{10} +} +func (m *DecideWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DecideWorkflowResponse.Unmarshal(m, b) +} +func (m *DecideWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DecideWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *DecideWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DecideWorkflowResponse.Merge(dst, src) +} +func (m *DecideWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_DecideWorkflowResponse.Size(m) +} +func (m *DecideWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DecideWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DecideWorkflowResponse proto.InternalMessageInfo + +type PauseWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseWorkflowRequest) Reset() { *m = PauseWorkflowRequest{} } +func (m *PauseWorkflowRequest) String() string { return 
proto.CompactTextString(m) } +func (*PauseWorkflowRequest) ProtoMessage() {} +func (*PauseWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{11} +} +func (m *PauseWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseWorkflowRequest.Unmarshal(m, b) +} +func (m *PauseWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *PauseWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseWorkflowRequest.Merge(dst, src) +} +func (m *PauseWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_PauseWorkflowRequest.Size(m) +} +func (m *PauseWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseWorkflowRequest proto.InternalMessageInfo + +func (m *PauseWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type PauseWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseWorkflowResponse) Reset() { *m = PauseWorkflowResponse{} } +func (m *PauseWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*PauseWorkflowResponse) ProtoMessage() {} +func (*PauseWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{12} +} +func (m *PauseWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseWorkflowResponse.Unmarshal(m, b) +} +func (m *PauseWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *PauseWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseWorkflowResponse.Merge(dst, src) +} +func (m 
*PauseWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_PauseWorkflowResponse.Size(m) +} +func (m *PauseWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PauseWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseWorkflowResponse proto.InternalMessageInfo + +type ResumeWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeWorkflowRequest) Reset() { *m = ResumeWorkflowRequest{} } +func (m *ResumeWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeWorkflowRequest) ProtoMessage() {} +func (*ResumeWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{13} +} +func (m *ResumeWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeWorkflowRequest.Unmarshal(m, b) +} +func (m *ResumeWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeWorkflowRequest.Merge(dst, src) +} +func (m *ResumeWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_ResumeWorkflowRequest.Size(m) +} +func (m *ResumeWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeWorkflowRequest proto.InternalMessageInfo + +func (m *ResumeWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type ResumeWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeWorkflowResponse) Reset() { *m = ResumeWorkflowResponse{} } +func (m 
*ResumeWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*ResumeWorkflowResponse) ProtoMessage() {} +func (*ResumeWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{14} +} +func (m *ResumeWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeWorkflowResponse.Unmarshal(m, b) +} +func (m *ResumeWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *ResumeWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeWorkflowResponse.Merge(dst, src) +} +func (m *ResumeWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_ResumeWorkflowResponse.Size(m) +} +func (m *ResumeWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeWorkflowResponse proto.InternalMessageInfo + +type SkipTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"` + Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request,proto3" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } +func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } +func (*SkipTaskRequest) ProtoMessage() {} +func (*SkipTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{15} +} +func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) +} +func (m *SkipTaskRequest) XXX_Marshal(b []byte, 
deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) +} +func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskRequest.Merge(dst, src) +} +func (m *SkipTaskRequest) XXX_Size() int { + return xxx_messageInfo_SkipTaskRequest.Size(m) +} +func (m *SkipTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo + +func (m *SkipTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *SkipTaskRequest) GetTaskReferenceName() string { + if m != nil { + return m.TaskReferenceName + } + return "" +} + +func (m *SkipTaskRequest) GetRequest() *model.SkipTaskRequest { + if m != nil { + return m.Request + } + return nil +} + +type SkipTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskResponse) Reset() { *m = SkipTaskResponse{} } +func (m *SkipTaskResponse) String() string { return proto.CompactTextString(m) } +func (*SkipTaskResponse) ProtoMessage() {} +func (*SkipTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{16} +} +func (m *SkipTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskResponse.Unmarshal(m, b) +} +func (m *SkipTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskResponse.Marshal(b, m, deterministic) +} +func (dst *SkipTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskResponse.Merge(dst, src) +} +func (m *SkipTaskResponse) XXX_Size() int { + return xxx_messageInfo_SkipTaskResponse.Size(m) +} +func (m *SkipTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskResponse proto.InternalMessageInfo + +type 
RerunWorkflowResponse struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RerunWorkflowResponse) Reset() { *m = RerunWorkflowResponse{} } +func (m *RerunWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RerunWorkflowResponse) ProtoMessage() {} +func (*RerunWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{17} +} +func (m *RerunWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RerunWorkflowResponse.Unmarshal(m, b) +} +func (m *RerunWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RerunWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RerunWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RerunWorkflowResponse.Merge(dst, src) +} +func (m *RerunWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RerunWorkflowResponse.Size(m) +} +func (m *RerunWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RerunWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RerunWorkflowResponse proto.InternalMessageInfo + +func (m *RerunWorkflowResponse) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RestartWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestartWorkflowRequest) Reset() { *m = RestartWorkflowRequest{} } +func (m *RestartWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RestartWorkflowRequest) ProtoMessage() {} +func (*RestartWorkflowRequest) Descriptor() ([]byte, []int) { + 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{18} +} +func (m *RestartWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestartWorkflowRequest.Unmarshal(m, b) +} +func (m *RestartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestartWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RestartWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestartWorkflowRequest.Merge(dst, src) +} +func (m *RestartWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RestartWorkflowRequest.Size(m) +} +func (m *RestartWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RestartWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RestartWorkflowRequest proto.InternalMessageInfo + +func (m *RestartWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RestartWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestartWorkflowResponse) Reset() { *m = RestartWorkflowResponse{} } +func (m *RestartWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RestartWorkflowResponse) ProtoMessage() {} +func (*RestartWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{19} +} +func (m *RestartWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestartWorkflowResponse.Unmarshal(m, b) +} +func (m *RestartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestartWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RestartWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestartWorkflowResponse.Merge(dst, src) +} +func (m *RestartWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RestartWorkflowResponse.Size(m) +} +func (m 
*RestartWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RestartWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RestartWorkflowResponse proto.InternalMessageInfo + +type RetryWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryWorkflowRequest) Reset() { *m = RetryWorkflowRequest{} } +func (m *RetryWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RetryWorkflowRequest) ProtoMessage() {} +func (*RetryWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{20} +} +func (m *RetryWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryWorkflowRequest.Unmarshal(m, b) +} +func (m *RetryWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RetryWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryWorkflowRequest.Merge(dst, src) +} +func (m *RetryWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RetryWorkflowRequest.Size(m) +} +func (m *RetryWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RetryWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryWorkflowRequest proto.InternalMessageInfo + +func (m *RetryWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RetryWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryWorkflowResponse) Reset() { *m = RetryWorkflowResponse{} } +func (m *RetryWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RetryWorkflowResponse) ProtoMessage() {} +func 
(*RetryWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{21} +} +func (m *RetryWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryWorkflowResponse.Unmarshal(m, b) +} +func (m *RetryWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RetryWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryWorkflowResponse.Merge(dst, src) +} +func (m *RetryWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RetryWorkflowResponse.Size(m) +} +func (m *RetryWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RetryWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryWorkflowResponse proto.InternalMessageInfo + +type ResetWorkflowCallbacksRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResetWorkflowCallbacksRequest) Reset() { *m = ResetWorkflowCallbacksRequest{} } +func (m *ResetWorkflowCallbacksRequest) String() string { return proto.CompactTextString(m) } +func (*ResetWorkflowCallbacksRequest) ProtoMessage() {} +func (*ResetWorkflowCallbacksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{22} +} +func (m *ResetWorkflowCallbacksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Unmarshal(m, b) +} +func (m *ResetWorkflowCallbacksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Marshal(b, m, deterministic) +} +func (dst *ResetWorkflowCallbacksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResetWorkflowCallbacksRequest.Merge(dst, src) +} +func (m 
*ResetWorkflowCallbacksRequest) XXX_Size() int { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Size(m) +} +func (m *ResetWorkflowCallbacksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResetWorkflowCallbacksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResetWorkflowCallbacksRequest proto.InternalMessageInfo + +func (m *ResetWorkflowCallbacksRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type ResetWorkflowCallbacksResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResetWorkflowCallbacksResponse) Reset() { *m = ResetWorkflowCallbacksResponse{} } +func (m *ResetWorkflowCallbacksResponse) String() string { return proto.CompactTextString(m) } +func (*ResetWorkflowCallbacksResponse) ProtoMessage() {} +func (*ResetWorkflowCallbacksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{23} +} +func (m *ResetWorkflowCallbacksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Unmarshal(m, b) +} +func (m *ResetWorkflowCallbacksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Marshal(b, m, deterministic) +} +func (dst *ResetWorkflowCallbacksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResetWorkflowCallbacksResponse.Merge(dst, src) +} +func (m *ResetWorkflowCallbacksResponse) XXX_Size() int { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Size(m) +} +func (m *ResetWorkflowCallbacksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ResetWorkflowCallbacksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ResetWorkflowCallbacksResponse proto.InternalMessageInfo + +type TerminateWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + Reason 
string `protobuf:"bytes,2,opt,name=reason,proto3" json:"reason,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowRequest{} } +func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*TerminateWorkflowRequest) ProtoMessage() {} +func (*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{24} +} +func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) +} +func (m *TerminateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TerminateWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *TerminateWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TerminateWorkflowRequest.Merge(dst, src) +} +func (m *TerminateWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_TerminateWorkflowRequest.Size(m) +} +func (m *TerminateWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TerminateWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TerminateWorkflowRequest proto.InternalMessageInfo + +func (m *TerminateWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *TerminateWorkflowRequest) GetReason() string { + if m != nil { + return m.Reason + } + return "" +} + +type TerminateWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TerminateWorkflowResponse) Reset() { *m = TerminateWorkflowResponse{} } +func (m *TerminateWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*TerminateWorkflowResponse) ProtoMessage() {} +func (*TerminateWorkflowResponse) Descriptor() ([]byte, []int) { + 
return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{25} +} +func (m *TerminateWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TerminateWorkflowResponse.Unmarshal(m, b) +} +func (m *TerminateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TerminateWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *TerminateWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TerminateWorkflowResponse.Merge(dst, src) +} +func (m *TerminateWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_TerminateWorkflowResponse.Size(m) +} +func (m *TerminateWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TerminateWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TerminateWorkflowResponse proto.InternalMessageInfo + +type WorkflowSummarySearchResult struct { + TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits,proto3" json:"total_hits,omitempty"` + Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySearchResult{} } +func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } +func (*WorkflowSummarySearchResult) ProtoMessage() {} +func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{26} +} +func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) +} +func (m *WorkflowSummarySearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowSummarySearchResult.Marshal(b, m, deterministic) +} +func (dst *WorkflowSummarySearchResult) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_WorkflowSummarySearchResult.Merge(dst, src) +} +func (m *WorkflowSummarySearchResult) XXX_Size() int { + return xxx_messageInfo_WorkflowSummarySearchResult.Size(m) +} +func (m *WorkflowSummarySearchResult) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowSummarySearchResult.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowSummarySearchResult proto.InternalMessageInfo + +func (m *WorkflowSummarySearchResult) GetTotalHits() int64 { + if m != nil { + return m.TotalHits + } + return 0 +} + +func (m *WorkflowSummarySearchResult) GetResults() []*model.WorkflowSummary { + if m != nil { + return m.Results + } + return nil +} + +func init() { + proto.RegisterType((*StartWorkflowResponse)(nil), "conductor.grpc.workflows.StartWorkflowResponse") + proto.RegisterType((*GetWorkflowsRequest)(nil), "conductor.grpc.workflows.GetWorkflowsRequest") + proto.RegisterType((*GetWorkflowsResponse)(nil), "conductor.grpc.workflows.GetWorkflowsResponse") + proto.RegisterMapType((map[string]*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.WorkflowsByIdEntry") + proto.RegisterType((*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.Workflows") + proto.RegisterType((*GetWorkflowStatusRequest)(nil), "conductor.grpc.workflows.GetWorkflowStatusRequest") + proto.RegisterType((*GetWorkflowStatusResponse)(nil), "conductor.grpc.workflows.GetWorkflowStatusResponse") + proto.RegisterType((*RemoveWorkflowRequest)(nil), "conductor.grpc.workflows.RemoveWorkflowRequest") + proto.RegisterType((*RemoveWorkflowResponse)(nil), "conductor.grpc.workflows.RemoveWorkflowResponse") + proto.RegisterType((*GetRunningWorkflowsRequest)(nil), "conductor.grpc.workflows.GetRunningWorkflowsRequest") + proto.RegisterType((*GetRunningWorkflowsResponse)(nil), "conductor.grpc.workflows.GetRunningWorkflowsResponse") + proto.RegisterType((*DecideWorkflowRequest)(nil), "conductor.grpc.workflows.DecideWorkflowRequest") + 
proto.RegisterType((*DecideWorkflowResponse)(nil), "conductor.grpc.workflows.DecideWorkflowResponse") + proto.RegisterType((*PauseWorkflowRequest)(nil), "conductor.grpc.workflows.PauseWorkflowRequest") + proto.RegisterType((*PauseWorkflowResponse)(nil), "conductor.grpc.workflows.PauseWorkflowResponse") + proto.RegisterType((*ResumeWorkflowRequest)(nil), "conductor.grpc.workflows.ResumeWorkflowRequest") + proto.RegisterType((*ResumeWorkflowResponse)(nil), "conductor.grpc.workflows.ResumeWorkflowResponse") + proto.RegisterType((*SkipTaskRequest)(nil), "conductor.grpc.workflows.SkipTaskRequest") + proto.RegisterType((*SkipTaskResponse)(nil), "conductor.grpc.workflows.SkipTaskResponse") + proto.RegisterType((*RerunWorkflowResponse)(nil), "conductor.grpc.workflows.RerunWorkflowResponse") + proto.RegisterType((*RestartWorkflowRequest)(nil), "conductor.grpc.workflows.RestartWorkflowRequest") + proto.RegisterType((*RestartWorkflowResponse)(nil), "conductor.grpc.workflows.RestartWorkflowResponse") + proto.RegisterType((*RetryWorkflowRequest)(nil), "conductor.grpc.workflows.RetryWorkflowRequest") + proto.RegisterType((*RetryWorkflowResponse)(nil), "conductor.grpc.workflows.RetryWorkflowResponse") + proto.RegisterType((*ResetWorkflowCallbacksRequest)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksRequest") + proto.RegisterType((*ResetWorkflowCallbacksResponse)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksResponse") + proto.RegisterType((*TerminateWorkflowRequest)(nil), "conductor.grpc.workflows.TerminateWorkflowRequest") + proto.RegisterType((*TerminateWorkflowResponse)(nil), "conductor.grpc.workflows.TerminateWorkflowResponse") + proto.RegisterType((*WorkflowSummarySearchResult)(nil), "conductor.grpc.workflows.WorkflowSummarySearchResult") +} + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WorkflowServiceClient is the client API for WorkflowService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WorkflowServiceClient interface { + // POST / + StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*StartWorkflowResponse, error) + // GET /{name}/correlated/{correlationId} + GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) + // GET /{workflowId} + GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) + // DELETE /{workflowId}/remove + RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error) + // GET /running/{name} + GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) + // PUT /decide/{workflowId} + DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error) + // PUT /{workflowId}/pause + PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error) + // PUT /{workflowId}/resume + ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error) + // PUT /{workflowId}/skiptask/{taskReferenceName} + SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error) + // POST /{workflowId}/rerun + RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts
...grpc.CallOption) (*RerunWorkflowResponse, error) + // POST /{workflowId}/restart + RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error) + // POST /{workflowId}/retry + RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error) + // POST /{workflowId}/resetcallbacks + ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error) + // DELETE /{workflowId} + TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error) + // GET /search + Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) + SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) +} + +type workflowServiceClient struct { + cc *grpc.ClientConn +} + +func NewWorkflowServiceClient(cc *grpc.ClientConn) WorkflowServiceClient { + return &workflowServiceClient{cc} +} + +func (c *workflowServiceClient) StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*StartWorkflowResponse, error) { + out := new(StartWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/StartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) { + out := new(GetWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflows", in, out, opts...)
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) { + out := new(model.Workflow) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error) { + out := new(RemoveWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) { + out := new(GetRunningWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error) { + out := new(DecideWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error) { + out := new(PauseWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error) { + out := new(ResumeWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error) { + out := new(SkipTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*RerunWorkflowResponse, error) { + out := new(RerunWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error) { + out := new(RestartWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error) { + out := new(RetryWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error) { + out := new(ResetWorkflowCallbacksResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error) { + out := new(TerminateWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SearchByTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkflowServiceServer is the server API for WorkflowService service. 
+type WorkflowServiceServer interface { + // POST / + StartWorkflow(context.Context, *model.StartWorkflowRequest) (*StartWorkflowResponse, error) + // GET /{name}/correlated/{correlationId} + GetWorkflows(context.Context, *GetWorkflowsRequest) (*GetWorkflowsResponse, error) + // GET /{workflowId} + GetWorkflowStatus(context.Context, *GetWorkflowStatusRequest) (*model.Workflow, error) + // DELETE /{workflodId}/remove + RemoveWorkflow(context.Context, *RemoveWorkflowRequest) (*RemoveWorkflowResponse, error) + // GET /running/{name} + GetRunningWorkflows(context.Context, *GetRunningWorkflowsRequest) (*GetRunningWorkflowsResponse, error) + // PUT /decide/{workflowId} + DecideWorkflow(context.Context, *DecideWorkflowRequest) (*DecideWorkflowResponse, error) + // PUT /{workflowId}/pause + PauseWorkflow(context.Context, *PauseWorkflowRequest) (*PauseWorkflowResponse, error) + // PUT /{workflowId}/pause + ResumeWorkflow(context.Context, *ResumeWorkflowRequest) (*ResumeWorkflowResponse, error) + // PUT /{workflowId}/skiptask/{taskReferenceName} + SkipTaskFromWorkflow(context.Context, *SkipTaskRequest) (*SkipTaskResponse, error) + // POST /{workflowId}/rerun + RerunWorkflow(context.Context, *model.RerunWorkflowRequest) (*RerunWorkflowResponse, error) + // POST /{workflowId}/restart + RestartWorkflow(context.Context, *RestartWorkflowRequest) (*RestartWorkflowResponse, error) + // POST /{workflowId}retry + RetryWorkflow(context.Context, *RetryWorkflowRequest) (*RetryWorkflowResponse, error) + // POST /{workflowId}/resetcallbacks + ResetWorkflowCallbacks(context.Context, *ResetWorkflowCallbacksRequest) (*ResetWorkflowCallbacksResponse, error) + // DELETE /{workflowId} + TerminateWorkflow(context.Context, *TerminateWorkflowRequest) (*TerminateWorkflowResponse, error) + // GET /search + Search(context.Context, *search.Request) (*WorkflowSummarySearchResult, error) + SearchByTasks(context.Context, *search.Request) (*WorkflowSummarySearchResult, error) +} + +func 
RegisterWorkflowServiceServer(s *grpc.Server, srv WorkflowServiceServer) { + s.RegisterService(&_WorkflowService_serviceDesc, srv) +} + +func _WorkflowService_StartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.StartWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).StartWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/StartWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).StartWorkflow(ctx, req.(*model.StartWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/GetWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetWorkflows(ctx, req.(*GetWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetWorkflowStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, req.(*GetWorkflowStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RemoveWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, req.(*RemoveWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetRunningWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRunningWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, req.(*GetRunningWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_DecideWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DecideWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + 
} + if interceptor == nil { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, req.(*DecideWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_PauseWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, req.(*PauseWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_ResumeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, req.(*ResumeWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SkipTaskFromWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor 
grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SkipTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, req.(*SkipTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RerunWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.RerunWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, req.(*model.RerunWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RestartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RestartWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, req.(*RestartWorkflowRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _WorkflowService_RetryWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RetryWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, req.(*RetryWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_ResetWorkflowCallbacks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResetWorkflowCallbacksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, req.(*ResetWorkflowCallbacksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_TerminateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TerminateWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", + } + handler := func(ctx 
context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, req.(*TerminateWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(search.Request) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).Search(ctx, req.(*search.Request)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SearchByTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(search.Request) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/SearchByTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, req.(*search.Request)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkflowService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.workflows.WorkflowService", + HandlerType: (*WorkflowServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "StartWorkflow", + Handler: _WorkflowService_StartWorkflow_Handler, + }, + { + MethodName: "GetWorkflows", + Handler: _WorkflowService_GetWorkflows_Handler, + }, + { + MethodName: "GetWorkflowStatus", + Handler: 
_WorkflowService_GetWorkflowStatus_Handler, + }, + { + MethodName: "RemoveWorkflow", + Handler: _WorkflowService_RemoveWorkflow_Handler, + }, + { + MethodName: "GetRunningWorkflows", + Handler: _WorkflowService_GetRunningWorkflows_Handler, + }, + { + MethodName: "DecideWorkflow", + Handler: _WorkflowService_DecideWorkflow_Handler, + }, + { + MethodName: "PauseWorkflow", + Handler: _WorkflowService_PauseWorkflow_Handler, + }, + { + MethodName: "ResumeWorkflow", + Handler: _WorkflowService_ResumeWorkflow_Handler, + }, + { + MethodName: "SkipTaskFromWorkflow", + Handler: _WorkflowService_SkipTaskFromWorkflow_Handler, + }, + { + MethodName: "RerunWorkflow", + Handler: _WorkflowService_RerunWorkflow_Handler, + }, + { + MethodName: "RestartWorkflow", + Handler: _WorkflowService_RestartWorkflow_Handler, + }, + { + MethodName: "RetryWorkflow", + Handler: _WorkflowService_RetryWorkflow_Handler, + }, + { + MethodName: "ResetWorkflowCallbacks", + Handler: _WorkflowService_ResetWorkflowCallbacks_Handler, + }, + { + MethodName: "TerminateWorkflow", + Handler: _WorkflowService_TerminateWorkflow_Handler, + }, + { + MethodName: "Search", + Handler: _WorkflowService_Search_Handler, + }, + { + MethodName: "SearchByTasks", + Handler: _WorkflowService_SearchByTasks_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/workflow_service.proto", +} + +func init() { + proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_fc7b0bf1a282d9fc) +} + +var fileDescriptor_workflow_service_fc7b0bf1a282d9fc = []byte{ + // 1121 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xcd, 0x6e, 0xdb, 0x46, + 0x10, 0x86, 0xa4, 0xc4, 0xb6, 0xc6, 0x96, 0x7f, 0x36, 0xb6, 0x43, 0xd3, 0x48, 0xaa, 0xb2, 0x08, + 0xe0, 0x14, 0x28, 0xd5, 0x2a, 0x0d, 0xac, 0xe6, 0x94, 0xda, 0x69, 0x53, 0x5f, 0x82, 0x60, 0x65, + 0xa0, 0x40, 0x2f, 0x2c, 0x45, 0xae, 0x65, 0x42, 0xfc, 0x51, 0x77, 0x97, 0x72, 0x54, 0xf4, 0x54, + 0xf4, 
0xd6, 0x57, 0x28, 0xfa, 0x20, 0xbd, 0xf6, 0xc5, 0x0a, 0x2e, 0x97, 0x94, 0x48, 0xad, 0x18, + 0xc9, 0x40, 0x6f, 0xd2, 0xcc, 0x7c, 0x33, 0xb3, 0x33, 0xb3, 0xf3, 0xad, 0x04, 0xa7, 0x43, 0x3a, + 0x76, 0x3a, 0x77, 0x11, 0x1d, 0xdd, 0xf8, 0xd1, 0x9d, 0xc5, 0x08, 0x9d, 0x78, 0x0e, 0x31, 0xc7, + 0x34, 0xe2, 0x11, 0xd2, 0x9c, 0x28, 0x74, 0x63, 0x87, 0x47, 0xd4, 0x4c, 0xcc, 0xcc, 0xcc, 0x8c, + 0xe9, 0x07, 0x02, 0xc6, 0x88, 0x4d, 0x9d, 0xdb, 0xd4, 0x58, 0x3f, 0x0c, 0x22, 0x97, 0xf8, 0xb9, + 0x2b, 0x29, 0x3d, 0x2d, 0x4a, 0x59, 0x1c, 0x04, 0x36, 0x9d, 0x16, 0x95, 0x6c, 0xe4, 0x8d, 0xb9, + 0xcd, 0x46, 0x94, 0xfc, 0x12, 0x13, 0xc6, 0xa5, 0xb2, 0x2d, 0x95, 0xdc, 0xa6, 0x3c, 0x83, 0x2b, + 0x2d, 0x28, 0xa1, 0x71, 0xa8, 0xb4, 0x30, 0x7a, 0x70, 0xd4, 0x4f, 0xf0, 0x3f, 0x4a, 0x2d, 0x26, + 0x6c, 0x1c, 0x85, 0x8c, 0xa0, 0x4f, 0x60, 0x3b, 0x3f, 0xb3, 0xe7, 0x6a, 0xb5, 0x76, 0xed, 0xac, + 0x89, 0x21, 0x13, 0x5d, 0xb9, 0xc6, 0x5f, 0x35, 0x78, 0xf4, 0x96, 0xe4, 0x40, 0x86, 0x53, 0xbf, + 0x08, 0xc1, 0x83, 0xd0, 0x0e, 0x88, 0x44, 0x88, 0xcf, 0xe8, 0x19, 0xec, 0x3a, 0x11, 0xa5, 0xc4, + 0xb7, 0xb9, 0x17, 0x85, 0x89, 0xbf, 0x7a, 0xbb, 0x71, 0xd6, 0xc4, 0xad, 0x39, 0xe9, 0x95, 0x9b, + 0x98, 0x79, 0xa1, 0xe3, 0xc7, 0x2e, 0xb1, 0x1c, 0x3f, 0x62, 0xc4, 0xd5, 0x1a, 0xed, 0xda, 0xd9, + 0x16, 0x6e, 0x49, 0xe9, 0xa5, 0x10, 0xa2, 0xcf, 0x20, 0x13, 0x58, 0x49, 0x51, 0x98, 0xf6, 0x40, + 0x58, 0xed, 0x48, 0xe1, 0x75, 0x22, 0x33, 0xfe, 0xad, 0xc3, 0x61, 0x31, 0x3d, 0x79, 0x30, 0x0f, + 0xf6, 0xf2, 0x5a, 0x5b, 0x83, 0x69, 0x7a, 0xb8, 0xc6, 0xd9, 0x76, 0xf7, 0x5b, 0x73, 0x59, 0x33, + 0x4d, 0x95, 0x23, 0x33, 0x97, 0x5c, 0x4c, 0xaf, 0xdc, 0xef, 0x42, 0x4e, 0xa7, 0xb8, 0x75, 0x37, + 0x2f, 0xd3, 0xdf, 0x40, 0x33, 0x37, 0x42, 0xe7, 0xd0, 0xcc, 0xb5, 0x32, 0xe2, 0xc9, 0x5c, 0x44, + 0xd1, 0x8e, 0xdc, 0x27, 0x9e, 0xd9, 0xea, 0xbf, 0x02, 0x5a, 0x0c, 0x85, 0xf6, 0xa1, 0x31, 0x22, + 0x53, 0x59, 0xe5, 0xe4, 0x23, 0x7a, 0x07, 0x0f, 0x27, 0xb6, 0x1f, 0x13, 0xad, 0xde, 0xae, 0x9d, + 0x6d, 0x77, 0x7b, 0xf7, 0x3d, 0x0e, 0x4e, 
0xdd, 0xbc, 0xaa, 0xf7, 0x6a, 0xc6, 0xcf, 0xa0, 0xcd, + 0x19, 0xf7, 0xb9, 0xcd, 0xe3, 0xbc, 0xd1, 0x1f, 0x9b, 0x90, 0xc5, 0x3e, 0xd5, 0x15, 0x7d, 0xc2, + 0x70, 0xa2, 0x88, 0x20, 0x7b, 0xf5, 0x12, 0xb6, 0x32, 0x7f, 0xc2, 0x7f, 0x65, 0xc9, 0x72, 0x53, + 0xc3, 0x81, 0x23, 0x4c, 0x82, 0x68, 0x42, 0x66, 0x53, 0x5d, 0x4e, 0xd9, 0x5d, 0x4c, 0xd9, 0xbd, + 0x72, 0xd1, 0x73, 0xd8, 0x4f, 0x2e, 0xac, 0x37, 0x21, 0x56, 0x1e, 0x38, 0xcd, 0x7a, 0x4f, 0xca, + 0x33, 0x97, 0x86, 0x06, 0xc7, 0xe5, 0x20, 0x69, 0xd6, 0xc6, 0x1f, 0x35, 0xd0, 0xdf, 0x12, 0x8e, + 0xe3, 0x30, 0xf4, 0xc2, 0xe1, 0x4a, 0x17, 0x44, 0x83, 0xcd, 0x09, 0xa1, 0xcc, 0x8b, 0x42, 0x11, + 0xee, 0x21, 0xce, 0xbe, 0xa2, 0x27, 0x00, 0xe2, 0x82, 0x5b, 0xdc, 0x0b, 0x88, 0xb8, 0x0f, 0x0d, + 0xdc, 0x14, 0x92, 0x6b, 0x2f, 0x20, 0xe8, 0x04, 0xb6, 0x48, 0xe8, 0xa6, 0xca, 0x07, 0x42, 0xb9, + 0x49, 0x42, 0x37, 0x51, 0x19, 0xaf, 0xe1, 0x54, 0x99, 0x85, 0xac, 0xed, 0xa7, 0xb0, 0x33, 0xd7, + 0xbe, 0x74, 0x24, 0x9b, 0x78, 0x7b, 0xd6, 0x3f, 0x96, 0x2c, 0x87, 0x37, 0xc4, 0xf1, 0xdc, 0x8a, + 0x3a, 0x2e, 0x59, 0x0e, 0x1a, 0x1c, 0x97, 0x91, 0xb2, 0x38, 0xe7, 0x70, 0xf8, 0xde, 0x8e, 0xd9, + 0xfa, 0x2e, 0x1f, 0xc3, 0x51, 0x09, 0x28, 0x3d, 0xf6, 0x92, 0x6e, 0xb3, 0x38, 0xb8, 0x57, 0x96, + 0x65, 0xa4, 0xf4, 0xf9, 0x77, 0x0d, 0xf6, 0xfa, 0x23, 0x6f, 0x9c, 0xcc, 0xe8, 0xca, 0xf3, 0x6e, + 0xc2, 0xa3, 0x64, 0xce, 0x2d, 0x4a, 0x6e, 0x08, 0x25, 0xa1, 0x43, 0x2c, 0xd1, 0xe7, 0xba, 0x30, + 0x3c, 0xe0, 0xc2, 0x95, 0xd4, 0xbc, 0x4b, 0x9a, 0xfe, 0x0a, 0x36, 0xe5, 0x32, 0x16, 0x7d, 0xdd, + 0xee, 0xb6, 0x17, 0x86, 0xbb, 0x94, 0x03, 0xce, 0x00, 0x06, 0x82, 0xfd, 0x99, 0x6e, 0xbe, 0x10, + 0x34, 0x0e, 0xd7, 0xdf, 0xe5, 0xdf, 0x88, 0x42, 0x14, 0x79, 0x60, 0xc5, 0x1a, 0x9e, 0xc0, 0xe3, + 0x05, 0xe8, 0xac, 0xd5, 0x98, 0x70, 0x3a, 0xbd, 0x4f, 0xab, 0x4b, 0x40, 0xe9, 0xf1, 0x35, 0x3c, + 0xc1, 0x84, 0xcd, 0xd6, 0xc5, 0xa5, 0xed, 0xfb, 0x03, 0xdb, 0x19, 0xad, 0xbc, 0x93, 0x8c, 0x36, + 0x3c, 0x5d, 0xe6, 0x41, 0xc6, 0xe8, 0x83, 0x76, 0x4d, 0x68, 0xe0, 0x85, 0x36, 
0x5f, 0x7b, 0xa2, + 0xd0, 0x31, 0x6c, 0x50, 0x62, 0x33, 0x79, 0x8d, 0x9b, 0x58, 0x7e, 0x33, 0x4e, 0xe1, 0x44, 0xe1, + 0x54, 0x46, 0xfc, 0x00, 0xa7, 0xf9, 0xfe, 0x4b, 0xd9, 0xbf, 0x2f, 0x9e, 0x0d, 0xc9, 0x6c, 0xfa, + 0x3c, 0xd9, 0x00, 0x3c, 0xe2, 0xb6, 0x6f, 0xdd, 0x7a, 0x9c, 0x89, 0x98, 0x0d, 0xdc, 0x14, 0x92, + 0x1f, 0x3c, 0xce, 0xd2, 0x29, 0x4a, 0x0c, 0x99, 0x20, 0x55, 0xd5, 0x14, 0x95, 0xbc, 0xe3, 0x0c, + 0xd0, 0xfd, 0x67, 0x17, 0xf6, 0x72, 0x65, 0xfa, 0xb0, 0x41, 0x43, 0x68, 0x15, 0x5e, 0x04, 0xe8, + 0xd9, 0xe2, 0x54, 0x2a, 0x26, 0x45, 0xef, 0x2c, 0xe7, 0x1b, 0xf5, 0x0b, 0x23, 0x80, 0x9d, 0x79, + 0x22, 0x42, 0x5f, 0xac, 0x4a, 0x58, 0x69, 0x3c, 0x73, 0x3d, 0x7e, 0x43, 0x03, 0x38, 0x58, 0x20, + 0x1a, 0xd4, 0x5d, 0xc9, 0x49, 0x81, 0xf7, 0xf4, 0xe5, 0x14, 0x84, 0x18, 0xec, 0x16, 0x39, 0x01, + 0x55, 0x54, 0x45, 0x49, 0x51, 0xfa, 0x97, 0xab, 0x03, 0xe4, 0xc1, 0x7e, 0x4f, 0x1f, 0x62, 0xe5, + 0x45, 0x8f, 0xbe, 0xae, 0x3c, 0xdb, 0x12, 0x76, 0xd2, 0x5f, 0xae, 0x89, 0x92, 0x49, 0x30, 0xd8, + 0x2d, 0x2e, 0xfc, 0xaa, 0x93, 0x2b, 0x49, 0xa5, 0xea, 0xe4, 0x6a, 0x2e, 0x41, 0x63, 0x68, 0x15, + 0x28, 0x01, 0x55, 0xcc, 0x84, 0x8a, 0x74, 0xaa, 0x66, 0x56, 0xc9, 0x35, 0x69, 0x83, 0xe7, 0x19, + 0xa3, 0xba, 0xc1, 0x0a, 0x56, 0xaa, 0x6e, 0xb0, 0x8a, 0x8c, 0x50, 0x00, 0x87, 0xd9, 0xae, 0xff, + 0x9e, 0x46, 0x41, 0x1e, 0xfa, 0x79, 0xc5, 0x8d, 0x2b, 0xf2, 0x86, 0xfe, 0xf9, 0x2a, 0xa6, 0x32, + 0xdc, 0x10, 0x5a, 0x05, 0x1a, 0x51, 0x2c, 0x80, 0x12, 0xcd, 0x7c, 0xb4, 0x98, 0x6a, 0x5a, 0x9a, + 0xc0, 0x5e, 0x89, 0x3a, 0x50, 0x75, 0x71, 0x54, 0x6b, 0xe7, 0xab, 0x35, 0x10, 0xb3, 0xb1, 0x29, + 0xd0, 0x4b, 0xd5, 0xd8, 0xa8, 0x08, 0xac, 0xfa, 0xa4, 0x0a, 0xde, 0x42, 0x7f, 0xd6, 0x04, 0xc1, + 0x2a, 0x68, 0x07, 0x9d, 0x57, 0xe6, 0xbf, 0x9c, 0xea, 0xf4, 0xde, 0xfa, 0x40, 0x99, 0xcd, 0x6f, + 0x70, 0xb0, 0x40, 0x46, 0x55, 0x9b, 0x70, 0x19, 0x1d, 0xea, 0x2f, 0xd6, 0xc2, 0xc8, 0xe8, 0x16, + 0x6c, 0xa4, 0xf4, 0x86, 0x9e, 0x96, 0xe1, 0xf2, 0xd7, 0xf2, 0x0a, 0xab, 0xa8, 0x8a, 0x2f, 0x6f, + 0xa0, 0x95, 0x7e, 
0xbf, 0x98, 0x8a, 0x9f, 0x18, 0xff, 0x53, 0x9c, 0x0b, 0x0e, 0xba, 0x13, 0x05, + 0x66, 0x48, 0xf8, 0x8d, 0xef, 0x7d, 0x28, 0xf9, 0xb8, 0x38, 0x28, 0xf1, 0xea, 0xfb, 0xc1, 0x4f, + 0x97, 0x43, 0x8f, 0xdf, 0xc6, 0x03, 0xd3, 0x89, 0x82, 0x8e, 0x44, 0x75, 0x72, 0x54, 0xc7, 0xf1, + 0x3d, 0x12, 0xf2, 0xce, 0x30, 0x12, 0x7f, 0x1c, 0xcc, 0xe4, 0x85, 0xbf, 0x1f, 0xd8, 0x60, 0x43, + 0xdc, 0xc0, 0x17, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0x39, 0x12, 0x2a, 0x80, 0x97, 0x10, 0x00, + 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go new file mode 100644 index 0000000000..d47b9b6b61 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointask.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/dynamicforkjointask.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DynamicForkJoinTask struct { + TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` + WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName,proto3" json:"workflow_name,omitempty"` + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicForkJoinTask) Reset() { *m = DynamicForkJoinTask{} } +func (m *DynamicForkJoinTask) String() string { return proto.CompactTextString(m) } +func (*DynamicForkJoinTask) ProtoMessage() {} +func (*DynamicForkJoinTask) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamicforkjointask_60f4ea3626679478, []int{0} +} +func (m *DynamicForkJoinTask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicForkJoinTask.Unmarshal(m, b) +} +func (m *DynamicForkJoinTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicForkJoinTask.Marshal(b, m, deterministic) +} +func (dst *DynamicForkJoinTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicForkJoinTask.Merge(dst, src) +} +func (m *DynamicForkJoinTask) XXX_Size() int { + return xxx_messageInfo_DynamicForkJoinTask.Size(m) +} +func (m *DynamicForkJoinTask) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicForkJoinTask.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicForkJoinTask proto.InternalMessageInfo + +func (m *DynamicForkJoinTask) GetTaskName() string { + if m != nil { + return m.TaskName + } + return "" +} 
+ +func (m *DynamicForkJoinTask) GetWorkflowName() string { + if m != nil { + return m.WorkflowName + } + return "" +} + +func (m *DynamicForkJoinTask) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *DynamicForkJoinTask) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *DynamicForkJoinTask) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func init() { + proto.RegisterType((*DynamicForkJoinTask)(nil), "conductor.proto.DynamicForkJoinTask") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.DynamicForkJoinTask.InputEntry") +} + +func init() { + proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_60f4ea3626679478) +} + +var fileDescriptor_dynamicforkjointask_60f4ea3626679478 = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x55, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, + 0x50, 0x2f, 0xd2, 0x63, 0xb1, 0x82, 0x1e, 0xa4, 0x14, 0xf1, 0xe0, 0x45, 0x76, 0xd3, 0xd9, 0x35, + 0xee, 0x6e, 0x66, 0x49, 0x13, 0xeb, 0x7e, 0x26, 0xbf, 0xa4, 0x6c, 0xd2, 0x56, 0x29, 0xbd, 0x4d, + 0xde, 0xfc, 0xde, 0xe4, 0x65, 0x42, 0x2e, 0x32, 0x9c, 0x43, 0xca, 0xe7, 0x85, 0x0a, 0x32, 0x29, + 0x22, 0xd4, 0xc9, 0x27, 0x4a, 0x65, 0x82, 0x45, 0xc2, 0x72, 0x8d, 0x06, 0xe9, 0x91, 0x40, 0x35, + 0xb7, 0xc2, 0xa0, 0xf6, 0x42, 0xf7, 0x3c, 0x46, 0x8c, 0x53, 0xe0, 0xee, 0x14, 0xda, 0x88, 0x2f, + 0x8c, 0xb6, 0xc2, 0xf8, 0xee, 0xe5, 0x4f, 0x95, 0x9c, 0xdc, 0xfb, 0x61, 0x0f, 0xa8, 0x93, 0x27, + 0x94, 0xea, 0x25, 0x58, 0x24, 0xb4, 0x47, 0x9a, 0xe5, 0xd0, 0x77, 0x15, 0x64, 0xd0, 0xa9, 0xf4, + 0x2b, 0x83, 0xe6, 0x6c, 0xbf, 0x14, 0x9e, 0x83, 0x0c, 0xe8, 0x15, 0x69, 0x2f, 0x51, 0x27, 0x51, + 0x8a, 0x4b, 0x0f, 0x54, 0x1d, 0x70, 0xb0, 0x16, 0x1d, 0x74, 0x4d, 0x0e, 0x35, 0x44, 
0xa0, 0x41, + 0x09, 0xf0, 0x54, 0xcd, 0x51, 0xed, 0x8d, 0xea, 0xb0, 0x09, 0x69, 0x48, 0x95, 0x5b, 0xd3, 0xa9, + 0xf7, 0x6b, 0x83, 0xd6, 0x90, 0xb3, 0xad, 0xfc, 0x6c, 0x47, 0x3a, 0xf6, 0x58, 0x3a, 0x26, 0xca, + 0xe8, 0x62, 0xe6, 0xdd, 0x94, 0x92, 0xba, 0x29, 0x72, 0xe8, 0x34, 0xdc, 0x1d, 0xae, 0xee, 0x4e, + 0x09, 0xf9, 0x03, 0xe9, 0x31, 0xa9, 0x25, 0x50, 0xac, 0xde, 0x52, 0x96, 0xf4, 0x86, 0x34, 0xbe, + 0x82, 0xd4, 0xfa, 0xf8, 0xad, 0xe1, 0x19, 0xf3, 0x9b, 0x62, 0xeb, 0x4d, 0xb1, 0xd7, 0xb2, 0x3b, + 0xf3, 0xd0, 0xa8, 0x7a, 0x57, 0x19, 0xe7, 0xa4, 0x27, 0x30, 0x63, 0x0a, 0x4c, 0x94, 0xca, 0xef, + 0xed, 0xa8, 0xe3, 0xd3, 0x1d, 0x59, 0xa7, 0xe1, 0xdb, 0x28, 0x96, 0xe6, 0xc3, 0x86, 0x4c, 0x60, + 0xc6, 0x57, 0x56, 0xbe, 0xb1, 0x72, 0x91, 0x4a, 0x50, 0x86, 0xc7, 0x18, 0xeb, 0x5c, 0xfc, 0xd3, + 0xdd, 0x47, 0x87, 0x7b, 0x6e, 0xf2, 0xed, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6d, 0xe6, 0x21, + 0x30, 0xf8, 0x01, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go new file mode 100644 index 0000000000..9650213be0 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/dynamicforkjointasklist.pb.go @@ -0,0 +1,82 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/dynamicforkjointasklist.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DynamicForkJoinTaskList struct { + DynamicTasks []*DynamicForkJoinTask `protobuf:"bytes,1,rep,name=dynamic_tasks,json=dynamicTasks,proto3" json:"dynamic_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicForkJoinTaskList) Reset() { *m = DynamicForkJoinTaskList{} } +func (m *DynamicForkJoinTaskList) String() string { return proto.CompactTextString(m) } +func (*DynamicForkJoinTaskList) ProtoMessage() {} +func (*DynamicForkJoinTaskList) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e, []int{0} +} +func (m *DynamicForkJoinTaskList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicForkJoinTaskList.Unmarshal(m, b) +} +func (m *DynamicForkJoinTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicForkJoinTaskList.Marshal(b, m, deterministic) +} +func (dst *DynamicForkJoinTaskList) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicForkJoinTaskList.Merge(dst, src) +} +func (m *DynamicForkJoinTaskList) XXX_Size() int { + return xxx_messageInfo_DynamicForkJoinTaskList.Size(m) +} +func (m *DynamicForkJoinTaskList) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicForkJoinTaskList.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicForkJoinTaskList proto.InternalMessageInfo + +func (m *DynamicForkJoinTaskList) GetDynamicTasks() []*DynamicForkJoinTask { + if m != nil { + return m.DynamicTasks + } + return nil +} + +func init() { + proto.RegisterType((*DynamicForkJoinTaskList)(nil), "conductor.proto.DynamicForkJoinTaskList") +} + +func init() { + proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e) +} + +var fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e = []byte{ + // 200 bytes of a gzipped FileDescriptorProto + 
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xce, 0xcd, 0x4f, 0x49, + 0xcd, 0xd1, 0x4f, 0xa9, 0xcc, 0x4b, 0xcc, 0xcd, 0x4c, 0x4e, 0xcb, 0x2f, 0xca, 0xce, 0xca, 0xcf, + 0xcc, 0x2b, 0x49, 0x2c, 0xce, 0xce, 0xc9, 0x2c, 0x2e, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, + 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x48, 0xc9, 0xe3, 0xd4, + 0x05, 0x51, 0xa0, 0x94, 0xc2, 0x25, 0xee, 0x02, 0x91, 0x74, 0xcb, 0x2f, 0xca, 0xf6, 0xca, 0xcf, + 0xcc, 0x0b, 0x49, 0x2c, 0xce, 0xf6, 0xc9, 0x2c, 0x2e, 0x11, 0xf2, 0xe4, 0xe2, 0x85, 0xea, 0x8b, + 0x07, 0x69, 0x28, 0x96, 0x60, 0x54, 0x60, 0xd6, 0xe0, 0x36, 0x52, 0xd1, 0x43, 0xb3, 0x44, 0x0f, + 0x8b, 0x01, 0x41, 0x3c, 0x50, 0xad, 0x20, 0x4e, 0xb1, 0x53, 0x09, 0x97, 0x74, 0x72, 0x7e, 0xae, + 0x5e, 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x05, 0xba, 0x01, 0x4e, 0x92, 0x38, 0x9c, 0x10, 0x90, + 0x14, 0x65, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0xd5, 0xae, + 0x0f, 0xd7, 0xae, 0x9f, 0x9c, 0x93, 0x99, 0x9a, 0x57, 0xa2, 0x9f, 0x9e, 0x9f, 0x5e, 0x54, 0x90, + 0x8c, 0x24, 0x0e, 0xf6, 0x75, 0x12, 0x1b, 0xd8, 0x74, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x76, 0xa8, 0x2e, 0xed, 0x3b, 0x01, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go b/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go new file mode 100644 index 0000000000..ac1c2a8ca0 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/eventexecution.pb.go @@ -0,0 +1,185 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/eventexecution.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type EventExecution_Status int32 + +const ( + EventExecution_IN_PROGRESS EventExecution_Status = 0 + EventExecution_COMPLETED EventExecution_Status = 1 + EventExecution_FAILED EventExecution_Status = 2 + EventExecution_SKIPPED EventExecution_Status = 3 +) + +var EventExecution_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "COMPLETED", + 2: "FAILED", + 3: "SKIPPED", +} +var EventExecution_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "COMPLETED": 1, + "FAILED": 2, + "SKIPPED": 3, +} + +func (x EventExecution_Status) String() string { + return proto.EnumName(EventExecution_Status_name, int32(x)) +} +func (EventExecution_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0, 0} +} + +type EventExecution struct { + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + MessageId string `protobuf:"bytes,2,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Event string `protobuf:"bytes,4,opt,name=event,proto3" json:"event,omitempty"` + Created int64 `protobuf:"varint,5,opt,name=created,proto3" json:"created,omitempty"` + Status EventExecution_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.EventExecution_Status" json:"status,omitempty"` + Action EventHandler_Action_Type `protobuf:"varint,7,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output,proto3" 
json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventExecution) Reset() { *m = EventExecution{} } +func (m *EventExecution) String() string { return proto.CompactTextString(m) } +func (*EventExecution) ProtoMessage() {} +func (*EventExecution) Descriptor() ([]byte, []int) { + return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0} +} +func (m *EventExecution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventExecution.Unmarshal(m, b) +} +func (m *EventExecution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventExecution.Marshal(b, m, deterministic) +} +func (dst *EventExecution) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventExecution.Merge(dst, src) +} +func (m *EventExecution) XXX_Size() int { + return xxx_messageInfo_EventExecution.Size(m) +} +func (m *EventExecution) XXX_DiscardUnknown() { + xxx_messageInfo_EventExecution.DiscardUnknown(m) +} + +var xxx_messageInfo_EventExecution proto.InternalMessageInfo + +func (m *EventExecution) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *EventExecution) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +func (m *EventExecution) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventExecution) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *EventExecution) GetCreated() int64 { + if m != nil { + return m.Created + } + return 0 +} + +func (m *EventExecution) GetStatus() EventExecution_Status { + if m != nil { + return m.Status + } + return EventExecution_IN_PROGRESS +} + +func (m *EventExecution) GetAction() EventHandler_Action_Type { + if m != nil { + return m.Action + } + return EventHandler_Action_START_WORKFLOW +} + +func (m 
*EventExecution) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func init() { + proto.RegisterType((*EventExecution)(nil), "conductor.proto.EventExecution") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventExecution.OutputEntry") + proto.RegisterEnum("conductor.proto.EventExecution_Status", EventExecution_Status_name, EventExecution_Status_value) +} + +func init() { + proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_680c67ac3fada8e2) +} + +var fileDescriptor_eventexecution_680c67ac3fada8e2 = []byte{ + // 434 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8b, 0xd3, 0x40, + 0x14, 0xc6, 0x4d, 0xb2, 0x4d, 0xed, 0x0b, 0x76, 0xc3, 0x20, 0x32, 0x54, 0x85, 0xb2, 0x07, 0xa9, + 0x28, 0x13, 0xa8, 0x17, 0xd9, 0x83, 0xd0, 0xdd, 0x46, 0x2d, 0xae, 0x36, 0xa6, 0x8b, 0x07, 0x2f, + 0x4b, 0x3a, 0x79, 0x9b, 0x0d, 0x9b, 0xcc, 0x94, 0x64, 0x66, 0xd9, 0xfe, 0xb9, 0xfe, 0x27, 0xd2, + 0x49, 0x22, 0xdd, 0x22, 0xec, 0x6d, 0xde, 0xf7, 0x7d, 0xbf, 0xe4, 0xbd, 0x37, 0x03, 0xa3, 0x52, + 0xa6, 0x58, 0x04, 0x78, 0x87, 0x42, 0xe1, 0x3d, 0x72, 0xad, 0x72, 0x29, 0xd8, 0xa6, 0x92, 0x4a, + 0x92, 0x63, 0x2e, 0x45, 0xaa, 0xb9, 0x92, 0x55, 0x23, 0x8c, 0xe8, 0x5e, 0xf8, 0x26, 0x11, 0x69, + 0x81, 0x9d, 0xf3, 0x2a, 0x93, 0x32, 0x2b, 0x30, 0x30, 0xd5, 0x5a, 0x5f, 0x07, 0xb5, 0xaa, 0x34, + 0x57, 0x8d, 0x7b, 0xf2, 0xc7, 0x81, 0x61, 0xb8, 0x83, 0xc2, 0xee, 0x0f, 0x64, 0x08, 0x76, 0x9e, + 0x52, 0x6b, 0x6c, 0x4d, 0x06, 0xb1, 0x9d, 0xa7, 0xe4, 0x35, 0x40, 0x89, 0x75, 0x9d, 0x64, 0x78, + 0x95, 0xa7, 0xd4, 0x36, 0xfa, 0xa0, 0x55, 0x16, 0x29, 0x21, 0x70, 0x24, 0x92, 0x12, 0xa9, 0x63, + 0x0c, 0x73, 0x26, 0xcf, 0xa1, 0x67, 0x3a, 0xa1, 0x47, 0x46, 0x6c, 0x0a, 0x42, 0xa1, 0xcf, 0x2b, + 0x4c, 0x14, 0xa6, 0xb4, 0x37, 0xb6, 0x26, 0x4e, 0xdc, 0x95, 0xe4, 0x13, 0xb8, 0xb5, 0x4a, 0x94, + 0xae, 0xa9, 0x3b, 0xb6, 0x26, 0xc3, 0xe9, 0x1b, 0x76, 0x30, 
0x1f, 0x7b, 0xd8, 0x23, 0x5b, 0x99, + 0x74, 0xdc, 0x52, 0x64, 0x06, 0x6e, 0xc2, 0x77, 0x06, 0xed, 0x1b, 0xfe, 0xed, 0xff, 0xf9, 0xaf, + 0xed, 0x62, 0x66, 0x26, 0xcb, 0x2e, 0xb7, 0x1b, 0x8c, 0x5b, 0x90, 0x9c, 0x83, 0x2b, 0xb5, 0xda, + 0x68, 0x45, 0x9f, 0x8e, 0x9d, 0x89, 0x37, 0x7d, 0xf7, 0x58, 0x0b, 0x4b, 0x93, 0x0e, 0x85, 0xaa, + 0xb6, 0x71, 0x8b, 0x8e, 0x7e, 0x82, 0xb7, 0x27, 0x13, 0x1f, 0x9c, 0x5b, 0xdc, 0xb6, 0xab, 0xdc, + 0x1d, 0xc9, 0x7b, 0xe8, 0xdd, 0x25, 0x85, 0x46, 0xb3, 0x46, 0x6f, 0xfa, 0x82, 0x35, 0x97, 0xc3, + 0xba, 0xcb, 0x61, 0xbf, 0x76, 0x6e, 0xdc, 0x84, 0x4e, 0xed, 0x8f, 0xd6, 0xc9, 0x0c, 0xdc, 0x66, + 0x58, 0x72, 0x0c, 0xde, 0xe2, 0xc7, 0x55, 0x14, 0x2f, 0xbf, 0xc4, 0xe1, 0x6a, 0xe5, 0x3f, 0x21, + 0xcf, 0x60, 0x70, 0xbe, 0xfc, 0x1e, 0x5d, 0x84, 0x97, 0xe1, 0xdc, 0xb7, 0x08, 0x80, 0xfb, 0x79, + 0xb6, 0xb8, 0x08, 0xe7, 0xbe, 0x4d, 0x3c, 0xe8, 0xaf, 0xbe, 0x2d, 0xa2, 0x28, 0x9c, 0xfb, 0xce, + 0xd9, 0x2d, 0xbc, 0xe4, 0xb2, 0x64, 0x02, 0xd5, 0x75, 0x91, 0xdf, 0x1f, 0xce, 0x75, 0xe6, 0x3f, + 0x1c, 0x2c, 0x5a, 0xff, 0x3e, 0xcd, 0x72, 0x75, 0xa3, 0xd7, 0x8c, 0xcb, 0x32, 0x68, 0xa9, 0xe0, + 0x1f, 0x15, 0xf0, 0x22, 0x47, 0xa1, 0x82, 0x4c, 0x66, 0xd5, 0x86, 0xef, 0xe9, 0xe6, 0x05, 0xae, + 0x5d, 0xf3, 0xd1, 0x0f, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x30, 0x90, 0x3d, 0xc6, 0xbe, 0x02, + 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go b/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go new file mode 100644 index 0000000000..def177112d --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/eventhandler.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: model/eventhandler.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type EventHandler_Action_Type int32 + +const ( + EventHandler_Action_START_WORKFLOW EventHandler_Action_Type = 0 + EventHandler_Action_COMPLETE_TASK EventHandler_Action_Type = 1 + EventHandler_Action_FAIL_TASK EventHandler_Action_Type = 2 +) + +var EventHandler_Action_Type_name = map[int32]string{ + 0: "START_WORKFLOW", + 1: "COMPLETE_TASK", + 2: "FAIL_TASK", +} +var EventHandler_Action_Type_value = map[string]int32{ + "START_WORKFLOW": 0, + "COMPLETE_TASK": 1, + "FAIL_TASK": 2, +} + +func (x EventHandler_Action_Type) String() string { + return proto.EnumName(EventHandler_Action_Type_name, int32(x)) +} +func (EventHandler_Action_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2, 0} +} + +type EventHandler struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Event string `protobuf:"bytes,2,opt,name=event,proto3" json:"event,omitempty"` + Condition string `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` + Actions []*EventHandler_Action `protobuf:"bytes,4,rep,name=actions,proto3" json:"actions,omitempty"` + Active bool `protobuf:"varint,5,opt,name=active,proto3" 
json:"active,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler) Reset() { *m = EventHandler{} } +func (m *EventHandler) String() string { return proto.CompactTextString(m) } +func (*EventHandler) ProtoMessage() {} +func (*EventHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0} +} +func (m *EventHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler.Unmarshal(m, b) +} +func (m *EventHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler.Marshal(b, m, deterministic) +} +func (dst *EventHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler.Merge(dst, src) +} +func (m *EventHandler) XXX_Size() int { + return xxx_messageInfo_EventHandler.Size(m) +} +func (m *EventHandler) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler proto.InternalMessageInfo + +func (m *EventHandler) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventHandler) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *EventHandler) GetCondition() string { + if m != nil { + return m.Condition + } + return "" +} + +func (m *EventHandler) GetActions() []*EventHandler_Action { + if m != nil { + return m.Actions + } + return nil +} + +func (m *EventHandler) GetActive() bool { + if m != nil { + return m.Active + } + return false +} + +type EventHandler_StartWorkflow struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" 
json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + InputMessage *any.Any `protobuf:"bytes,5,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler_StartWorkflow) Reset() { *m = EventHandler_StartWorkflow{} } +func (m *EventHandler_StartWorkflow) String() string { return proto.CompactTextString(m) } +func (*EventHandler_StartWorkflow) ProtoMessage() {} +func (*EventHandler_StartWorkflow) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 0} +} +func (m *EventHandler_StartWorkflow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_StartWorkflow.Unmarshal(m, b) +} +func (m *EventHandler_StartWorkflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_StartWorkflow.Marshal(b, m, deterministic) +} +func (dst *EventHandler_StartWorkflow) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler_StartWorkflow.Merge(dst, src) +} +func (m *EventHandler_StartWorkflow) XXX_Size() int { + return xxx_messageInfo_EventHandler_StartWorkflow.Size(m) +} +func (m *EventHandler_StartWorkflow) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_StartWorkflow.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_StartWorkflow proto.InternalMessageInfo + +func (m *EventHandler_StartWorkflow) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventHandler_StartWorkflow) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *EventHandler_StartWorkflow) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *EventHandler_StartWorkflow) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m 
*EventHandler_StartWorkflow) GetInputMessage() *any.Any { + if m != nil { + return m.InputMessage + } + return nil +} + +type EventHandler_TaskDetails struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,3,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + OutputMessage *any.Any `protobuf:"bytes,4,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler_TaskDetails) Reset() { *m = EventHandler_TaskDetails{} } +func (m *EventHandler_TaskDetails) String() string { return proto.CompactTextString(m) } +func (*EventHandler_TaskDetails) ProtoMessage() {} +func (*EventHandler_TaskDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 1} +} +func (m *EventHandler_TaskDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_TaskDetails.Unmarshal(m, b) +} +func (m *EventHandler_TaskDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_TaskDetails.Marshal(b, m, deterministic) +} +func (dst *EventHandler_TaskDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler_TaskDetails.Merge(dst, src) +} +func (m *EventHandler_TaskDetails) XXX_Size() int { + return xxx_messageInfo_EventHandler_TaskDetails.Size(m) +} +func (m *EventHandler_TaskDetails) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_TaskDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_TaskDetails proto.InternalMessageInfo + +func (m *EventHandler_TaskDetails) GetWorkflowId() 
string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *EventHandler_TaskDetails) GetTaskRefName() string { + if m != nil { + return m.TaskRefName + } + return "" +} + +func (m *EventHandler_TaskDetails) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func (m *EventHandler_TaskDetails) GetOutputMessage() *any.Any { + if m != nil { + return m.OutputMessage + } + return nil +} + +type EventHandler_Action struct { + Action EventHandler_Action_Type `protobuf:"varint,1,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` + StartWorkflow *EventHandler_StartWorkflow `protobuf:"bytes,2,opt,name=start_workflow,json=startWorkflow,proto3" json:"start_workflow,omitempty"` + CompleteTask *EventHandler_TaskDetails `protobuf:"bytes,3,opt,name=complete_task,json=completeTask,proto3" json:"complete_task,omitempty"` + FailTask *EventHandler_TaskDetails `protobuf:"bytes,4,opt,name=fail_task,json=failTask,proto3" json:"fail_task,omitempty"` + ExpandInlineJson bool `protobuf:"varint,5,opt,name=expand_inline_json,json=expandInlineJson,proto3" json:"expand_inline_json,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler_Action) Reset() { *m = EventHandler_Action{} } +func (m *EventHandler_Action) String() string { return proto.CompactTextString(m) } +func (*EventHandler_Action) ProtoMessage() {} +func (*EventHandler_Action) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2} +} +func (m *EventHandler_Action) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_Action.Unmarshal(m, b) +} +func (m *EventHandler_Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_Action.Marshal(b, m, deterministic) +} +func (dst *EventHandler_Action) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_EventHandler_Action.Merge(dst, src) +} +func (m *EventHandler_Action) XXX_Size() int { + return xxx_messageInfo_EventHandler_Action.Size(m) +} +func (m *EventHandler_Action) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_Action.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_Action proto.InternalMessageInfo + +func (m *EventHandler_Action) GetAction() EventHandler_Action_Type { + if m != nil { + return m.Action + } + return EventHandler_Action_START_WORKFLOW +} + +func (m *EventHandler_Action) GetStartWorkflow() *EventHandler_StartWorkflow { + if m != nil { + return m.StartWorkflow + } + return nil +} + +func (m *EventHandler_Action) GetCompleteTask() *EventHandler_TaskDetails { + if m != nil { + return m.CompleteTask + } + return nil +} + +func (m *EventHandler_Action) GetFailTask() *EventHandler_TaskDetails { + if m != nil { + return m.FailTask + } + return nil +} + +func (m *EventHandler_Action) GetExpandInlineJson() bool { + if m != nil { + return m.ExpandInlineJson + } + return false +} + +func init() { + proto.RegisterType((*EventHandler)(nil), "conductor.proto.EventHandler") + proto.RegisterType((*EventHandler_StartWorkflow)(nil), "conductor.proto.EventHandler.StartWorkflow") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventHandler.StartWorkflow.InputEntry") + proto.RegisterType((*EventHandler_TaskDetails)(nil), "conductor.proto.EventHandler.TaskDetails") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventHandler.TaskDetails.OutputEntry") + proto.RegisterType((*EventHandler_Action)(nil), "conductor.proto.EventHandler.Action") + proto.RegisterEnum("conductor.proto.EventHandler_Action_Type", EventHandler_Action_Type_name, EventHandler_Action_Type_value) +} + +func init() { + proto.RegisterFile("model/eventhandler.proto", fileDescriptor_eventhandler_d75293086a3c9db8) +} + +var fileDescriptor_eventhandler_d75293086a3c9db8 = []byte{ + // 665 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, + 0x18, 0x77, 0x7f, 0x61, 0x4f, 0xe9, 0x9c, 0x17, 0x42, 0xea, 0x24, 0x91, 0x10, 0x4d, 0x30, 0x92, + 0x36, 0x99, 0xd1, 0x28, 0x1a, 0x93, 0xa1, 0x23, 0x4e, 0x06, 0xc3, 0x63, 0x91, 0xc4, 0x37, 0xcd, + 0xad, 0xbd, 0x8d, 0xba, 0xee, 0x6e, 0x69, 0xaf, 0x83, 0x7d, 0x1e, 0x3f, 0x81, 0x9f, 0xc0, 0xf7, + 0x7e, 0x2a, 0x73, 0x77, 0x2d, 0x14, 0x34, 0x28, 0x89, 0xef, 0x9e, 0xbf, 0xbf, 0xe7, 0x77, 0xbf, + 0xe7, 0x69, 0xc1, 0x9a, 0x72, 0x9f, 0x86, 0x0e, 0x9d, 0x53, 0x26, 0x4e, 0x09, 0xf3, 0x43, 0x1a, + 0xd9, 0xb3, 0x88, 0x0b, 0x8e, 0xee, 0x7a, 0x9c, 0xf9, 0x89, 0x27, 0x78, 0x1a, 0x68, 0xae, 0x8f, + 0x39, 0x1f, 0x87, 0xd4, 0x51, 0xde, 0x30, 0x19, 0x39, 0xb1, 0x88, 0x12, 0x4f, 0xa4, 0xd9, 0xfb, + 0xd7, 0xb3, 0x84, 0x2d, 0x74, 0x6a, 0xf3, 0x67, 0x0d, 0x56, 0x3a, 0x72, 0xc0, 0x07, 0x3d, 0x00, + 0x21, 0x28, 0x33, 0x32, 0xa5, 0x56, 0x61, 0xa3, 0xb0, 0x55, 0xc3, 0xca, 0x46, 0xab, 0x50, 0x51, + 0x24, 0xac, 0xa2, 0x0a, 0x6a, 0x07, 0xad, 0x43, 0x4d, 0xd2, 0x08, 0x44, 0xc0, 0x99, 0x55, 0x52, + 0x99, 0xcb, 0x00, 0x7a, 0x0b, 0x4b, 0xc4, 0x93, 0x56, 0x6c, 0x95, 0x37, 0x4a, 0x5b, 0x46, 0xeb, + 0x91, 0x7d, 0x8d, 0xb4, 0x9d, 0x9f, 0x6b, 0xb7, 0x55, 0x31, 0xce, 0x9a, 0xd0, 0x1a, 0x54, 0xa5, + 0x39, 0xa7, 0x56, 0x65, 0xa3, 0xb0, 0xb5, 0x8c, 0x53, 0xaf, 0xf9, 0xa3, 0x08, 0xe6, 0xb1, 0x20, + 0x91, 0x38, 0xe1, 0xd1, 0x64, 0x14, 0xf2, 0xb3, 0x3f, 0x32, 0xb6, 0x60, 0x69, 0x4e, 0xa3, 0x58, + 0x32, 0x93, 0x9c, 0x2b, 0x38, 0x73, 0xd1, 0x63, 0xa8, 0x7b, 0x3c, 0x8a, 0x68, 0x48, 0xe4, 0x1c, + 0x37, 0xf0, 0x53, 0xea, 0x66, 0x2e, 0xda, 0xf5, 0x51, 0x0f, 0x2a, 0x01, 0x9b, 0x25, 0x22, 0x25, + 0xff, 0xe2, 0x66, 0xf2, 0x57, 0x08, 0xd9, 0x5d, 0xd9, 0xd8, 0x61, 0x22, 0x5a, 0x60, 0x0d, 0x82, + 0x5e, 0x81, 0xa9, 0x0c, 0x77, 0x4a, 0xe3, 0x98, 0x8c, 0xf5, 0x9b, 0x8c, 0xd6, 0xaa, 0xad, 0x17, + 0x63, 0x67, 0x8b, 0xb1, 0xdb, 0x6c, 0x81, 0x57, 0x54, 0xe9, 0x81, 0xae, 0x6c, 0x1e, 0x01, 0x5c, + 0xe2, 0xa1, 0x06, 
0x94, 0x26, 0x74, 0x91, 0x3e, 0x55, 0x9a, 0x68, 0x1b, 0x2a, 0x73, 0x12, 0x26, + 0x54, 0xbd, 0xd3, 0x68, 0xad, 0xfd, 0x06, 0xf9, 0x59, 0x66, 0xb1, 0x2e, 0xda, 0x29, 0xbe, 0x2c, + 0x34, 0xbf, 0x17, 0xc1, 0x18, 0x90, 0x78, 0xf2, 0x9e, 0x0a, 0x12, 0x84, 0x31, 0x7a, 0x08, 0xc6, + 0x59, 0x4a, 0x5d, 0xca, 0xa1, 0xb1, 0x21, 0x0b, 0x75, 0x7d, 0xb4, 0x09, 0xa6, 0x20, 0xf1, 0xc4, + 0x8d, 0xe8, 0xc8, 0x55, 0x4a, 0xeb, 0x33, 0x30, 0x64, 0x10, 0xd3, 0xd1, 0xa1, 0x14, 0xfc, 0x00, + 0xaa, 0x3c, 0x11, 0x52, 0xb0, 0x92, 0x12, 0xec, 0xf9, 0xcd, 0x82, 0xe5, 0xe6, 0xdb, 0x7d, 0xd5, + 0xa7, 0xf5, 0x4a, 0x41, 0xd0, 0x6b, 0xa8, 0x6b, 0xeb, 0x42, 0xb1, 0xf2, 0x0d, 0x8a, 0x99, 0xba, + 0x36, 0x93, 0xec, 0x13, 0x18, 0x39, 0xcc, 0xff, 0xa2, 0xd9, 0xb7, 0x12, 0x54, 0xf5, 0x85, 0xa2, + 0xb6, 0x3e, 0x4c, 0xce, 0x14, 0x62, 0xbd, 0xf5, 0xe4, 0x5f, 0xee, 0xda, 0x1e, 0x2c, 0x66, 0x14, + 0xa7, 0x8d, 0x08, 0x43, 0x3d, 0x96, 0x17, 0xe3, 0x66, 0x22, 0xa7, 0x44, 0x9e, 0xde, 0xe2, 0xca, + 0xb0, 0x19, 0x5f, 0xf9, 0x0a, 0x0e, 0xc1, 0xf4, 0xf8, 0x74, 0x16, 0x52, 0x41, 0x5d, 0xb9, 0x18, + 0x75, 0xd6, 0xc6, 0xdf, 0xd8, 0xe5, 0xf6, 0x80, 0x57, 0xb2, 0x7e, 0x19, 0x44, 0x7b, 0x50, 0x1b, + 0x91, 0x20, 0xd4, 0x58, 0xe5, 0xdb, 0x62, 0x2d, 0xcb, 0x5e, 0x85, 0xb3, 0x0d, 0x88, 0x9e, 0xcf, + 0x08, 0xf3, 0xdd, 0x80, 0x85, 0x01, 0xa3, 0xee, 0xd7, 0x98, 0xb3, 0xf4, 0x9b, 0x6e, 0xe8, 0x4c, + 0x57, 0x25, 0x3e, 0xc6, 0x9c, 0x6d, 0xbe, 0x81, 0xb2, 0x54, 0x0a, 0x21, 0xa8, 0x1f, 0x0f, 0xda, + 0x78, 0xe0, 0x9e, 0xf4, 0xf1, 0xfe, 0x5e, 0xaf, 0x7f, 0xd2, 0xb8, 0x83, 0xee, 0x81, 0xf9, 0xae, + 0x7f, 0x70, 0xd4, 0xeb, 0x0c, 0x3a, 0xee, 0xa0, 0x7d, 0xbc, 0xdf, 0x28, 0x20, 0x13, 0x6a, 0x7b, + 0xed, 0x6e, 0x4f, 0xbb, 0xc5, 0xdd, 0x00, 0x1e, 0x78, 0x7c, 0x6a, 0x33, 0x2a, 0x46, 0x61, 0x70, + 0x7e, 0x9d, 0xed, 0x6e, 0x3d, 0x4f, 0xf7, 0x68, 0xf8, 0x65, 0x67, 0x1c, 0x88, 0xd3, 0x64, 0x68, + 0x7b, 0x7c, 0xea, 0xa4, 0x3d, 0xce, 0x45, 0x8f, 0xe3, 0x85, 0x01, 0x65, 0xc2, 0x19, 0xf3, 0x71, + 0x34, 0xf3, 0x72, 0x71, 0xf5, 0x53, 0x1e, 0x56, 0x15, 
0xe4, 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x15, 0xb8, 0xa4, 0xd6, 0xa4, 0x05, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/polldata.pb.go b/polyglot-clients/gogrpc/conductor/model/polldata.pb.go new file mode 100644 index 0000000000..b2ba7ff6ff --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/polldata.pb.go @@ -0,0 +1,106 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/polldata.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type PollData struct { + QueueName string `protobuf:"bytes,1,opt,name=queue_name,json=queueName,proto3" json:"queue_name,omitempty"` + Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` + WorkerId string `protobuf:"bytes,3,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + LastPollTime int64 `protobuf:"varint,4,opt,name=last_poll_time,json=lastPollTime,proto3" json:"last_poll_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollData) Reset() { *m = PollData{} } +func (m *PollData) String() string { return proto.CompactTextString(m) } +func (*PollData) ProtoMessage() {} +func (*PollData) Descriptor() ([]byte, []int) { + return fileDescriptor_polldata_17cab9e308fb8d52, []int{0} +} +func (m *PollData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollData.Unmarshal(m, b) +} +func (m *PollData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollData.Marshal(b, m, deterministic) +} +func (dst *PollData) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollData.Merge(dst, src) +} +func (m *PollData) XXX_Size() int { + return xxx_messageInfo_PollData.Size(m) +} +func (m *PollData) XXX_DiscardUnknown() { + xxx_messageInfo_PollData.DiscardUnknown(m) +} + +var xxx_messageInfo_PollData proto.InternalMessageInfo + +func (m *PollData) GetQueueName() string { + if m != nil { + return m.QueueName + } + return "" +} + +func (m *PollData) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *PollData) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *PollData) GetLastPollTime() int64 { + if m != nil { + return m.LastPollTime + } + return 0 +} + +func init() { + proto.RegisterType((*PollData)(nil), "conductor.proto.PollData") +} + +func 
init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_17cab9e308fb8d52) } + +var fileDescriptor_polldata_17cab9e308fb8d52 = []byte{ + // 229 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x90, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0x86, 0x59, 0x2b, 0xa5, 0x3b, 0x88, 0x42, 0x10, 0x59, 0x28, 0x42, 0x11, 0x0f, 0x3d, 0x6d, + 0x0e, 0xde, 0x3c, 0x16, 0x2f, 0x5e, 0xa4, 0x14, 0x4f, 0x5e, 0x96, 0x6c, 0x32, 0xae, 0xc1, 0x4c, + 0x66, 0x8d, 0x13, 0xf4, 0x01, 0x7c, 0x70, 0xd9, 0xb4, 0x8a, 0x78, 0x9c, 0xef, 0xff, 0x99, 0xf9, + 0x18, 0x38, 0x27, 0x76, 0x18, 0xf4, 0xc8, 0x21, 0x38, 0x23, 0xa6, 0x1d, 0x13, 0x0b, 0xab, 0x33, + 0xcb, 0xd1, 0x65, 0x2b, 0x9c, 0xf6, 0xe0, 0xea, 0xab, 0x82, 0xc5, 0x96, 0x43, 0xb8, 0x33, 0x62, + 0xd4, 0x25, 0xc0, 0x5b, 0xc6, 0x8c, 0x5d, 0x34, 0x84, 0x4d, 0xb5, 0xaa, 0xd6, 0xf5, 0xae, 0x2e, + 0xe4, 0xc1, 0x10, 0xaa, 0x0b, 0x98, 0x3b, 0x26, 0xe3, 0x63, 0x73, 0x54, 0xa2, 0xc3, 0xa4, 0x96, + 0x50, 0x7f, 0x70, 0x7a, 0xc5, 0xd4, 0x79, 0xd7, 0xcc, 0x4a, 0xb4, 0xd8, 0x83, 0x7b, 0xa7, 0xae, + 0xe1, 0x34, 0x98, 0x77, 0xe9, 0x26, 0x91, 0x4e, 0x3c, 0x61, 0x73, 0xbc, 0xaa, 0xd6, 0xb3, 0xdd, + 0xc9, 0x44, 0xa7, 0xcb, 0x8f, 0x9e, 0x70, 0x83, 0xb0, 0xb4, 0x4c, 0x6d, 0x44, 0x79, 0x0e, 0xfe, + 0xb3, 0xfd, 0x67, 0xb9, 0x81, 0x1f, 0xc5, 0x6d, 0xff, 0x74, 0x3b, 0x78, 0x79, 0xc9, 0x7d, 0x6b, + 0x99, 0xf4, 0xa1, 0xaf, 0x7f, 0xfb, 0xda, 0x06, 0x8f, 0x51, 0xf4, 0xc0, 0x43, 0x1a, 0xed, 0x1f, + 0x5e, 0xbe, 0xd1, 0xcf, 0xcb, 0xba, 0x9b, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x37, 0x71, + 0xb0, 0x1d, 0x01, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go new file mode 100644 index 0000000000..5268688d53 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/rerunworkflowrequest.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: model/rerunworkflowrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type RerunWorkflowRequest struct { + ReRunFromWorkflowId string `protobuf:"bytes,1,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` + WorkflowInput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=workflow_input,json=workflowInput,proto3" json:"workflow_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ReRunFromTaskId string `protobuf:"bytes,3,opt,name=re_run_from_task_id,json=reRunFromTaskId,proto3" json:"re_run_from_task_id,omitempty"` + TaskInput map[string]*_struct.Value `protobuf:"bytes,4,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CorrelationId string `protobuf:"bytes,5,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RerunWorkflowRequest) Reset() { *m = RerunWorkflowRequest{} } +func (m *RerunWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RerunWorkflowRequest) ProtoMessage() {} +func 
(*RerunWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8, []int{0} +} +func (m *RerunWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RerunWorkflowRequest.Unmarshal(m, b) +} +func (m *RerunWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RerunWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RerunWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RerunWorkflowRequest.Merge(dst, src) +} +func (m *RerunWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RerunWorkflowRequest.Size(m) +} +func (m *RerunWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RerunWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RerunWorkflowRequest proto.InternalMessageInfo + +func (m *RerunWorkflowRequest) GetReRunFromWorkflowId() string { + if m != nil { + return m.ReRunFromWorkflowId + } + return "" +} + +func (m *RerunWorkflowRequest) GetWorkflowInput() map[string]*_struct.Value { + if m != nil { + return m.WorkflowInput + } + return nil +} + +func (m *RerunWorkflowRequest) GetReRunFromTaskId() string { + if m != nil { + return m.ReRunFromTaskId + } + return "" +} + +func (m *RerunWorkflowRequest) GetTaskInput() map[string]*_struct.Value { + if m != nil { + return m.TaskInput + } + return nil +} + +func (m *RerunWorkflowRequest) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func init() { + proto.RegisterType((*RerunWorkflowRequest)(nil), "conductor.proto.RerunWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.WorkflowInputEntry") +} + +func init() { + proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8) +} + +var 
fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8 = []byte{ + // 369 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4f, 0xef, 0xd2, 0x30, + 0x18, 0xc7, 0x33, 0xf8, 0x61, 0x42, 0x09, 0x60, 0x8a, 0x41, 0x82, 0x1e, 0x16, 0x13, 0x13, 0x0e, + 0xa4, 0x4b, 0x90, 0x03, 0xe1, 0x48, 0xa2, 0x09, 0x37, 0x33, 0x89, 0x1a, 0x2f, 0xcb, 0xd6, 0x75, + 0x73, 0xd9, 0xd6, 0x07, 0x9e, 0xb5, 0x22, 0xaf, 0xc0, 0xb7, 0x6d, 0xd6, 0x6d, 0x30, 0x91, 0x83, + 0x07, 0x6f, 0xeb, 0xb3, 0xef, 0x9f, 0xcf, 0x9e, 0x8e, 0xd8, 0x39, 0x84, 0x22, 0x73, 0x50, 0xa0, + 0x96, 0x67, 0xc0, 0x34, 0xca, 0xe0, 0x8c, 0xe2, 0xa4, 0x45, 0xa1, 0xd8, 0x11, 0x41, 0x01, 0x1d, + 0x73, 0x90, 0xa1, 0xe6, 0x0a, 0xb0, 0x1a, 0xcc, 0x5f, 0xc7, 0x00, 0x71, 0x26, 0x1c, 0x73, 0x0a, + 0x74, 0xe4, 0x14, 0x0a, 0x35, 0xaf, 0xe5, 0x6f, 0x7e, 0x3d, 0x91, 0x17, 0x6e, 0x99, 0xf6, 0xa5, + 0x4e, 0x73, 0xab, 0x34, 0xba, 0x26, 0x2f, 0x51, 0x78, 0xa8, 0xa5, 0x17, 0x21, 0xe4, 0x5e, 0x53, + 0xe6, 0x25, 0xe1, 0xcc, 0xb2, 0xad, 0x45, 0xdf, 0x9d, 0xa0, 0x70, 0xb5, 0xfc, 0x80, 0x90, 0x37, + 0xd6, 0x7d, 0x48, 0x3d, 0x32, 0xba, 0x29, 0xe5, 0x51, 0xab, 0x59, 0xc7, 0xee, 0x2e, 0x06, 0xab, + 0x0d, 0xbb, 0xc3, 0x62, 0x8f, 0x4a, 0xd9, 0x35, 0xa9, 0xb4, 0xbe, 0x97, 0x0a, 0x2f, 0xee, 0xf0, + 0xdc, 0x9e, 0xd1, 0x25, 0x99, 0xb4, 0xb1, 0x94, 0x5f, 0xa4, 0x25, 0x52, 0xd7, 0x20, 0x8d, 0xaf, + 0x48, 0x07, 0xbf, 0x48, 0xf7, 0x21, 0xfd, 0x44, 0x48, 0xa5, 0x30, 0x28, 0x4f, 0x06, 0x65, 0xfd, + 0x6f, 0x28, 0x26, 0xe1, 0x86, 0xd1, 0x57, 0xcd, 0x99, 0xbe, 0x25, 0x23, 0x0e, 0x88, 0x22, 0xf3, + 0x55, 0x02, 0xb2, 0x6c, 0xef, 0x99, 0xf6, 0x61, 0x6b, 0xba, 0x0f, 0xe7, 0x5f, 0x09, 0xfd, 0xfb, + 0x73, 0xe8, 0x73, 0xd2, 0x4d, 0xc5, 0xa5, 0x5e, 0x61, 0xf9, 0x48, 0x97, 0xa4, 0xf7, 0xc3, 0xcf, + 0xb4, 0x98, 0x75, 0x6c, 0x6b, 0x31, 0x58, 0x4d, 0x59, 0x75, 0x5f, 0xac, 0xb9, 0x2f, 0xf6, 0xb9, + 0x7c, 0xeb, 0x56, 0xa2, 0x6d, 0x67, 0x63, 0xcd, 0x0f, 0x64, 0xf4, 0x27, 0xdd, 0xff, 0x48, 0xdd, + 0x9d, 0xc8, 0x2b, 0x0e, 
0x39, 0x93, 0x42, 0x45, 0x59, 0xf2, 0xf3, 0x7e, 0x49, 0xbb, 0xe9, 0xa3, + 0x2d, 0x7d, 0x0c, 0xbe, 0x6d, 0xe3, 0x44, 0x7d, 0xd7, 0x01, 0xe3, 0x90, 0x3b, 0xb5, 0xd7, 0xb9, + 0x7a, 0x1d, 0x9e, 0x25, 0x42, 0x2a, 0x27, 0x86, 0x18, 0x8f, 0xbc, 0x35, 0x37, 0xbf, 0x71, 0xf0, + 0xcc, 0x44, 0xbf, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xce, 0x3a, 0x9b, 0x51, 0xd6, 0x02, 0x00, + 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go new file mode 100644 index 0000000000..9d1e094d8e --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/skiptaskrequest.pb.go @@ -0,0 +1,119 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/skiptaskrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SkipTaskRequest struct { + TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput,proto3" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskInputMessage *any.Any `protobuf:"bytes,3,opt,name=task_input_message,json=taskInputMessage,proto3" json:"task_input_message,omitempty"` + TaskOutputMessage *any.Any `protobuf:"bytes,4,opt,name=task_output_message,json=taskOutputMessage,proto3" json:"task_output_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } +func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } +func (*SkipTaskRequest) ProtoMessage() {} +func (*SkipTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_skiptaskrequest_fb745ec89a45d156, []int{0} +} +func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) +} +func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) +} +func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskRequest.Merge(dst, src) +} +func (m *SkipTaskRequest) XXX_Size() int { + return xxx_messageInfo_SkipTaskRequest.Size(m) +} +func (m *SkipTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo + +func (m *SkipTaskRequest) GetTaskInput() map[string]*_struct.Value { + if 
m != nil { + return m.TaskInput + } + return nil +} + +func (m *SkipTaskRequest) GetTaskOutput() map[string]*_struct.Value { + if m != nil { + return m.TaskOutput + } + return nil +} + +func (m *SkipTaskRequest) GetTaskInputMessage() *any.Any { + if m != nil { + return m.TaskInputMessage + } + return nil +} + +func (m *SkipTaskRequest) GetTaskOutputMessage() *any.Any { + if m != nil { + return m.TaskOutputMessage + } + return nil +} + +func init() { + proto.RegisterType((*SkipTaskRequest)(nil), "conductor.proto.SkipTaskRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskOutputEntry") +} + +func init() { + proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_fb745ec89a45d156) +} + +var fileDescriptor_skiptaskrequest_fb745ec89a45d156 = []byte{ + // 348 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xbd, 0x4e, 0xc3, 0x30, + 0x14, 0x85, 0x95, 0x06, 0x90, 0xea, 0x4a, 0xb4, 0x35, 0x08, 0x85, 0x94, 0xa1, 0x62, 0xea, 0x80, + 0x6c, 0x54, 0x16, 0xd4, 0x8d, 0x0a, 0x06, 0x06, 0xfe, 0x42, 0x61, 0x60, 0xa9, 0x12, 0xd7, 0x0d, + 0x51, 0x12, 0x3b, 0xc4, 0x36, 0x22, 0xcf, 0xcc, 0x4b, 0xa0, 0xd8, 0xfd, 0xc3, 0x48, 0x88, 0x81, + 0xcd, 0xbe, 0xf7, 0x9e, 0xef, 0x9e, 0x23, 0x1b, 0xf4, 0x72, 0x3e, 0xa3, 0x19, 0x16, 0x69, 0x52, + 0xc8, 0x50, 0xa4, 0x25, 0x7d, 0x53, 0x54, 0x48, 0x54, 0x94, 0x5c, 0x72, 0xd8, 0x26, 0x9c, 0xcd, + 0x14, 0x91, 0xbc, 0x34, 0x05, 0xff, 0x28, 0xe6, 0x3c, 0xce, 0x28, 0xd6, 0xb7, 0x48, 0xcd, 0xb1, + 0x90, 0xa5, 0x22, 0x8b, 0x71, 0xff, 0xd0, 0xee, 0x86, 0xac, 0x32, 0xad, 0xe3, 0x4f, 0x17, 0xb4, + 0x1f, 0xd3, 0xa4, 0x98, 0x84, 0x22, 0x0d, 0xcc, 0x0e, 0x78, 0x0b, 0x40, 0xbd, 0x72, 0x9a, 0xb0, + 0x42, 0x49, 0xcf, 0xe9, 0xbb, 0x83, 0xd6, 0x10, 0x23, 0x6b, 0x25, 0xb2, 0x54, 0xa8, 0x3e, 0x5f, + 0xd7, 0x8a, 0x2b, 0x26, 
0xcb, 0x2a, 0x68, 0xca, 0xe5, 0x1d, 0x3e, 0x80, 0x96, 0xe6, 0x71, 0x25, + 0x6b, 0x60, 0x43, 0x03, 0x4f, 0xff, 0x04, 0xbc, 0xd3, 0x12, 0x43, 0xd4, 0xa6, 0x4c, 0x01, 0x8e, + 0x01, 0x5c, 0x5b, 0x9c, 0xe6, 0x54, 0x88, 0x30, 0xa6, 0x9e, 0xdb, 0x77, 0x06, 0xad, 0xe1, 0x3e, + 0x32, 0x71, 0xd1, 0x32, 0x2e, 0xba, 0x60, 0x55, 0xd0, 0x59, 0xf9, 0xb9, 0x31, 0xd3, 0xf0, 0x12, + 0xec, 0x6d, 0xd8, 0x5a, 0x41, 0xb6, 0x7e, 0x81, 0x74, 0xd7, 0x16, 0x16, 0x14, 0x7f, 0x02, 0x76, + 0xbf, 0x27, 0x87, 0x1d, 0xe0, 0xa6, 0xb4, 0xf2, 0x9c, 0xbe, 0x33, 0x68, 0x06, 0xf5, 0x11, 0x9e, + 0x80, 0xed, 0xf7, 0x30, 0x53, 0xd4, 0x6b, 0x68, 0xf6, 0xc1, 0x0f, 0xf6, 0x73, 0xdd, 0x0d, 0xcc, + 0xd0, 0xa8, 0x71, 0xee, 0xf8, 0x4f, 0xa0, 0x6d, 0xc5, 0xff, 0x0f, 0xec, 0x38, 0x03, 0x3d, 0xc2, + 0x73, 0xc4, 0xa8, 0x9c, 0x67, 0xc9, 0x87, 0xfd, 0x02, 0xe3, 0xae, 0xf5, 0x04, 0xf7, 0xd1, 0xcb, + 0x28, 0x4e, 0xe4, 0xab, 0x8a, 0x10, 0xe1, 0x39, 0x5e, 0xc8, 0xf0, 0x4a, 0x86, 0x49, 0x96, 0x50, + 0x26, 0x71, 0xcc, 0xe3, 0xb2, 0x20, 0x1b, 0x75, 0xfd, 0x77, 0xa3, 0x1d, 0x4d, 0x3d, 0xfb, 0x0a, + 0x00, 0x00, 0xff, 0xff, 0xcb, 0xb6, 0xee, 0xfd, 0xcb, 0x02, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go b/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go new file mode 100644 index 0000000000..76ef278095 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/startworkflowrequest.pb.go @@ -0,0 +1,137 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/startworkflowrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StartWorkflowRequest struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskToDomain map[string]string `protobuf:"bytes,5,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowDef *WorkflowDef `protobuf:"bytes,6,opt,name=workflow_def,json=workflowDef,proto3" json:"workflow_def,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartWorkflowRequest) Reset() { *m = StartWorkflowRequest{} } +func (m *StartWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*StartWorkflowRequest) ProtoMessage() {} +func (*StartWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_startworkflowrequest_3ab5c2434a152277, []int{0} +} +func (m *StartWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartWorkflowRequest.Unmarshal(m, b) +} +func (m *StartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartWorkflowRequest.Marshal(b, m, 
deterministic) +} +func (dst *StartWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartWorkflowRequest.Merge(dst, src) +} +func (m *StartWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_StartWorkflowRequest.Size(m) +} +func (m *StartWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StartWorkflowRequest proto.InternalMessageInfo + +func (m *StartWorkflowRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StartWorkflowRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *StartWorkflowRequest) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *StartWorkflowRequest) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *StartWorkflowRequest) GetTaskToDomain() map[string]string { + if m != nil { + return m.TaskToDomain + } + return nil +} + +func (m *StartWorkflowRequest) GetWorkflowDef() *WorkflowDef { + if m != nil { + return m.WorkflowDef + } + return nil +} + +func init() { + proto.RegisterType((*StartWorkflowRequest)(nil), "conductor.proto.StartWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.StartWorkflowRequest.InputEntry") + proto.RegisterMapType((map[string]string)(nil), "conductor.proto.StartWorkflowRequest.TaskToDomainEntry") +} + +func init() { + proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_3ab5c2434a152277) +} + +var fileDescriptor_startworkflowrequest_3ab5c2434a152277 = []byte{ + // 396 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x51, 0xab, 0xd3, 0x30, + 0x14, 0x80, 0xe9, 0xed, 0xed, 0x95, 0x9b, 0x5e, 0xaf, 0x1a, 0x2e, 0xd7, 0x32, 0xf7, 0x50, 0x04, + 0xa1, 0x0f, 0x92, 0xca, 0x7c, 0x50, 0xf6, 0x32, 0x18, 0x53, 0xd8, 0xdb, 
0xa8, 0x43, 0x41, 0x90, + 0xd2, 0xa6, 0x69, 0x0d, 0x6d, 0x73, 0xb6, 0x34, 0xdd, 0xdc, 0x1f, 0xf6, 0x77, 0x48, 0xd3, 0xd6, + 0x95, 0x6d, 0x0f, 0xf7, 0x2d, 0xe7, 0x24, 0xdf, 0x97, 0x93, 0x73, 0x82, 0xdc, 0x12, 0x12, 0x56, + 0xf8, 0x95, 0x8a, 0xa4, 0xda, 0x83, 0xcc, 0xd3, 0x02, 0xf6, 0x92, 0x6d, 0x6b, 0x56, 0x29, 0xb2, + 0x91, 0xa0, 0x00, 0xbf, 0xa0, 0x20, 0x92, 0x9a, 0x2a, 0x90, 0x6d, 0x62, 0xf4, 0xba, 0x45, 0xfa, + 0xd3, 0x09, 0x4b, 0xbb, 0x8d, 0x71, 0x06, 0x90, 0x15, 0xcc, 0xd7, 0x51, 0x5c, 0xa7, 0x7e, 0xa5, + 0x64, 0x4d, 0x3b, 0xcf, 0xdb, 0xbf, 0x26, 0x7a, 0xf8, 0xd6, 0x5c, 0xf3, 0xa3, 0x03, 0x83, 0xf6, + 0x1a, 0x8c, 0xd1, 0xb5, 0x88, 0x4a, 0xe6, 0x18, 0xae, 0xe1, 0xdd, 0x06, 0x7a, 0x8d, 0x1d, 0xf4, + 0x6c, 0xc7, 0x64, 0xc5, 0x41, 0x38, 0x57, 0xae, 0xe1, 0x59, 0x41, 0x1f, 0xe2, 0x77, 0xe8, 0x9e, + 0x82, 0x94, 0xac, 0x88, 0x14, 0x07, 0x11, 0xf2, 0xc4, 0x31, 0x35, 0xf7, 0x7c, 0x90, 0x5d, 0x26, + 0xf8, 0x2b, 0xb2, 0xb8, 0xd8, 0xd4, 0xca, 0xb9, 0x76, 0x4d, 0xcf, 0x9e, 0x7c, 0x20, 0x27, 0xaf, + 0x20, 0x97, 0x4a, 0x21, 0xcb, 0x06, 0xf9, 0x22, 0x94, 0x3c, 0x04, 0x2d, 0x8e, 0x7f, 0xa1, 0x7b, + 0x15, 0x55, 0x79, 0xa8, 0x20, 0x4c, 0xa0, 0x8c, 0xb8, 0x70, 0x2c, 0x2d, 0xfc, 0xf4, 0x34, 0xe1, + 0x3a, 0xaa, 0xf2, 0x35, 0x2c, 0x34, 0xd9, 0x7a, 0xef, 0xd4, 0x20, 0x85, 0x67, 0xe8, 0xae, 0xef, + 0x63, 0x98, 0xb0, 0xd4, 0xb9, 0x71, 0x0d, 0xcf, 0x9e, 0x8c, 0xcf, 0xe4, 0xbd, 0x77, 0xc1, 0xd2, + 0xc0, 0xde, 0x1f, 0x83, 0xd1, 0x0a, 0xa1, 0x63, 0xd1, 0xf8, 0x25, 0x32, 0x73, 0x76, 0xe8, 0x3a, + 0xd9, 0x2c, 0xf1, 0x7b, 0x64, 0xed, 0xa2, 0xa2, 0x66, 0xba, 0x8d, 0xf6, 0xe4, 0x91, 0xb4, 0x33, + 0x22, 0xfd, 0x8c, 0xc8, 0xf7, 0x66, 0x37, 0x68, 0x0f, 0x4d, 0xaf, 0x3e, 0x1b, 0xa3, 0x19, 0x7a, + 0x75, 0x56, 0xf5, 0x05, 0xf1, 0xc3, 0x50, 0x7c, 0x3b, 0x10, 0xcc, 0xb7, 0xe8, 0x0d, 0x85, 0x92, + 0x08, 0xa6, 0xd2, 0x82, 0xff, 0x39, 0x7d, 0xca, 0xfc, 0xf1, 0x52, 0xa3, 0x56, 0xf1, 0xcf, 0x69, + 0xc6, 0xd5, 0xef, 0x3a, 0x26, 0x14, 0x4a, 0xbf, 0x63, 0xfd, 0xff, 0xac, 0x4f, 0x0b, 0xce, 0x84, + 0xf2, 0x33, 
0xc8, 0xe4, 0x86, 0x0e, 0xf2, 0xfa, 0x2f, 0xc6, 0x37, 0x5a, 0xfd, 0xf1, 0x5f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x26, 0x3f, 0xa7, 0x2d, 0xce, 0x02, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go b/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go new file mode 100644 index 0000000000..3a331c4ff2 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/subworkflowparams.pb.go @@ -0,0 +1,89 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/subworkflowparams.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SubWorkflowParams struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubWorkflowParams) Reset() { *m = SubWorkflowParams{} } +func (m *SubWorkflowParams) String() string { return proto.CompactTextString(m) } +func (*SubWorkflowParams) ProtoMessage() {} +func (*SubWorkflowParams) Descriptor() ([]byte, []int) { + return fileDescriptor_subworkflowparams_247aeccdfb62062e, []int{0} +} +func (m *SubWorkflowParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubWorkflowParams.Unmarshal(m, b) +} +func (m *SubWorkflowParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubWorkflowParams.Marshal(b, m, deterministic) +} +func (dst *SubWorkflowParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubWorkflowParams.Merge(dst, src) +} +func (m *SubWorkflowParams) XXX_Size() int { + return xxx_messageInfo_SubWorkflowParams.Size(m) +} +func (m *SubWorkflowParams) XXX_DiscardUnknown() { + xxx_messageInfo_SubWorkflowParams.DiscardUnknown(m) +} + +var xxx_messageInfo_SubWorkflowParams proto.InternalMessageInfo + +func (m *SubWorkflowParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SubWorkflowParams) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func init() { + proto.RegisterType((*SubWorkflowParams)(nil), "conductor.proto.SubWorkflowParams") +} + +func init() { + proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_247aeccdfb62062e) +} + +var fileDescriptor_subworkflowparams_247aeccdfb62062e = []byte{ + // 183 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0xe2, 0x92, 0xcd, 0xcd, 0x4f, 0x49, + 0xcd, 0xd1, 0x2f, 0x2e, 0x4d, 0x2a, 0xcf, 0x2f, 0xca, 0x4e, 0xcb, 0xc9, 0x2f, 0x2f, 0x48, 0x2c, + 0x4a, 0xcc, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, + 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x28, 0x39, 0x72, 0x09, 0x06, 0x97, 0x26, 0x85, 0x43, 0xd5, + 0x06, 0x80, 0xd5, 0x0a, 0x09, 0x71, 0xb1, 0xe4, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a, + 0x70, 0x06, 0x81, 0xd9, 0x42, 0x12, 0x5c, 0xec, 0x65, 0xa9, 0x45, 0xc5, 0x99, 0xf9, 0x79, 0x12, + 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x30, 0xae, 0x53, 0x1e, 0x97, 0x74, 0x72, 0x7e, 0xae, 0x5e, + 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x85, 0x1e, 0x9a, 0x0d, 0x4e, 0xc2, 0x18, 0xe6, 0x07, 0x24, + 0x45, 0x59, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0x35, 0xea, + 0xc3, 0x35, 0xea, 0x27, 0xe7, 0x64, 0xa6, 0xe6, 0x95, 0xe8, 0xa7, 0xe7, 0xa7, 0x17, 0x15, 0x24, + 0x23, 0x89, 0x83, 0xfd, 0x96, 0xc4, 0x06, 0x36, 0xd7, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x52, + 0x9c, 0xc5, 0x01, 0xeb, 0x00, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/task.pb.go b/polyglot-clients/gogrpc/conductor/model/task.pb.go new file mode 100644 index 0000000000..5dc07ae9dd --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/task.pb.go @@ -0,0 +1,422 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/task.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. 
+// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Task_Status int32 + +const ( + Task_IN_PROGRESS Task_Status = 0 + Task_CANCELED Task_Status = 1 + Task_FAILED Task_Status = 2 + Task_FAILED_WITH_TERMINAL_ERROR Task_Status = 3 + Task_COMPLETED Task_Status = 4 + Task_COMPLETED_WITH_ERRORS Task_Status = 5 + Task_SCHEDULED Task_Status = 6 + Task_TIMED_OUT Task_Status = 7 + Task_READY_FOR_RERUN Task_Status = 8 + Task_SKIPPED Task_Status = 9 +) + +var Task_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "CANCELED", + 2: "FAILED", + 3: "FAILED_WITH_TERMINAL_ERROR", + 4: "COMPLETED", + 5: "COMPLETED_WITH_ERRORS", + 6: "SCHEDULED", + 7: "TIMED_OUT", + 8: "READY_FOR_RERUN", + 9: "SKIPPED", +} +var Task_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "CANCELED": 1, + "FAILED": 2, + "FAILED_WITH_TERMINAL_ERROR": 3, + "COMPLETED": 4, + "COMPLETED_WITH_ERRORS": 5, + "SCHEDULED": 6, + "TIMED_OUT": 7, + "READY_FOR_RERUN": 8, + "SKIPPED": 9, +} + +func (x Task_Status) String() string { + return proto.EnumName(Task_Status_name, int32(x)) +} +func (Task_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_task_0f54bf88f0e3aec0, []int{0, 0} +} + +type Task struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + Status Task_Status `protobuf:"varint,2,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` + InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData,proto3" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName,proto3" json:"reference_task_name,omitempty"` + RetryCount int32 
`protobuf:"varint,5,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + Seq int32 `protobuf:"varint,6,opt,name=seq,proto3" json:"seq,omitempty"` + CorrelationId string `protobuf:"bytes,7,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + PollCount int32 `protobuf:"varint,8,opt,name=poll_count,json=pollCount,proto3" json:"poll_count,omitempty"` + TaskDefName string `protobuf:"bytes,9,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` + ScheduledTime int64 `protobuf:"varint,10,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` + StartTime int64 `protobuf:"varint,11,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,12,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + UpdateTime int64 `protobuf:"varint,13,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds,proto3" json:"start_delay_in_seconds,omitempty"` + RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId,proto3" json:"retried_task_id,omitempty"` + Retried bool `protobuf:"varint,16,opt,name=retried,proto3" json:"retried,omitempty"` + Executed bool `protobuf:"varint,17,opt,name=executed,proto3" json:"executed,omitempty"` + CallbackFromWorker bool `protobuf:"varint,18,opt,name=callback_from_worker,json=callbackFromWorker,proto3" json:"callback_from_worker,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,19,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` + WorkflowInstanceId string `protobuf:"bytes,20,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` + WorkflowType string `protobuf:"bytes,21,opt,name=workflow_type,json=workflowType,proto3" 
json:"workflow_type,omitempty"` + TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,23,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,24,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,25,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,26,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowTask *WorkflowTask `protobuf:"bytes,27,opt,name=workflow_task,json=workflowTask,proto3" json:"workflow_task,omitempty"` + Domain string `protobuf:"bytes,28,opt,name=domain,proto3" json:"domain,omitempty"` + InputMessage *any.Any `protobuf:"bytes,29,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` + OutputMessage *any.Any `protobuf:"bytes,30,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` + RateLimitPerSecond int32 `protobuf:"varint,31,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_task_0f54bf88f0e3aec0, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} 
+func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *Task) GetStatus() Task_Status { + if m != nil { + return m.Status + } + return Task_IN_PROGRESS +} + +func (m *Task) GetInputData() map[string]*_struct.Value { + if m != nil { + return m.InputData + } + return nil +} + +func (m *Task) GetReferenceTaskName() string { + if m != nil { + return m.ReferenceTaskName + } + return "" +} + +func (m *Task) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *Task) GetSeq() int32 { + if m != nil { + return m.Seq + } + return 0 +} + +func (m *Task) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *Task) GetPollCount() int32 { + if m != nil { + return m.PollCount + } + return 0 +} + +func (m *Task) GetTaskDefName() string { + if m != nil { + return m.TaskDefName + } + return "" +} + +func (m *Task) GetScheduledTime() int64 { + if m != nil { + return m.ScheduledTime + } + return 0 +} + +func (m *Task) GetStartTime() int64 { + if m != nil { + return m.StartTime + } + return 0 +} + +func (m *Task) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +func (m *Task) GetUpdateTime() int64 { + if m != nil { + return m.UpdateTime + } + return 0 +} + +func (m *Task) GetStartDelayInSeconds() int32 { + if m != nil { + return m.StartDelayInSeconds + } + return 0 +} + +func (m *Task) GetRetriedTaskId() string { + if m != nil { + return m.RetriedTaskId + } + return "" +} + +func (m *Task) GetRetried() bool { + if m != nil { + return m.Retried + } + return false +} + +func (m *Task) GetExecuted() bool { + if m != nil { + return 
m.Executed + } + return false +} + +func (m *Task) GetCallbackFromWorker() bool { + if m != nil { + return m.CallbackFromWorker + } + return false +} + +func (m *Task) GetResponseTimeoutSeconds() int32 { + if m != nil { + return m.ResponseTimeoutSeconds + } + return 0 +} + +func (m *Task) GetWorkflowInstanceId() string { + if m != nil { + return m.WorkflowInstanceId + } + return "" +} + +func (m *Task) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *Task) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *Task) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *Task) GetCallbackAfterSeconds() int64 { + if m != nil { + return m.CallbackAfterSeconds + } + return 0 +} + +func (m *Task) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *Task) GetOutputData() map[string]*_struct.Value { + if m != nil { + return m.OutputData + } + return nil +} + +func (m *Task) GetWorkflowTask() *WorkflowTask { + if m != nil { + return m.WorkflowTask + } + return nil +} + +func (m *Task) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *Task) GetInputMessage() *any.Any { + if m != nil { + return m.InputMessage + } + return nil +} + +func (m *Task) GetOutputMessage() *any.Any { + if m != nil { + return m.OutputMessage + } + return nil +} + +func (m *Task) GetRateLimitPerSecond() int32 { + if m != nil { + return m.RateLimitPerSecond + } + return 0 +} + +func init() { + proto.RegisterType((*Task)(nil), "conductor.proto.Task") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.InputDataEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.OutputDataEntry") + proto.RegisterEnum("conductor.proto.Task_Status", Task_Status_name, Task_Status_value) +} + +func init() { proto.RegisterFile("model/task.proto", 
fileDescriptor_task_0f54bf88f0e3aec0) } + +var fileDescriptor_task_0f54bf88f0e3aec0 = []byte{ + // 1004 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x73, 0xda, 0x46, + 0x14, 0x2d, 0xb1, 0xcd, 0xc7, 0xc5, 0x80, 0xb2, 0xb6, 0xf1, 0x1a, 0xdb, 0x31, 0xe3, 0xd6, 0x1d, + 0x1e, 0x3a, 0x90, 0x3a, 0x99, 0x4e, 0x9a, 0x3e, 0x61, 0x90, 0x1b, 0x4d, 0x6d, 0xc3, 0x08, 0x5c, + 0x4f, 0xfb, 0xb2, 0xb3, 0x96, 0x16, 0xa2, 0x41, 0xd2, 0xd2, 0xd5, 0xaa, 0x09, 0xbf, 0xa7, 0xbf, + 0xa3, 0xff, 0xad, 0xb3, 0xbb, 0x48, 0xa1, 0x4e, 0xa6, 0x4f, 0x7d, 0xdb, 0x3d, 0xe7, 0xdc, 0xcb, + 0xbd, 0x47, 0x7b, 0x2f, 0x60, 0x45, 0xdc, 0x67, 0x61, 0x4f, 0xd2, 0x64, 0xd1, 0x5d, 0x0a, 0x2e, + 0x39, 0x6a, 0x78, 0x3c, 0xf6, 0x53, 0x4f, 0x72, 0x61, 0x80, 0x16, 0x36, 0x92, 0x0f, 0x5c, 0x2c, + 0x66, 0x21, 0xff, 0xf0, 0x49, 0xda, 0x3a, 0x99, 0x73, 0x3e, 0x0f, 0x59, 0x4f, 0xdf, 0x1e, 0xd3, + 0x59, 0x2f, 0x91, 0x22, 0xf5, 0xe4, 0x9a, 0x3d, 0x7a, 0xca, 0xd2, 0x78, 0x65, 0xa8, 0xf3, 0xbf, + 0x6a, 0xb0, 0x3d, 0xa5, 0xc9, 0x02, 0x1d, 0x43, 0x45, 0xe5, 0x23, 0x72, 0xb5, 0x64, 0xb8, 0xd0, + 0x2e, 0x74, 0x2a, 0x6e, 0x59, 0x01, 0xd3, 0xd5, 0x92, 0xa1, 0xd7, 0x50, 0x4c, 0x24, 0x95, 0x69, + 0x82, 0x9f, 0xb5, 0x0b, 0x9d, 0xfa, 0xe5, 0x49, 0xf7, 0x49, 0x69, 0x5d, 0x95, 0xa3, 0x3b, 0xd1, + 0x1a, 0x77, 0xad, 0x45, 0x03, 0x80, 0x20, 0x5e, 0xa6, 0x92, 0xf8, 0x54, 0x52, 0xbc, 0xd5, 0xde, + 0xea, 0x54, 0x2f, 0xbf, 0xf9, 0x72, 0xa4, 0xa3, 0x74, 0x43, 0x2a, 0xa9, 0x1d, 0x4b, 0xb1, 0x72, + 0x2b, 0x41, 0x76, 0x47, 0x5d, 0xd8, 0x13, 0x6c, 0xc6, 0x04, 0x8b, 0x3d, 0x46, 0x74, 0x85, 0x31, + 0x8d, 0x18, 0xde, 0xd6, 0x15, 0x3e, 0xcf, 0x29, 0x95, 0xe5, 0x8e, 0x46, 0x0c, 0x9d, 0x41, 0x55, + 0x30, 0x29, 0x56, 0xc4, 0xe3, 0x69, 0x2c, 0xf1, 0x4e, 0xbb, 0xd0, 0xd9, 0x71, 0x41, 0x43, 0x03, + 0x85, 0x20, 0x0b, 0xb6, 0x12, 0xf6, 0x07, 0x2e, 0x6a, 0x42, 0x1d, 0xd1, 0x05, 0xd4, 0x3d, 0x2e, + 0x04, 0x0b, 0xa9, 0x0c, 0x78, 0x4c, 0x02, 0x1f, 0x97, 0x74, 0xf6, 0xda, 0x06, 0xea, 0xf8, 
0xe8, + 0x14, 0x60, 0xc9, 0xc3, 0x70, 0x9d, 0xb8, 0xac, 0xe3, 0x2b, 0x0a, 0x31, 0x79, 0xcf, 0xa1, 0xa6, + 0xcb, 0xf3, 0xd9, 0xcc, 0x94, 0x58, 0xd1, 0x49, 0xaa, 0x0a, 0x1c, 0xb2, 0x99, 0x2e, 0xee, 0x02, + 0xea, 0x89, 0xf7, 0x9e, 0xf9, 0x69, 0xc8, 0x7c, 0x22, 0x83, 0x88, 0x61, 0x68, 0x17, 0x3a, 0x5b, + 0x6e, 0x2d, 0x47, 0xa7, 0x41, 0xc4, 0xd4, 0x2f, 0x25, 0x92, 0x0a, 0x69, 0x24, 0x55, 0x2d, 0xa9, + 0x68, 0x44, 0xd3, 0x47, 0x50, 0x66, 0xf1, 0x3a, 0x7e, 0x57, 0x93, 0x25, 0x16, 0x9b, 0xc8, 0x33, + 0xa8, 0xa6, 0x4b, 0x9f, 0x4a, 0x66, 0xd8, 0x9a, 0x66, 0xc1, 0x40, 0x5a, 0xf0, 0x0a, 0x9a, 0x26, + 0xb5, 0xcf, 0x42, 0xba, 0x22, 0x41, 0x4c, 0x12, 0xa6, 0xbe, 0x48, 0x82, 0xeb, 0xba, 0xa1, 0x3d, + 0xcd, 0x0e, 0x15, 0xe9, 0xc4, 0x13, 0x43, 0xa1, 0x6f, 0xa1, 0xa1, 0x0c, 0x0c, 0x54, 0xd1, 0xaa, + 0xc5, 0xc0, 0xc7, 0x0d, 0xe3, 0xd0, 0x1a, 0x56, 0xee, 0x3b, 0x3e, 0xc2, 0x50, 0x5a, 0x03, 0xd8, + 0x6a, 0x17, 0x3a, 0x65, 0x37, 0xbb, 0xa2, 0x16, 0x94, 0xd9, 0x47, 0xe6, 0xa5, 0x92, 0xf9, 0xf8, + 0xb9, 0xa6, 0xf2, 0x3b, 0x7a, 0x09, 0xfb, 0x1e, 0x0d, 0xc3, 0x47, 0xea, 0x2d, 0xc8, 0x4c, 0xf0, + 0x88, 0xa8, 0xf7, 0xcd, 0x04, 0x46, 0x5a, 0x87, 0x32, 0xee, 0x5a, 0xf0, 0xe8, 0x41, 0x33, 0xe8, + 0x0d, 0x60, 0xc1, 0x92, 0x25, 0x8f, 0x13, 0xd3, 0x27, 0x4f, 0x65, 0xde, 0xc6, 0x9e, 0x6e, 0xa3, + 0x99, 0xf1, 0x53, 0x43, 0x67, 0x9d, 0xbc, 0x84, 0xfd, 0x6c, 0x7a, 0x48, 0x10, 0x27, 0x92, 0xaa, + 0x57, 0x15, 0xf8, 0x78, 0x5f, 0xb7, 0x83, 0x32, 0xce, 0x59, 0x53, 0x8e, 0x8f, 0xbe, 0x86, 0x5a, + 0x1e, 0xa1, 0x67, 0xe3, 0x40, 0x4b, 0x77, 0x33, 0x50, 0xcf, 0xc7, 0x21, 0x94, 0x32, 0x63, 0x9a, + 0x9a, 0x2e, 0x4a, 0xe3, 0xc8, 0x0f, 0x70, 0x28, 0x18, 0x4d, 0x78, 0x4c, 0x66, 0x5c, 0x90, 0x20, + 0xf6, 0x78, 0xb4, 0x0c, 0x99, 0x7a, 0x50, 0xf8, 0x50, 0x0b, 0x0f, 0x0c, 0x7d, 0xcd, 0x85, 0xb3, + 0x41, 0xa2, 0xd7, 0xd0, 0xcc, 0x3d, 0xa1, 0x33, 0xc9, 0x44, 0xde, 0x1f, 0xd6, 0x9f, 0x34, 0x77, + 0xac, 0xaf, 0xc8, 0xac, 0xbb, 0x63, 0xa8, 0x18, 0xef, 0x54, 0x21, 0x47, 0x66, 0x86, 0x0d, 0xe0, + 0xf8, 0xe8, 0x1a, 0xaa, 0x3c, 
0x95, 0xf9, 0x38, 0xb6, 0xf4, 0x38, 0x5e, 0x7c, 0x79, 0x1c, 0x47, + 0x5a, 0xf8, 0x69, 0x1e, 0x81, 0xe7, 0x00, 0xba, 0xda, 0x34, 0x84, 0x26, 0x0b, 0x7c, 0xdc, 0x2e, + 0x74, 0xaa, 0x97, 0xa7, 0x9f, 0x65, 0x7a, 0xc8, 0x1c, 0xa2, 0xc9, 0x62, 0xc3, 0x2f, 0xb5, 0x6c, + 0x9a, 0x50, 0xf4, 0x79, 0x44, 0x83, 0x18, 0x9f, 0x18, 0xbb, 0xcc, 0x0d, 0xfd, 0x08, 0x35, 0xb3, + 0x31, 0x22, 0x96, 0x24, 0x74, 0xce, 0xf0, 0xa9, 0xce, 0xbd, 0xdf, 0x35, 0x0b, 0xac, 0x9b, 0x2d, + 0xb0, 0x6e, 0x3f, 0x5e, 0xb9, 0xbb, 0x5a, 0x7a, 0x6b, 0x94, 0xe8, 0x27, 0xa8, 0xaf, 0xdb, 0xcb, + 0x62, 0x5f, 0xfc, 0x47, 0x6c, 0xcd, 0x68, 0xb3, 0xe0, 0xef, 0xe1, 0x40, 0xa8, 0xa1, 0x09, 0x83, + 0x28, 0x90, 0x64, 0x99, 0xdb, 0x8d, 0xcf, 0xf4, 0x6b, 0x42, 0x8a, 0xbc, 0x51, 0xdc, 0x38, 0x33, + 0xbb, 0x35, 0x85, 0xfa, 0xbf, 0x97, 0x96, 0x5a, 0x2c, 0x0b, 0xb6, 0x5a, 0xef, 0x4e, 0x75, 0x44, + 0xdf, 0xc1, 0xce, 0x9f, 0x34, 0x4c, 0x99, 0xde, 0x9a, 0xd5, 0xcb, 0xe6, 0x67, 0xa5, 0xfc, 0xaa, + 0x58, 0xd7, 0x88, 0xde, 0x3e, 0x7b, 0x53, 0x68, 0xdd, 0x43, 0xe3, 0x89, 0xf7, 0xff, 0x47, 0xda, + 0xf3, 0xbf, 0x0b, 0x50, 0x34, 0xcb, 0x19, 0x35, 0xa0, 0xea, 0xdc, 0x91, 0xb1, 0x3b, 0xfa, 0xd9, + 0xb5, 0x27, 0x13, 0xeb, 0x2b, 0xb4, 0x0b, 0xe5, 0x41, 0xff, 0x6e, 0x60, 0xdf, 0xd8, 0x43, 0xab, + 0x80, 0x00, 0x8a, 0xd7, 0x7d, 0x47, 0x9d, 0x9f, 0xa1, 0x17, 0xd0, 0x32, 0x67, 0xf2, 0xe0, 0x4c, + 0xdf, 0x91, 0xa9, 0xed, 0xde, 0x3a, 0x77, 0xfd, 0x1b, 0x62, 0xbb, 0xee, 0xc8, 0xb5, 0xb6, 0x50, + 0x0d, 0x2a, 0x83, 0xd1, 0xed, 0xf8, 0xc6, 0x9e, 0xda, 0x43, 0x6b, 0x1b, 0x1d, 0xc1, 0x41, 0x7e, + 0x35, 0x11, 0x5a, 0x38, 0xb1, 0x76, 0x94, 0x72, 0x32, 0x78, 0x67, 0x0f, 0xef, 0x55, 0xe2, 0xa2, + 0xba, 0x4e, 0x9d, 0x5b, 0x7b, 0x48, 0x46, 0xf7, 0x53, 0xab, 0x84, 0xf6, 0xa0, 0xe1, 0xda, 0xfd, + 0xe1, 0x6f, 0xe4, 0x7a, 0xe4, 0x12, 0xd7, 0x76, 0xef, 0xef, 0xac, 0x32, 0xaa, 0x42, 0x69, 0xf2, + 0x8b, 0x33, 0x1e, 0xdb, 0x43, 0xab, 0x72, 0x45, 0xe1, 0xd8, 0xe3, 0x51, 0x37, 0x66, 0x72, 0x16, + 0x06, 0x1f, 0x9f, 0xbe, 0xb4, 0xab, 0xa2, 0x7a, 0x54, 0xe3, 0xc7, 
0xdf, 0xdf, 0xce, 0x03, 0xf9, + 0x3e, 0x7d, 0xec, 0x7a, 0x3c, 0xea, 0xad, 0xb5, 0xbd, 0x5c, 0xdb, 0xf3, 0xc2, 0x80, 0xc5, 0xb2, + 0x37, 0xe7, 0x73, 0xb1, 0xf4, 0x36, 0x70, 0xfd, 0xa7, 0xfa, 0x58, 0xd4, 0xa9, 0x5e, 0xfd, 0x13, + 0x00, 0x00, 0xff, 0xff, 0xe9, 0xb6, 0x2c, 0x87, 0x87, 0x07, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go b/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go new file mode 100644 index 0000000000..a51239e06e --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/taskdef.pb.go @@ -0,0 +1,254 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/taskdef.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskDef_RetryLogic int32 + +const ( + TaskDef_FIXED TaskDef_RetryLogic = 0 + TaskDef_EXPONENTIAL_BACKOFF TaskDef_RetryLogic = 1 +) + +var TaskDef_RetryLogic_name = map[int32]string{ + 0: "FIXED", + 1: "EXPONENTIAL_BACKOFF", +} +var TaskDef_RetryLogic_value = map[string]int32{ + "FIXED": 0, + "EXPONENTIAL_BACKOFF": 1, +} + +func (x TaskDef_RetryLogic) String() string { + return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) +} +func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0, 0} +} + +type TaskDef_TimeoutPolicy int32 + +const ( + TaskDef_RETRY TaskDef_TimeoutPolicy = 0 + TaskDef_TIME_OUT_WF TaskDef_TimeoutPolicy = 1 + TaskDef_ALERT_ONLY TaskDef_TimeoutPolicy = 2 +) + +var TaskDef_TimeoutPolicy_name = map[int32]string{ + 0: "RETRY", + 1: "TIME_OUT_WF", + 2: "ALERT_ONLY", +} +var TaskDef_TimeoutPolicy_value = map[string]int32{ + "RETRY": 0, + "TIME_OUT_WF": 1, + "ALERT_ONLY": 2, +} + +func (x TaskDef_TimeoutPolicy) String() string { + return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) +} +func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0, 1} +} + +type TaskDef struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + RetryCount int32 `protobuf:"varint,3,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + TimeoutSeconds int64 `protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` + InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys,proto3" json:"input_keys,omitempty"` + OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys,proto3" json:"output_keys,omitempty"` + TimeoutPolicy 
TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,proto3,enum=conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` + RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,proto3,enum=conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` + RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds,proto3" json:"retry_delay_seconds,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` + ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit,proto3" json:"concurrent_exec_limit,omitempty"` + InputTemplate map[string]*_struct.Value `protobuf:"bytes,12,rep,name=input_template,json=inputTemplate,proto3" json:"input_template,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + RateLimitPerSecond int32 `protobuf:"varint,13,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskDef) Reset() { *m = TaskDef{} } +func (m *TaskDef) String() string { return proto.CompactTextString(m) } +func (*TaskDef) ProtoMessage() {} +func (*TaskDef) Descriptor() ([]byte, []int) { + return fileDescriptor_taskdef_dbc1866a3715b3e8, []int{0} +} +func (m *TaskDef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskDef.Unmarshal(m, b) +} +func (m *TaskDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskDef.Marshal(b, m, deterministic) +} +func (dst *TaskDef) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskDef.Merge(dst, src) +} +func (m *TaskDef) XXX_Size() int { + return xxx_messageInfo_TaskDef.Size(m) +} +func (m 
*TaskDef) XXX_DiscardUnknown() { + xxx_messageInfo_TaskDef.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskDef proto.InternalMessageInfo + +func (m *TaskDef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TaskDef) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TaskDef) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *TaskDef) GetTimeoutSeconds() int64 { + if m != nil { + return m.TimeoutSeconds + } + return 0 +} + +func (m *TaskDef) GetInputKeys() []string { + if m != nil { + return m.InputKeys + } + return nil +} + +func (m *TaskDef) GetOutputKeys() []string { + if m != nil { + return m.OutputKeys + } + return nil +} + +func (m *TaskDef) GetTimeoutPolicy() TaskDef_TimeoutPolicy { + if m != nil { + return m.TimeoutPolicy + } + return TaskDef_RETRY +} + +func (m *TaskDef) GetRetryLogic() TaskDef_RetryLogic { + if m != nil { + return m.RetryLogic + } + return TaskDef_FIXED +} + +func (m *TaskDef) GetRetryDelaySeconds() int32 { + if m != nil { + return m.RetryDelaySeconds + } + return 0 +} + +func (m *TaskDef) GetResponseTimeoutSeconds() int32 { + if m != nil { + return m.ResponseTimeoutSeconds + } + return 0 +} + +func (m *TaskDef) GetConcurrentExecLimit() int32 { + if m != nil { + return m.ConcurrentExecLimit + } + return 0 +} + +func (m *TaskDef) GetInputTemplate() map[string]*_struct.Value { + if m != nil { + return m.InputTemplate + } + return nil +} + +func (m *TaskDef) GetRateLimitPerSecond() int32 { + if m != nil { + return m.RateLimitPerSecond + } + return 0 +} + +func init() { + proto.RegisterType((*TaskDef)(nil), "conductor.proto.TaskDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskDef.InputTemplateEntry") + proto.RegisterEnum("conductor.proto.TaskDef_RetryLogic", TaskDef_RetryLogic_name, TaskDef_RetryLogic_value) + proto.RegisterEnum("conductor.proto.TaskDef_TimeoutPolicy", 
TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) +} + +func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_dbc1866a3715b3e8) } + +var fileDescriptor_taskdef_dbc1866a3715b3e8 = []byte{ + // 593 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x53, 0x51, 0x6f, 0xd3, 0x4c, + 0x10, 0xac, 0x9b, 0xa6, 0xfd, 0xb2, 0xf9, 0x92, 0x86, 0x8b, 0x5a, 0xac, 0x02, 0xc2, 0x2a, 0x12, + 0x44, 0x02, 0xd9, 0x10, 0x5e, 0xaa, 0xf2, 0xd4, 0x36, 0x8e, 0x14, 0x35, 0x6d, 0x22, 0x63, 0xa0, + 0xe5, 0xc5, 0x72, 0x2e, 0x1b, 0x63, 0xc5, 0xf6, 0x59, 0xe7, 0x33, 0xaa, 0xff, 0x23, 0x3f, 0x0a, + 0xdd, 0xd9, 0x69, 0xd3, 0xa0, 0xbe, 0xdd, 0xcd, 0xcc, 0xcd, 0xee, 0x8e, 0xd7, 0xd0, 0x8d, 0xd9, + 0x1c, 0x23, 0x4b, 0xf8, 0xd9, 0x72, 0x8e, 0x0b, 0x33, 0xe5, 0x4c, 0x30, 0xb2, 0x4f, 0x59, 0x32, + 0xcf, 0xa9, 0x60, 0xbc, 0x04, 0x8e, 0x5e, 0x06, 0x8c, 0x05, 0x11, 0x5a, 0xea, 0x36, 0xcb, 0x17, + 0x56, 0x26, 0x78, 0x4e, 0x45, 0xc9, 0x1e, 0xff, 0xd9, 0x85, 0x3d, 0xd7, 0xcf, 0x96, 0x03, 0x5c, + 0x10, 0x02, 0x3b, 0x89, 0x1f, 0xa3, 0xae, 0x19, 0x5a, 0xaf, 0xe1, 0xa8, 0x33, 0x31, 0xa0, 0x39, + 0xc7, 0x8c, 0xf2, 0x30, 0x15, 0x21, 0x4b, 0xf4, 0x6d, 0x45, 0xad, 0x43, 0xe4, 0x35, 0x34, 0x39, + 0x0a, 0x5e, 0x78, 0x94, 0xe5, 0x89, 0xd0, 0x6b, 0x86, 0xd6, 0xab, 0x3b, 0xa0, 0xa0, 0x0b, 0x89, + 0x90, 0x77, 0xb0, 0x2f, 0xc2, 0x18, 0x59, 0x2e, 0xbc, 0x0c, 0x65, 0x77, 0x99, 0xbe, 0x63, 0x68, + 0xbd, 0x9a, 0xd3, 0xae, 0xe0, 0xaf, 0x25, 0x4a, 0x5e, 0x01, 0x84, 0x49, 0x9a, 0x0b, 0x6f, 0x89, + 0x45, 0xa6, 0xd7, 0x8d, 0x5a, 0xaf, 0xe1, 0x34, 0x14, 0x72, 0x89, 0x45, 0x26, 0x0b, 0xb1, 0x5c, + 0xdc, 0xf3, 0xbb, 0x8a, 0x87, 0x12, 0x52, 0x82, 0x2b, 0x58, 0x39, 0x7a, 0x29, 0x8b, 0x42, 0x5a, + 0xe8, 0x7b, 0x86, 0xd6, 0x6b, 0xf7, 0xdf, 0x9a, 0x1b, 0x99, 0x98, 0xd5, 0xc4, 0xa6, 0x5b, 0xca, + 0xa7, 0x4a, 0xed, 0xb4, 0xc4, 0xfa, 0x95, 0x0c, 0x56, 0x83, 0x45, 0x2c, 0x08, 0xa9, 0xfe, 0x9f, + 0xf2, 0x7a, 0xf3, 0xa4, 0x97, 0x23, 0xb5, 0x63, 0x29, 0xad, 
0xa6, 0x57, 0x67, 0x62, 0x42, 0xb7, + 0x74, 0x99, 0x63, 0xe4, 0x17, 0xf7, 0x09, 0x34, 0x54, 0x4c, 0xcf, 0x14, 0x35, 0x90, 0xcc, 0x2a, + 0x84, 0x13, 0xd0, 0x39, 0x66, 0x29, 0x4b, 0x32, 0xf4, 0x36, 0x63, 0x03, 0xf5, 0xe8, 0x70, 0xc5, + 0xbb, 0x8f, 0xe3, 0xeb, 0xc3, 0x01, 0x65, 0x09, 0xcd, 0x39, 0xc7, 0x44, 0x78, 0x78, 0x87, 0xd4, + 0x8b, 0xc2, 0x38, 0x14, 0x7a, 0x53, 0x3d, 0xeb, 0x3e, 0x90, 0xf6, 0x1d, 0xd2, 0xb1, 0xa4, 0x88, + 0x03, 0xed, 0x32, 0x72, 0x81, 0x71, 0x1a, 0xf9, 0x02, 0xf5, 0xff, 0x8d, 0x5a, 0xaf, 0xd9, 0x7f, + 0xff, 0xe4, 0x98, 0x23, 0x29, 0x77, 0x2b, 0xb5, 0x9d, 0x08, 0x5e, 0x38, 0xad, 0x70, 0x1d, 0x23, + 0x9f, 0xe0, 0x80, 0xfb, 0x02, 0xcb, 0xe2, 0x5e, 0x8a, 0xbc, 0xea, 0x5f, 0x6f, 0xa9, 0x3e, 0x88, + 0x24, 0x55, 0xf5, 0x29, 0xf2, 0xb2, 0xf7, 0xa3, 0x1b, 0x20, 0xff, 0xfa, 0x92, 0x0e, 0xd4, 0x96, + 0x58, 0x54, 0xeb, 0x28, 0x8f, 0xe4, 0x03, 0xd4, 0x7f, 0xfb, 0x51, 0x8e, 0x6a, 0x0f, 0x9b, 0xfd, + 0x43, 0xb3, 0xdc, 0x6d, 0x73, 0xb5, 0xdb, 0xe6, 0x77, 0xc9, 0x3a, 0xa5, 0xe8, 0x74, 0xfb, 0x44, + 0x3b, 0xfe, 0x08, 0xf0, 0xf0, 0x61, 0x48, 0x03, 0xea, 0xc3, 0xd1, 0x8d, 0x3d, 0xe8, 0x6c, 0x91, + 0xe7, 0xd0, 0xb5, 0x6f, 0xa6, 0x93, 0x6b, 0xfb, 0xda, 0x1d, 0x9d, 0x8d, 0xbd, 0xf3, 0xb3, 0x8b, + 0xcb, 0xc9, 0x70, 0xd8, 0xd1, 0x8e, 0xbf, 0x40, 0xeb, 0xd1, 0x5a, 0xc8, 0x47, 0x8e, 0xed, 0x3a, + 0xb7, 0x9d, 0x2d, 0xb2, 0x0f, 0x4d, 0x77, 0x74, 0x65, 0x7b, 0x93, 0x6f, 0xae, 0xf7, 0x63, 0xd8, + 0xd1, 0x48, 0x1b, 0xe0, 0x6c, 0x6c, 0x3b, 0xae, 0x37, 0xb9, 0x1e, 0xdf, 0x76, 0xb6, 0xcf, 0xe7, + 0xf0, 0x82, 0xb2, 0xd8, 0x4c, 0x50, 0x2c, 0xa2, 0xf0, 0x6e, 0x33, 0xc4, 0xf3, 0x46, 0x95, 0xe2, + 0x74, 0xf6, 0xf3, 0x34, 0x08, 0xc5, 0xaf, 0x7c, 0x66, 0x52, 0x16, 0x5b, 0x95, 0xdc, 0xba, 0x97, + 0x5b, 0x34, 0x0a, 0x31, 0x11, 0x56, 0xc0, 0x02, 0x9e, 0xd2, 0x35, 0x5c, 0xfd, 0xf1, 0xb3, 0x5d, + 0xe5, 0xf6, 0xf9, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc2, 0xd8, 0x2b, 0x35, 0x01, 0x04, 0x00, + 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go 
b/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go new file mode 100644 index 0000000000..f8c999074d --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/taskexeclog.pb.go @@ -0,0 +1,98 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/taskexeclog.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskExecLog struct { + Log string `protobuf:"bytes,1,opt,name=log,proto3" json:"log,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + CreatedTime int64 `protobuf:"varint,3,opt,name=created_time,json=createdTime,proto3" json:"created_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskExecLog) Reset() { *m = TaskExecLog{} } +func (m *TaskExecLog) String() string { return proto.CompactTextString(m) } +func (*TaskExecLog) ProtoMessage() {} +func (*TaskExecLog) Descriptor() ([]byte, []int) { + return fileDescriptor_taskexeclog_e9c8274b44d54689, []int{0} +} +func (m *TaskExecLog) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskExecLog.Unmarshal(m, b) +} +func (m *TaskExecLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskExecLog.Marshal(b, m, deterministic) +} +func (dst *TaskExecLog) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_TaskExecLog.Merge(dst, src) +} +func (m *TaskExecLog) XXX_Size() int { + return xxx_messageInfo_TaskExecLog.Size(m) +} +func (m *TaskExecLog) XXX_DiscardUnknown() { + xxx_messageInfo_TaskExecLog.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskExecLog proto.InternalMessageInfo + +func (m *TaskExecLog) GetLog() string { + if m != nil { + return m.Log + } + return "" +} + +func (m *TaskExecLog) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *TaskExecLog) GetCreatedTime() int64 { + if m != nil { + return m.CreatedTime + } + return 0 +} + +func init() { + proto.RegisterType((*TaskExecLog)(nil), "conductor.proto.TaskExecLog") +} + +func init() { + proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_e9c8274b44d54689) +} + +var fileDescriptor_taskexeclog_e9c8274b44d54689 = []byte{ + // 205 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0x31, 0x4b, 0xc4, 0x40, + 0x10, 0x85, 0x89, 0x81, 0x13, 0xf7, 0x14, 0x65, 0x9b, 0x0b, 0xd8, 0x9c, 0x56, 0x57, 0xed, 0x16, + 0x76, 0x96, 0x07, 0x16, 0x82, 0x85, 0x84, 0x54, 0x5a, 0x84, 0x64, 0x76, 0xdc, 0x2c, 0xd9, 0xcd, + 0x84, 0xcd, 0x04, 0xf2, 0xf3, 0x25, 0x31, 0x48, 0xb8, 0x6e, 0xe6, 0x83, 0xf7, 0x3e, 0x9e, 0x38, + 0x04, 0x32, 0xe8, 0x35, 0x57, 0x43, 0x8b, 0x13, 0x82, 0x27, 0xab, 0xfa, 0x48, 0x4c, 0xf2, 0x1e, + 0xa8, 0x33, 0x23, 0x30, 0xc5, 0x3f, 0xf0, 0xfc, 0x2d, 0xf6, 0x45, 0x35, 0xb4, 0x6f, 0x13, 0xc2, + 0x07, 0x59, 0xf9, 0x20, 0x52, 0x4f, 0x36, 0x4b, 0x8e, 0xc9, 0xe9, 0x26, 0x9f, 0x4f, 0x79, 0x10, + 0xd7, 0x73, 0x4d, 0xe9, 0x4c, 0x76, 0xb5, 0xd0, 0xdd, 0xfc, 0xbe, 0x1b, 0xf9, 0x24, 0x6e, 0x21, + 0x62, 0xc5, 0x68, 0x4a, 0x76, 0x01, 0xb3, 0xf4, 0x98, 0x9c, 0xd2, 0x7c, 0xbf, 0xb2, 0xc2, 0x05, + 0x3c, 0x37, 0xe2, 0x11, 0x28, 0xa8, 0x0e, 0xf9, 0xc7, 0xbb, 0x49, 0x5d, 0xb8, 0xcf, 0x77, 0x1b, + 0xf3, 0x67, 0xfd, 0xf5, 0x6a, 0x1d, 0x37, 0x63, 0xad, 0x80, 0x82, 0x5e, 0x23, 0xfa, 0x3f, 0xa2, + 0xc1, 0x3b, 
0xec, 0x58, 0x5b, 0xb2, 0xb1, 0x87, 0x0d, 0x5f, 0x96, 0xd6, 0xbb, 0xa5, 0xf1, 0xe5, + 0x37, 0x00, 0x00, 0xff, 0xff, 0x78, 0x61, 0x87, 0x8e, 0xf9, 0x00, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go b/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go new file mode 100644 index 0000000000..26eb4017f9 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/taskresult.pb.go @@ -0,0 +1,192 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/taskresult.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskResult_Status int32 + +const ( + TaskResult_IN_PROGRESS TaskResult_Status = 0 + TaskResult_FAILED TaskResult_Status = 1 + TaskResult_FAILED_WITH_TERMINAL_ERROR TaskResult_Status = 2 + TaskResult_COMPLETED TaskResult_Status = 3 + TaskResult_SCHEDULED TaskResult_Status = 4 +) + +var TaskResult_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "FAILED", + 2: "FAILED_WITH_TERMINAL_ERROR", + 3: "COMPLETED", + 4: "SCHEDULED", +} +var TaskResult_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "FAILED": 1, + "FAILED_WITH_TERMINAL_ERROR": 2, + "COMPLETED": 3, + "SCHEDULED": 4, +} + +func (x TaskResult_Status) String() string { + return proto.EnumName(TaskResult_Status_name, int32(x)) +} +func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0, 0} +} + +type TaskResult struct { + WorkflowInstanceId string `protobuf:"bytes,1,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + Status TaskResult_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.TaskResult_Status" json:"status,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + OutputMessage *any.Any 
`protobuf:"bytes,8,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskResult) Reset() { *m = TaskResult{} } +func (m *TaskResult) String() string { return proto.CompactTextString(m) } +func (*TaskResult) ProtoMessage() {} +func (*TaskResult) Descriptor() ([]byte, []int) { + return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0} +} +func (m *TaskResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskResult.Unmarshal(m, b) +} +func (m *TaskResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskResult.Marshal(b, m, deterministic) +} +func (dst *TaskResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskResult.Merge(dst, src) +} +func (m *TaskResult) XXX_Size() int { + return xxx_messageInfo_TaskResult.Size(m) +} +func (m *TaskResult) XXX_DiscardUnknown() { + xxx_messageInfo_TaskResult.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskResult proto.InternalMessageInfo + +func (m *TaskResult) GetWorkflowInstanceId() string { + if m != nil { + return m.WorkflowInstanceId + } + return "" +} + +func (m *TaskResult) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *TaskResult) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *TaskResult) GetCallbackAfterSeconds() int64 { + if m != nil { + return m.CallbackAfterSeconds + } + return 0 +} + +func (m *TaskResult) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *TaskResult) GetStatus() TaskResult_Status { + if m != nil { + return m.Status + } + return TaskResult_IN_PROGRESS +} + +func (m *TaskResult) GetOutputData() map[string]*_struct.Value { + if m != nil { + return m.OutputData + } + return nil +} + +func (m *TaskResult) GetOutputMessage() *any.Any { + if m != 
nil { + return m.OutputMessage + } + return nil +} + +func init() { + proto.RegisterType((*TaskResult)(nil), "conductor.proto.TaskResult") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskResult.OutputDataEntry") + proto.RegisterEnum("conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) +} + +func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_ccaec941f8ac2f31) } + +var fileDescriptor_taskresult_ccaec941f8ac2f31 = []byte{ + // 517 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xdf, 0x6e, 0xda, 0x30, + 0x14, 0xc6, 0x17, 0xa0, 0x69, 0x39, 0xac, 0x05, 0x59, 0x8c, 0x66, 0x74, 0x9a, 0x10, 0x57, 0x48, + 0x9b, 0x92, 0x89, 0x4d, 0xd3, 0xc4, 0xae, 0x68, 0x49, 0xd7, 0x48, 0x50, 0x90, 0xa1, 0x9b, 0xb4, + 0x9b, 0xc8, 0x38, 0x26, 0x8b, 0x08, 0x31, 0xb2, 0x9d, 0x75, 0x3c, 0xf0, 0xde, 0x63, 0x4a, 0x1c, + 0xda, 0x8a, 0x49, 0xbd, 0xf3, 0x39, 0xbf, 0xef, 0x7c, 0x3a, 0x7f, 0x64, 0x68, 0x6d, 0x78, 0xc0, + 0x62, 0x47, 0x11, 0xb9, 0x16, 0x4c, 0xa6, 0xb1, 0xb2, 0xb7, 0x82, 0x2b, 0x8e, 0xea, 0x94, 0x27, + 0x41, 0x4a, 0x15, 0x17, 0x3a, 0xd1, 0x7e, 0x13, 0x72, 0x1e, 0xc6, 0xcc, 0xc9, 0xa3, 0x65, 0xba, + 0x72, 0xa4, 0x12, 0x29, 0x2d, 0xe4, 0xed, 0xd7, 0x87, 0x94, 0x24, 0x3b, 0x8d, 0xba, 0x7f, 0x2b, + 0x00, 0x0b, 0x22, 0xd7, 0x38, 0xb7, 0x47, 0x1f, 0xa0, 0x79, 0xcf, 0xc5, 0x7a, 0x15, 0xf3, 0x7b, + 0x3f, 0x4a, 0xa4, 0x22, 0x09, 0x65, 0x7e, 0x14, 0x58, 0x46, 0xc7, 0xe8, 0x55, 0x31, 0xda, 0x33, + 0xaf, 0x40, 0x5e, 0x80, 0xce, 0xe1, 0x38, 0x6b, 0x2f, 0x13, 0x95, 0x72, 0x91, 0x99, 0x85, 0x5e, + 0x80, 0x3e, 0xc3, 0xb9, 0x60, 0x44, 0xf2, 0xc4, 0x5f, 0x71, 0xe1, 0x47, 0x09, 0xe5, 0x9b, 0x6d, + 0xcc, 0x54, 0xc4, 0x13, 0xab, 0x9c, 0x0b, 0x5f, 0x69, 0x7c, 0xcd, 0x85, 0xf7, 0x04, 0xa2, 0x4f, + 0xd0, 0xa2, 0x24, 0x8e, 0x97, 0x84, 0xae, 0x7d, 0xb2, 0x52, 0x4c, 0xf8, 0x92, 0x65, 0xe3, 0x4a, + 0xab, 0xd2, 0x31, 0x7a, 0x65, 0xdc, 0xdc, 0xd3, 0x61, 
0x06, 0xe7, 0x9a, 0xa1, 0x0b, 0xa8, 0x66, + 0xcd, 0x31, 0x91, 0x35, 0x72, 0x94, 0xfb, 0x9f, 0xe8, 0x84, 0x17, 0xa0, 0x01, 0x98, 0x52, 0x11, + 0x95, 0x4a, 0xcb, 0xec, 0x18, 0xbd, 0xb3, 0x7e, 0xd7, 0x3e, 0xd8, 0x9f, 0xfd, 0xb8, 0x02, 0x7b, + 0x9e, 0x2b, 0x71, 0x51, 0x81, 0xc6, 0x50, 0xe3, 0xa9, 0xda, 0xa6, 0xca, 0x0f, 0x88, 0x22, 0xd6, + 0x71, 0xa7, 0xdc, 0xab, 0xf5, 0xdf, 0x3d, 0x67, 0x30, 0xcd, 0xe5, 0x23, 0xa2, 0x88, 0x9b, 0x28, + 0xb1, 0xc3, 0xc0, 0x1f, 0x12, 0xe8, 0x2b, 0x9c, 0x15, 0x6e, 0x1b, 0x26, 0x25, 0x09, 0x99, 0x75, + 0xd2, 0x31, 0x7a, 0xb5, 0x7e, 0xd3, 0xd6, 0x27, 0xb2, 0xf7, 0x27, 0xb2, 0x87, 0xc9, 0x0e, 0x9f, + 0x6a, 0xed, 0x44, 0x4b, 0xdb, 0x77, 0x50, 0x3f, 0xf0, 0x46, 0x0d, 0x28, 0xaf, 0xd9, 0xae, 0x38, + 0x4f, 0xf6, 0x44, 0xef, 0xe1, 0xe8, 0x37, 0x89, 0x53, 0x96, 0x5f, 0xa3, 0xd6, 0x6f, 0xfd, 0x67, + 0xfc, 0x3d, 0xa3, 0x58, 0x8b, 0x06, 0xa5, 0x2f, 0x46, 0x97, 0x82, 0xa9, 0x67, 0x46, 0x75, 0xa8, + 0x79, 0xb7, 0xfe, 0x0c, 0x4f, 0xbf, 0x61, 0x77, 0x3e, 0x6f, 0xbc, 0x40, 0x00, 0xe6, 0xf5, 0xd0, + 0x1b, 0xbb, 0xa3, 0x86, 0x81, 0xde, 0x42, 0x5b, 0xbf, 0xfd, 0x1f, 0xde, 0xe2, 0xc6, 0x5f, 0xb8, + 0x78, 0xe2, 0xdd, 0x0e, 0xc7, 0xbe, 0x8b, 0xf1, 0x14, 0x37, 0x4a, 0xe8, 0x14, 0xaa, 0x57, 0xd3, + 0xc9, 0x6c, 0xec, 0x2e, 0xdc, 0x51, 0xa3, 0x9c, 0x85, 0xf3, 0xab, 0x1b, 0x77, 0x74, 0x97, 0x55, + 0x57, 0x2e, 0x43, 0xb8, 0xa0, 0x7c, 0x63, 0x27, 0x4c, 0xad, 0xe2, 0xe8, 0xcf, 0xe1, 0xfa, 0x2e, + 0x5f, 0x3e, 0xee, 0x6f, 0xb6, 0xfc, 0x39, 0x08, 0x23, 0xf5, 0x2b, 0x5d, 0xda, 0x94, 0x6f, 0x9c, + 0xa2, 0xc2, 0x79, 0xa8, 0x70, 0x68, 0x1c, 0xb1, 0x44, 0x39, 0x21, 0x0f, 0xc5, 0x96, 0x3e, 0xc9, + 0xe7, 0x7f, 0x65, 0x69, 0xe6, 0x86, 0x1f, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xff, 0xd5, 0x82, + 0xee, 0x3b, 0x03, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go b/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go new file mode 100644 index 0000000000..0127dd8f70 --- /dev/null +++ 
b/polyglot-clients/gogrpc/conductor/model/tasksummary.pb.go @@ -0,0 +1,217 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/tasksummary.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskSummary struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + WorkflowType string `protobuf:"bytes,2,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + ScheduledTime string `protobuf:"bytes,4,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Status Task_Status `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 
`protobuf:"varint,10,opt,name=execution_time,json=executionTime,proto3" json:"execution_time,omitempty"` + QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime,proto3" json:"queue_wait_time,omitempty"` + TaskDefName string `protobuf:"bytes,12,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` + TaskType string `protobuf:"bytes,13,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + Input string `protobuf:"bytes,14,opt,name=input,proto3" json:"input,omitempty"` + Output string `protobuf:"bytes,15,opt,name=output,proto3" json:"output,omitempty"` + TaskId string `protobuf:"bytes,16,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskSummary) Reset() { *m = TaskSummary{} } +func (m *TaskSummary) String() string { return proto.CompactTextString(m) } +func (*TaskSummary) ProtoMessage() {} +func (*TaskSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_tasksummary_ab439d130c50da04, []int{0} +} +func (m *TaskSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskSummary.Unmarshal(m, b) +} +func (m *TaskSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskSummary.Marshal(b, m, deterministic) +} +func (dst *TaskSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskSummary.Merge(dst, src) +} +func (m *TaskSummary) XXX_Size() int { + return xxx_messageInfo_TaskSummary.Size(m) +} +func (m *TaskSummary) XXX_DiscardUnknown() { + xxx_messageInfo_TaskSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskSummary proto.InternalMessageInfo + +func (m *TaskSummary) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *TaskSummary) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *TaskSummary) 
GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *TaskSummary) GetScheduledTime() string { + if m != nil { + return m.ScheduledTime + } + return "" +} + +func (m *TaskSummary) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *TaskSummary) GetUpdateTime() string { + if m != nil { + return m.UpdateTime + } + return "" +} + +func (m *TaskSummary) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +func (m *TaskSummary) GetStatus() Task_Status { + if m != nil { + return m.Status + } + return Task_IN_PROGRESS +} + +func (m *TaskSummary) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *TaskSummary) GetExecutionTime() int64 { + if m != nil { + return m.ExecutionTime + } + return 0 +} + +func (m *TaskSummary) GetQueueWaitTime() int64 { + if m != nil { + return m.QueueWaitTime + } + return 0 +} + +func (m *TaskSummary) GetTaskDefName() string { + if m != nil { + return m.TaskDefName + } + return "" +} + +func (m *TaskSummary) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *TaskSummary) GetInput() string { + if m != nil { + return m.Input + } + return "" +} + +func (m *TaskSummary) GetOutput() string { + if m != nil { + return m.Output + } + return "" +} + +func (m *TaskSummary) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func init() { + proto.RegisterType((*TaskSummary)(nil), "conductor.proto.TaskSummary") +} + +func init() { + proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab439d130c50da04) +} + +var fileDescriptor_tasksummary_ab439d130c50da04 = []byte{ + // 446 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xcf, 0x8b, 0x13, 0x31, + 0x14, 0xc7, 0xa9, 0xbb, 0xdb, 0x1f, 0xaf, 0x3b, 0xed, 0x32, 0xa8, 0x1d, 0x5d, 0x65, 0xcb, 
0x8a, + 0xd2, 0xd3, 0x14, 0x54, 0x3c, 0x78, 0x5c, 0x44, 0xe8, 0x45, 0xa4, 0x5b, 0x10, 0xbc, 0x0c, 0x69, + 0xf2, 0xa6, 0x0d, 0x9d, 0x24, 0x63, 0x26, 0xa1, 0xdb, 0x3f, 0xcf, 0xff, 0x4c, 0xf2, 0x32, 0x5b, + 0xcb, 0x1e, 0xf3, 0xf9, 0x7e, 0xf2, 0x92, 0xf7, 0x12, 0x98, 0x28, 0x23, 0xb0, 0x9a, 0x3b, 0xd6, + 0xec, 0x1a, 0xaf, 0x14, 0xb3, 0x87, 0xbc, 0xb6, 0xc6, 0x99, 0x74, 0xcc, 0x8d, 0x16, 0x9e, 0x3b, + 0x63, 0x23, 0x78, 0x7d, 0xf5, 0xdf, 0x8c, 0xe4, 0xf6, 0xef, 0x39, 0x0c, 0x57, 0xac, 0xd9, 0xdd, + 0xc7, 0x8d, 0xe9, 0x0d, 0x0c, 0xf7, 0xc6, 0xee, 0xca, 0xca, 0xec, 0x0b, 0x29, 0xb2, 0xce, 0xb4, + 0x33, 0x1b, 0x2c, 0xe1, 0x11, 0x2d, 0x44, 0xfa, 0x0e, 0x92, 0xa3, 0xe0, 0x0e, 0x35, 0x66, 0xcf, + 0x48, 0xb9, 0x7c, 0x84, 0xab, 0x43, 0x8d, 0xe9, 0x7b, 0x18, 0x71, 0x63, 0x2d, 0x56, 0xcc, 0x49, + 0xa3, 0x43, 0xa1, 0x33, 0xb2, 0x92, 0x13, 0xba, 0x10, 0x41, 0x6b, 0xf8, 0x16, 0x85, 0xaf, 0x50, + 0x14, 0x4e, 0x2a, 0xcc, 0xce, 0xa3, 0x76, 0xa4, 0x2b, 0xa9, 0x30, 0x7d, 0x0b, 0xd0, 0x38, 0x66, + 0x5d, 0x54, 0x2e, 0x48, 0x19, 0x10, 0xa1, 0xf8, 0x06, 0x86, 0xbe, 0x16, 0xcc, 0x61, 0xcc, 0xbb, + 0xf1, 0xca, 0x11, 0x91, 0xf0, 0x0a, 0xfa, 0xa8, 0xdb, 0x03, 0x7a, 0x94, 0xf6, 0x50, 0xc7, 0xd2, + 0x9f, 0xa1, 0xdb, 0x38, 0xe6, 0x7c, 0x93, 0xf5, 0xa7, 0x9d, 0xd9, 0xe8, 0xe3, 0x9b, 0xfc, 0xc9, + 0xc8, 0xf2, 0x30, 0x9c, 0xfc, 0x9e, 0x9c, 0x65, 0xeb, 0xa6, 0x5f, 0x60, 0x62, 0x91, 0x35, 0x46, + 0x17, 0xa5, 0xb1, 0x85, 0xd4, 0xdc, 0xa8, 0xba, 0xc2, 0xd0, 0x54, 0x36, 0xa0, 0xfa, 0x2f, 0x62, + 0xfc, 0xdd, 0xd8, 0xc5, 0x49, 0x18, 0xfa, 0xc5, 0x07, 0xe4, 0x9e, 0x86, 0x42, 0xd7, 0x81, 0x69, + 0x67, 0x76, 0xb6, 0x4c, 0x8e, 0x94, 0x2e, 0xf5, 0x01, 0xc6, 0x7f, 0x3c, 0x7a, 0x2c, 0xf6, 0x4c, + 0xb6, 0x4d, 0x0f, 0xa3, 0x47, 0xf8, 0x17, 0x93, 0xb1, 0xf1, 0x5b, 0x48, 0xc2, 0x4b, 0x16, 0x02, + 0xcb, 0x42, 0x33, 0x85, 0xd9, 0x25, 0x1d, 0x3e, 0x0c, 0xf0, 0x1b, 0x96, 0x3f, 0x98, 0xc2, 0xf4, + 0x1a, 0x06, 0xe4, 0xd0, 0x53, 0x25, 0x94, 0xf7, 0x03, 0xa0, 0x67, 0x7a, 0x0e, 0x17, 0x52, 0xd7, + 0xde, 0x65, 0x23, 0x0a, 0xe2, 
0x22, 0x7d, 0x09, 0x5d, 0xe3, 0x5d, 0xc0, 0x63, 0xc2, 0xed, 0x2a, + 0x9d, 0x40, 0x8f, 0x4a, 0x49, 0x91, 0x5d, 0xc5, 0x20, 0x2c, 0x17, 0xe2, 0x6e, 0x0b, 0xd7, 0xdc, + 0xa8, 0x5c, 0xa3, 0x2b, 0x2b, 0xf9, 0xf0, 0x74, 0x82, 0x77, 0xc9, 0xc9, 0xff, 0xfa, 0xb9, 0xfe, + 0xfd, 0x75, 0x23, 0xdd, 0xd6, 0xaf, 0x73, 0x6e, 0xd4, 0xbc, 0xdd, 0x32, 0x3f, 0x6e, 0x99, 0xf3, + 0x4a, 0xa2, 0x76, 0xf3, 0x8d, 0xd9, 0xd8, 0x9a, 0x9f, 0x70, 0xfa, 0xb8, 0xeb, 0x2e, 0x55, 0xfc, + 0xf4, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x32, 0xdb, 0x34, 0x28, 0xf2, 0x02, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/workflow.pb.go b/polyglot-clients/gogrpc/conductor/model/workflow.pb.go new file mode 100644 index 0000000000..d0c963b5a9 --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/workflow.pb.go @@ -0,0 +1,289 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflow.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Workflow_WorkflowStatus int32 + +const ( + Workflow_RUNNING Workflow_WorkflowStatus = 0 + Workflow_COMPLETED Workflow_WorkflowStatus = 1 + Workflow_FAILED Workflow_WorkflowStatus = 2 + Workflow_TIMED_OUT Workflow_WorkflowStatus = 3 + Workflow_TERMINATED Workflow_WorkflowStatus = 4 + Workflow_PAUSED Workflow_WorkflowStatus = 5 +) + +var Workflow_WorkflowStatus_name = map[int32]string{ + 0: "RUNNING", + 1: "COMPLETED", + 2: "FAILED", + 3: "TIMED_OUT", + 4: "TERMINATED", + 5: "PAUSED", +} +var Workflow_WorkflowStatus_value = map[string]int32{ + "RUNNING": 0, + "COMPLETED": 1, + "FAILED": 2, + "TIMED_OUT": 3, + "TERMINATED": 4, + "PAUSED": 5, +} + +func (x Workflow_WorkflowStatus) String() string { + return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) +} +func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_d126dd6e3df866dc, []int{0, 0} +} + +type Workflow struct { + Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId,proto3" json:"parent_workflow_id,omitempty"` + ParentWorkflowTaskId string `protobuf:"bytes,5,opt,name=parent_workflow_task_id,json=parentWorkflowTaskId,proto3" json:"parent_workflow_task_id,omitempty"` + Tasks []*Task `protobuf:"bytes,6,rep,name=tasks,proto3" json:"tasks,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,8,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Output map[string]*_struct.Value 
`protobuf:"bytes,9,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowType string `protobuf:"bytes,10,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,11,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,12,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + ReRunFromWorkflowId string `protobuf:"bytes,13,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,14,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + SchemaVersion int32 `protobuf:"varint,15,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + Event string `protobuf:"bytes,16,opt,name=event,proto3" json:"event,omitempty"` + TaskToDomain map[string]string `protobuf:"bytes,17,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + FailedReferenceTaskNames []string `protobuf:"bytes,18,rep,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` + WorkflowDefinition *WorkflowDef `protobuf:"bytes,19,opt,name=workflow_definition,json=workflowDefinition,proto3" json:"workflow_definition,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Workflow) Reset() { *m = Workflow{} } +func (m *Workflow) String() string { return proto.CompactTextString(m) } +func (*Workflow) ProtoMessage() {} +func (*Workflow) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_d126dd6e3df866dc, []int{0} +} +func (m *Workflow) XXX_Unmarshal(b 
[]byte) error { + return xxx_messageInfo_Workflow.Unmarshal(m, b) +} +func (m *Workflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Workflow.Marshal(b, m, deterministic) +} +func (dst *Workflow) XXX_Merge(src proto.Message) { + xxx_messageInfo_Workflow.Merge(dst, src) +} +func (m *Workflow) XXX_Size() int { + return xxx_messageInfo_Workflow.Size(m) +} +func (m *Workflow) XXX_DiscardUnknown() { + xxx_messageInfo_Workflow.DiscardUnknown(m) +} + +var xxx_messageInfo_Workflow proto.InternalMessageInfo + +func (m *Workflow) GetStatus() Workflow_WorkflowStatus { + if m != nil { + return m.Status + } + return Workflow_RUNNING +} + +func (m *Workflow) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +func (m *Workflow) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *Workflow) GetParentWorkflowId() string { + if m != nil { + return m.ParentWorkflowId + } + return "" +} + +func (m *Workflow) GetParentWorkflowTaskId() string { + if m != nil { + return m.ParentWorkflowTaskId + } + return "" +} + +func (m *Workflow) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *Workflow) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *Workflow) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func (m *Workflow) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *Workflow) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *Workflow) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *Workflow) GetReRunFromWorkflowId() string { + if m != nil { + return m.ReRunFromWorkflowId + } + return "" +} + +func (m *Workflow) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} 
+ +func (m *Workflow) GetSchemaVersion() int32 { + if m != nil { + return m.SchemaVersion + } + return 0 +} + +func (m *Workflow) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *Workflow) GetTaskToDomain() map[string]string { + if m != nil { + return m.TaskToDomain + } + return nil +} + +func (m *Workflow) GetFailedReferenceTaskNames() []string { + if m != nil { + return m.FailedReferenceTaskNames + } + return nil +} + +func (m *Workflow) GetWorkflowDefinition() *WorkflowDef { + if m != nil { + return m.WorkflowDefinition + } + return nil +} + +func init() { + proto.RegisterType((*Workflow)(nil), "conductor.proto.Workflow") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.InputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.OutputEntry") + proto.RegisterMapType((map[string]string)(nil), "conductor.proto.Workflow.TaskToDomainEntry") + proto.RegisterEnum("conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) +} + +func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_d126dd6e3df866dc) } + +var fileDescriptor_workflow_d126dd6e3df866dc = []byte{ + // 727 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0x5f, 0x4f, 0xe3, 0x46, + 0x14, 0xc5, 0x6b, 0xb2, 0x09, 0xe4, 0x86, 0x64, 0xbd, 0x43, 0xb6, 0xb8, 0xec, 0x4a, 0x8d, 0xb6, + 0x5d, 0xc9, 0xd2, 0x22, 0x47, 0x4a, 0xff, 0xa8, 0x42, 0x42, 0x2d, 0x34, 0xa1, 0xb2, 0x44, 0x42, + 0x30, 0x06, 0xa4, 0xbe, 0x58, 0x8e, 0x3d, 0x0e, 0x56, 0xec, 0x99, 0x68, 0x3c, 0x86, 0xe6, 0x6b, + 0xf6, 0x13, 0x55, 0x33, 0x63, 0x13, 0x07, 0x9a, 0xb7, 0x7d, 0xcb, 0xdc, 0xfb, 0x3b, 0x27, 0x77, + 0xce, 0xcc, 0x18, 0xba, 0x29, 0x0d, 0x71, 0xd2, 0x7f, 0xa2, 0x6c, 0x11, 0x25, 0xf4, 0xc9, 0x5a, + 0x32, 0xca, 0x29, 0x7a, 0x1b, 0x50, 0x12, 0xe6, 0x01, 0xa7, 0x4c, 0x15, 0x8e, 0x0e, 0x37, 0xb1, + 0x10, 0x47, 
0x45, 0x43, 0x57, 0x0d, 0xee, 0x67, 0x8b, 0xa2, 0xf2, 0x71, 0x4e, 0xe9, 0x3c, 0xc1, + 0x7d, 0xb9, 0x9a, 0xe5, 0x51, 0x3f, 0xe3, 0x2c, 0x0f, 0xb8, 0xea, 0x7e, 0xfa, 0xb7, 0x09, 0x7b, + 0xf7, 0x85, 0x0b, 0xfa, 0x03, 0x1a, 0x19, 0xf7, 0x79, 0x9e, 0x19, 0x5a, 0x4f, 0x33, 0x3b, 0x03, + 0xd3, 0x7a, 0xf1, 0xbf, 0x56, 0x89, 0x3e, 0xff, 0xb8, 0x91, 0xbc, 0x53, 0xe8, 0xd0, 0x77, 0xb0, + 0x87, 0x49, 0xe8, 0xf1, 0x38, 0xc5, 0xc6, 0x4e, 0x4f, 0x33, 0x6b, 0xce, 0x2e, 0x26, 0xa1, 0x1b, + 0xa7, 0x18, 0x7d, 0x0f, 0xad, 0x72, 0x5c, 0x2f, 0x0e, 0x8d, 0x5a, 0x4f, 0x33, 0x9b, 0x0e, 0x94, + 0x25, 0x3b, 0x44, 0xc7, 0x80, 0x96, 0x3e, 0xc3, 0x84, 0x7b, 0x55, 0xee, 0x8d, 0xe4, 0x74, 0xd5, + 0xb9, 0x5f, 0xd3, 0xbf, 0xc0, 0xe1, 0x4b, 0x5a, 0x6c, 0x5a, 0x48, 0xea, 0x52, 0xd2, 0xdd, 0x94, + 0xb8, 0x7e, 0xb6, 0xb0, 0x43, 0xf4, 0x05, 0xea, 0x02, 0xcb, 0x8c, 0x46, 0xaf, 0x66, 0xb6, 0x06, + 0xef, 0x5f, 0xed, 0x50, 0x70, 0x8e, 0x62, 0xd0, 0x09, 0xd4, 0x63, 0xb2, 0xcc, 0xb9, 0xb1, 0x27, + 0xe1, 0x1f, 0xb7, 0xc7, 0x61, 0x0b, 0x6c, 0x44, 0x38, 0x5b, 0x39, 0x4a, 0x82, 0x4e, 0xa1, 0x41, + 0x73, 0x2e, 0xc4, 0x4d, 0x29, 0xfe, 0xbc, 0x5d, 0x7c, 0x25, 0x39, 0xa5, 0x2e, 0x44, 0xe8, 0x07, + 0x68, 0xaf, 0xf7, 0xb5, 0x5a, 0x62, 0x03, 0xe4, 0xa6, 0xf6, 0xcb, 0xa2, 0xbb, 0x5a, 0x62, 0x64, + 0xc0, 0xee, 0x23, 0x66, 0x59, 0x4c, 0x89, 0xd1, 0xea, 0x69, 0x66, 0xdd, 0x29, 0x97, 0xe8, 0x33, + 0x74, 0x02, 0xca, 0x18, 0x4e, 0x7c, 0x1e, 0x53, 0x22, 0x42, 0xd9, 0x97, 0xfa, 0x76, 0xa5, 0x6a, + 0x87, 0xe8, 0x67, 0x38, 0x64, 0xd8, 0x63, 0x39, 0xf1, 0x22, 0x46, 0xd3, 0x8d, 0xdc, 0xdb, 0x92, + 0x3f, 0x60, 0xd8, 0xc9, 0xc9, 0x05, 0xa3, 0x69, 0x25, 0xfa, 0x5f, 0x85, 0xca, 0xcf, 0x28, 0xf1, + 0x22, 0xca, 0xbc, 0x98, 0x04, 0x34, 0x5d, 0x26, 0x58, 0x58, 0x1a, 0x1d, 0xa9, 0x7a, 0xaf, 0xda, + 0x17, 0x94, 0xd9, 0x95, 0xa6, 0x18, 0x2a, 0x0b, 0x1e, 0x70, 0xea, 0x7b, 0xe5, 0xd4, 0x6f, 0xe5, + 0xd4, 0x6d, 0x55, 0xbd, 0x2b, 0x66, 0xef, 0x42, 0x1d, 0x3f, 0x62, 0xc2, 0x0d, 0x5d, 0x9a, 0xa9, + 0x05, 0xba, 0x86, 0x8e, 0x3c, 0x5f, 0x4e, 0xbd, 
0x90, 0xa6, 0x7e, 0x4c, 0x8c, 0x77, 0x32, 0xd7, + 0x2f, 0xdb, 0x73, 0x15, 0x47, 0xe9, 0xd2, 0xa1, 0xa4, 0x55, 0xba, 0xfb, 0xbc, 0x52, 0x42, 0xa7, + 0xf0, 0x21, 0xf2, 0xe3, 0x04, 0x87, 0x1e, 0xc3, 0x11, 0x66, 0x98, 0x04, 0x58, 0xdd, 0x21, 0xe2, + 0xa7, 0x38, 0x33, 0x50, 0xaf, 0x66, 0x36, 0x1d, 0x43, 0x21, 0x4e, 0x49, 0x08, 0xd3, 0x89, 0xe8, + 0xa3, 0x31, 0x1c, 0x3c, 0x07, 0x16, 0xe2, 0x28, 0x26, 0xb1, 0x8c, 0xe0, 0xa0, 0xa7, 0x99, 0xad, + 0xc1, 0xc7, 0xad, 0x63, 0x0d, 0x71, 0xe4, 0xa0, 0xa7, 0xf5, 0xa2, 0xd0, 0x1d, 0x4d, 0x01, 0xd6, + 0xb7, 0x08, 0xe9, 0x50, 0x5b, 0xe0, 0x95, 0x7c, 0x87, 0x4d, 0x47, 0xfc, 0x44, 0xc7, 0x50, 0x7f, + 0xf4, 0x93, 0x5c, 0xbd, 0xab, 0xd6, 0xe0, 0x5b, 0x4b, 0xbd, 0x6b, 0xab, 0x7c, 0xd7, 0xd6, 0x9d, + 0xe8, 0x3a, 0x0a, 0x3a, 0xd9, 0xf9, 0x4d, 0x3b, 0xba, 0x86, 0x56, 0xe5, 0x6a, 0x7d, 0x15, 0xcb, + 0xdf, 0xe1, 0xdd, 0xab, 0x54, 0xff, 0xc7, 0xb8, 0x5b, 0x35, 0x6e, 0x56, 0x0c, 0x3e, 0x05, 0xd0, + 0xd9, 0xfc, 0x74, 0xa0, 0x16, 0xec, 0x3a, 0xb7, 0x93, 0x89, 0x3d, 0xf9, 0x4b, 0xff, 0x06, 0xb5, + 0xa1, 0xf9, 0xe7, 0xd5, 0x78, 0x7a, 0x39, 0x72, 0x47, 0x43, 0x5d, 0x43, 0x00, 0x8d, 0x8b, 0x33, + 0xfb, 0x72, 0x34, 0xd4, 0x77, 0x44, 0xcb, 0xb5, 0xc7, 0xa3, 0xa1, 0x77, 0x75, 0xeb, 0xea, 0x35, + 0xd4, 0x01, 0x70, 0x47, 0xce, 0xd8, 0x9e, 0x9c, 0x09, 0xf4, 0x8d, 0x40, 0xa7, 0x67, 0xb7, 0x37, + 0xa3, 0xa1, 0x5e, 0x3f, 0xc7, 0xf0, 0x21, 0xa0, 0xa9, 0x45, 0x30, 0x8f, 0x92, 0xf8, 0x9f, 0x97, + 0x27, 0x71, 0x0e, 0xe5, 0x04, 0xd3, 0xd9, 0xdf, 0x27, 0xf3, 0x98, 0x3f, 0xe4, 0x33, 0x2b, 0xa0, + 0x69, 0xbf, 0xe0, 0xfb, 0xcf, 0x7c, 0x3f, 0x48, 0x62, 0x4c, 0x78, 0x7f, 0x4e, 0xe7, 0x6c, 0x19, + 0x54, 0xea, 0xf2, 0x23, 0x3b, 0x6b, 0x48, 0xbb, 0x9f, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1f, + 0xe2, 0x8d, 0x7f, 0xb4, 0x05, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go new file mode 100644 index 0000000000..ed8cfe2a7e --- /dev/null +++ 
b/polyglot-clients/gogrpc/conductor/model/workflowdef.pb.go @@ -0,0 +1,161 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflowdef.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowDef struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Tasks []*WorkflowTask `protobuf:"bytes,4,rep,name=tasks,proto3" json:"tasks,omitempty"` + InputParameters []string `protobuf:"bytes,5,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty"` + OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters,proto3" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow,proto3" json:"failure_workflow,omitempty"` + SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + Restartable bool `protobuf:"varint,9,opt,name=restartable,proto3" json:"restartable,omitempty"` + 
XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowDef) Reset() { *m = WorkflowDef{} } +func (m *WorkflowDef) String() string { return proto.CompactTextString(m) } +func (*WorkflowDef) ProtoMessage() {} +func (*WorkflowDef) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowdef_3a04d4bf8b36be23, []int{0} +} +func (m *WorkflowDef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowDef.Unmarshal(m, b) +} +func (m *WorkflowDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowDef.Marshal(b, m, deterministic) +} +func (dst *WorkflowDef) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowDef.Merge(dst, src) +} +func (m *WorkflowDef) XXX_Size() int { + return xxx_messageInfo_WorkflowDef.Size(m) +} +func (m *WorkflowDef) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowDef.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowDef proto.InternalMessageInfo + +func (m *WorkflowDef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WorkflowDef) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *WorkflowDef) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowDef) GetTasks() []*WorkflowTask { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *WorkflowDef) GetInputParameters() []string { + if m != nil { + return m.InputParameters + } + return nil +} + +func (m *WorkflowDef) GetOutputParameters() map[string]*_struct.Value { + if m != nil { + return m.OutputParameters + } + return nil +} + +func (m *WorkflowDef) GetFailureWorkflow() string { + if m != nil { + return m.FailureWorkflow + } + return "" +} + +func (m *WorkflowDef) GetSchemaVersion() int32 { + if m != nil { + return m.SchemaVersion + } + return 0 +} + +func (m *WorkflowDef) GetRestartable() bool { + if m != nil { + return 
m.Restartable + } + return false +} + +func init() { + proto.RegisterType((*WorkflowDef)(nil), "conductor.proto.WorkflowDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowDef.OutputParametersEntry") +} + +func init() { + proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_3a04d4bf8b36be23) +} + +var fileDescriptor_workflowdef_3a04d4bf8b36be23 = []byte{ + // 404 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xdf, 0x6b, 0xd5, 0x30, + 0x14, 0xc7, 0xe9, 0xba, 0x6e, 0xbb, 0x29, 0xf3, 0x5e, 0x03, 0x6a, 0x98, 0x0a, 0x45, 0x10, 0x2a, + 0x48, 0x0a, 0x77, 0x2f, 0xb2, 0xc7, 0x31, 0x9f, 0x1d, 0x45, 0x26, 0xe8, 0x43, 0x49, 0xd3, 0xd3, + 0xde, 0xd2, 0xb4, 0x29, 0xf9, 0xb1, 0xb9, 0xbf, 0xda, 0x7f, 0x41, 0x9a, 0xb6, 0xb3, 0x16, 0x7d, + 0xcb, 0xf9, 0x7c, 0xcf, 0x2f, 0xbe, 0x27, 0xe8, 0x55, 0x2b, 0x0b, 0x10, 0xc9, 0x83, 0x54, 0x4d, + 0x29, 0xe4, 0x43, 0x01, 0x25, 0xed, 0x95, 0x34, 0x12, 0x6f, 0xb9, 0xec, 0x0a, 0xcb, 0x8d, 0x54, + 0x23, 0xb8, 0x20, 0x7f, 0x67, 0x1a, 0xa6, 0x9b, 0x49, 0x79, 0x53, 0x49, 0x59, 0x09, 0x48, 0x5c, + 0x94, 0xdb, 0x32, 0xd1, 0x46, 0x59, 0x6e, 0x46, 0xf5, 0xdd, 0x2f, 0x1f, 0x85, 0xdf, 0xa6, 0xa2, + 0x1b, 0x28, 0x31, 0x46, 0xc7, 0x1d, 0x6b, 0x81, 0x78, 0x91, 0x17, 0x6f, 0x52, 0xf7, 0xc6, 0x11, + 0x0a, 0x0b, 0xd0, 0x5c, 0xd5, 0xbd, 0xa9, 0x65, 0x47, 0x8e, 0x9c, 0xb4, 0x44, 0x98, 0xa0, 0xd3, + 0x7b, 0x50, 0x7a, 0x50, 0xfd, 0xc8, 0x8b, 0x83, 0x74, 0x0e, 0xf1, 0x25, 0x0a, 0x86, 0x5d, 0x34, + 0x39, 0x8e, 0xfc, 0x38, 0xdc, 0xbf, 0xa5, 0xab, 0xc5, 0xe9, 0x3c, 0xfc, 0x2b, 0xd3, 0x4d, 0x3a, + 0xe6, 0xe2, 0x0f, 0x68, 0x57, 0x77, 0xbd, 0x35, 0x59, 0xcf, 0x14, 0x6b, 0xc1, 0x80, 0xd2, 0x24, + 0x88, 0xfc, 0x78, 0x93, 0x6e, 0x1d, 0xbf, 0x7d, 0xc2, 0x38, 0x43, 0xcf, 0xa5, 0x35, 0xab, 0xdc, + 0x13, 0x37, 0x6b, 0xff, 0xdf, 0x59, 0x37, 0x50, 0xd2, 0x2f, 0xae, 0xea, 0x4f, 0xa7, 0xcf, 0x9d, + 0x51, 0x8f, 0xe9, 0x4e, 0xae, 0xf0, 0xb0, 0x4b, 0xc9, 0x6a, 0x61, 
0x15, 0x64, 0xb3, 0xb9, 0xe4, + 0xd4, 0x39, 0xb0, 0x9d, 0xf8, 0xdc, 0x15, 0xbf, 0x47, 0xcf, 0x34, 0x3f, 0x40, 0xcb, 0xb2, 0xd9, + 0x8c, 0x33, 0x67, 0xc6, 0xf9, 0x48, 0xef, 0x26, 0x4b, 0x22, 0x14, 0x2a, 0xd0, 0x86, 0x29, 0xc3, + 0x72, 0x01, 0x64, 0x13, 0x79, 0xf1, 0x59, 0xba, 0x44, 0x17, 0x3f, 0xd0, 0x8b, 0x7f, 0xae, 0x87, + 0x77, 0xc8, 0x6f, 0xe0, 0x71, 0x3a, 0xce, 0xf0, 0xc4, 0x1f, 0x51, 0x70, 0xcf, 0x84, 0x05, 0x77, + 0x95, 0x70, 0xff, 0x92, 0x8e, 0xd7, 0xa6, 0xf3, 0xb5, 0xe9, 0xdd, 0xa0, 0xa6, 0x63, 0xd2, 0xd5, + 0xd1, 0x27, 0xef, 0xfa, 0x80, 0x5e, 0x73, 0xd9, 0xd2, 0x0e, 0x4c, 0x29, 0xea, 0x9f, 0x6b, 0x8f, + 0xae, 0xcf, 0x17, 0x26, 0xdd, 0xe6, 0xdf, 0xaf, 0xaa, 0xda, 0x1c, 0x6c, 0x4e, 0xb9, 0x6c, 0x93, + 0xa9, 0x24, 0x79, 0x2a, 0x49, 0xb8, 0xa8, 0xa1, 0x33, 0x49, 0x25, 0x2b, 0xd5, 0xf3, 0x05, 0x77, + 0x9f, 0x31, 0x3f, 0x71, 0x1d, 0x2f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x70, 0x4f, 0x0f, 0xeb, + 0xc6, 0x02, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go new file mode 100644 index 0000000000..06837d555e --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/workflowsummary.pb.go @@ -0,0 +1,200 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflowsummary.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowSummary struct { + WorkflowType string `protobuf:"bytes,1,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + CorrelationId string `protobuf:"bytes,4,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + Input string `protobuf:"bytes,9,opt,name=input,proto3" json:"input,omitempty"` + Output string `protobuf:"bytes,10,opt,name=output,proto3" json:"output,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 `protobuf:"varint,12,opt,name=execution_time,json=executionTime,proto3" json:"execution_time,omitempty"` + Event string `protobuf:"bytes,13,opt,name=event,proto3" json:"event,omitempty"` + FailedReferenceTaskNames string `protobuf:"bytes,14,opt,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowSummary) Reset() { *m = WorkflowSummary{} } +func (m *WorkflowSummary) String() string { return 
proto.CompactTextString(m) } +func (*WorkflowSummary) ProtoMessage() {} +func (*WorkflowSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowsummary_3f8ed40c0bd9261f, []int{0} +} +func (m *WorkflowSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowSummary.Unmarshal(m, b) +} +func (m *WorkflowSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowSummary.Marshal(b, m, deterministic) +} +func (dst *WorkflowSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowSummary.Merge(dst, src) +} +func (m *WorkflowSummary) XXX_Size() int { + return xxx_messageInfo_WorkflowSummary.Size(m) +} +func (m *WorkflowSummary) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowSummary proto.InternalMessageInfo + +func (m *WorkflowSummary) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *WorkflowSummary) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowSummary) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *WorkflowSummary) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *WorkflowSummary) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *WorkflowSummary) GetUpdateTime() string { + if m != nil { + return m.UpdateTime + } + return "" +} + +func (m *WorkflowSummary) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +func (m *WorkflowSummary) GetStatus() Workflow_WorkflowStatus { + if m != nil { + return m.Status + } + return Workflow_RUNNING +} + +func (m *WorkflowSummary) GetInput() string { + if m != nil { + return m.Input + } + return "" +} + +func (m *WorkflowSummary) GetOutput() string { + if m != nil { + return m.Output + } + return "" +} + +func (m *WorkflowSummary) 
GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *WorkflowSummary) GetExecutionTime() int64 { + if m != nil { + return m.ExecutionTime + } + return 0 +} + +func (m *WorkflowSummary) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *WorkflowSummary) GetFailedReferenceTaskNames() string { + if m != nil { + return m.FailedReferenceTaskNames + } + return "" +} + +func init() { + proto.RegisterType((*WorkflowSummary)(nil), "conductor.proto.WorkflowSummary") +} + +func init() { + proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_3f8ed40c0bd9261f) +} + +var fileDescriptor_workflowsummary_3f8ed40c0bd9261f = []byte{ + // 428 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0x51, 0x8b, 0xd3, 0x40, + 0x10, 0xc7, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x1e, 0x2e, 0x55, 0x57, 0x8b, 0x58, 0x14, 0x21, + 0x4f, 0x09, 0x28, 0xf8, 0x20, 0x08, 0x72, 0x0f, 0x42, 0x5f, 0x44, 0x72, 0x05, 0xc1, 0x97, 0xb0, + 0xdd, 0x4c, 0xea, 0xd2, 0x64, 0x27, 0xec, 0x6e, 0xee, 0xda, 0x4f, 0xe2, 0xd7, 0x95, 0xcc, 0x36, + 0xa5, 0xd7, 0xb7, 0xcc, 0xef, 0xff, 0x9b, 0xdd, 0xcc, 0xb0, 0x6c, 0x5e, 0x63, 0x01, 0x55, 0xfa, + 0x88, 0x76, 0x5b, 0x56, 0xf8, 0xe8, 0xda, 0xba, 0x96, 0x76, 0x9f, 0x34, 0x16, 0x3d, 0xf2, 0x5b, + 0x85, 0xa6, 0x68, 0x95, 0x47, 0x1b, 0xc0, 0x9b, 0xd9, 0x53, 0x3b, 0xd0, 0xf7, 0xff, 0x2e, 0xd9, + 0xed, 0xef, 0x03, 0xba, 0x0f, 0x07, 0xf0, 0x0f, 0x6c, 0xd2, 0x5b, 0xb9, 0xdf, 0x37, 0x20, 0xa2, + 0x45, 0x14, 0x8f, 0xb3, 0x9b, 0x1e, 0xae, 0xf6, 0x0d, 0x70, 0xc1, 0x46, 0x0f, 0x60, 0x9d, 0x46, + 0x23, 0x9e, 0x2d, 0xa2, 0x78, 0x90, 0xf5, 0x25, 0x7f, 0xc7, 0xae, 0x8f, 0xed, 0xba, 0x10, 0x17, + 0xd4, 0xcc, 0x7a, 0xb4, 0x2c, 0xf8, 0x47, 0x36, 0x55, 0x68, 0x2d, 0x54, 0xd2, 0x6b, 0x34, 0x9d, + 0x73, 0x49, 0xce, 0xe4, 0x84, 0x2e, 0x0b, 0xfe, 0x96, 0x31, 0xe7, 0xa5, 0xf5, 0xb9, 0xd7, 0x35, + 0x88, 0x01, 0x29, 0x63, 
0x22, 0x2b, 0x5d, 0x43, 0x77, 0x4d, 0xdb, 0x14, 0xd2, 0x43, 0xc8, 0x87, + 0xe1, 0x9a, 0x80, 0x48, 0x78, 0xcd, 0xae, 0xc0, 0x14, 0x21, 0x1d, 0x51, 0x3a, 0x02, 0x53, 0x50, + 0xf4, 0x9d, 0x0d, 0x9d, 0x97, 0xbe, 0x75, 0xe2, 0x6a, 0x11, 0xc5, 0xd3, 0x4f, 0x71, 0x72, 0xb6, + 0xad, 0xa4, 0xdf, 0xc9, 0xf1, 0xe3, 0x9e, 0xfc, 0xec, 0xd0, 0xc7, 0x67, 0x6c, 0xa0, 0x4d, 0xd3, + 0x7a, 0x31, 0xa6, 0x93, 0x43, 0xc1, 0x5f, 0xb2, 0x21, 0xb6, 0xbe, 0xc3, 0x8c, 0xf0, 0xa1, 0xe2, + 0x5f, 0xd8, 0x2b, 0x0b, 0xd2, 0xa1, 0xc9, 0x4b, 0xb4, 0xb9, 0x36, 0x0a, 0xeb, 0xa6, 0x82, 0x6e, + 0x4e, 0x71, 0x4d, 0xe2, 0x8b, 0x10, 0xff, 0x40, 0xbb, 0x3c, 0x09, 0xbb, 0x4d, 0xc1, 0x0e, 0x54, + 0x4b, 0x7b, 0xa2, 0x41, 0x6e, 0x16, 0x51, 0x7c, 0x91, 0x4d, 0x8e, 0x94, 0xc6, 0x99, 0xb1, 0x01, + 0x3c, 0x80, 0xf1, 0x62, 0x12, 0x7e, 0x86, 0x0a, 0xfe, 0x8d, 0xcd, 0x4b, 0xa9, 0x2b, 0x28, 0x72, + 0x0b, 0x25, 0x58, 0x30, 0x0a, 0x72, 0x2f, 0xdd, 0x36, 0x37, 0xb2, 0x06, 0x27, 0xa6, 0xe4, 0x8a, + 0xa0, 0x64, 0xbd, 0xb1, 0x92, 0x6e, 0xfb, 0xb3, 0xcb, 0xef, 0x2a, 0x36, 0x57, 0x58, 0x27, 0x06, + 0x7c, 0x59, 0xe9, 0xdd, 0xf9, 0x82, 0xee, 0x9e, 0x9f, 0xbd, 0x9a, 0x5f, 0xeb, 0x3f, 0x5f, 0x37, + 0xda, 0xff, 0x6d, 0xd7, 0x89, 0xc2, 0x3a, 0x3d, 0xb4, 0xa5, 0xc7, 0xb6, 0x54, 0x55, 0x1a, 0x8c, + 0x4f, 0x37, 0xb8, 0xb1, 0x8d, 0x3a, 0xe1, 0xf4, 0x2c, 0xd7, 0x43, 0x3a, 0xf5, 0xf3, 0xff, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xa0, 0xee, 0x86, 0xf0, 0xd4, 0x02, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go b/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go new file mode 100644 index 0000000000..1e24eb2f4b --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/model/workflowtask.pb.go @@ -0,0 +1,308 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: model/workflowtask.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowTask struct { + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + InputParameters map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` + DynamicTaskNameParam string `protobuf:"bytes,6,opt,name=dynamic_task_name_param,json=dynamicTaskNameParam,proto3" json:"dynamic_task_name_param,omitempty"` + CaseValueParam string `protobuf:"bytes,7,opt,name=case_value_param,json=caseValueParam,proto3" json:"case_value_param,omitempty"` + CaseExpression string `protobuf:"bytes,8,opt,name=case_expression,json=caseExpression,proto3" json:"case_expression,omitempty"` + DecisionCases map[string]*WorkflowTask_WorkflowTaskList `protobuf:"bytes,9,rep,name=decision_cases,json=decisionCases,proto3" json:"decision_cases,omitempty" 
protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + DynamicForkTasksParam string `protobuf:"bytes,10,opt,name=dynamic_fork_tasks_param,json=dynamicForkTasksParam,proto3" json:"dynamic_fork_tasks_param,omitempty"` + DynamicForkTasksInputParamName string `protobuf:"bytes,11,opt,name=dynamic_fork_tasks_input_param_name,json=dynamicForkTasksInputParamName,proto3" json:"dynamic_fork_tasks_input_param_name,omitempty"` + DefaultCase []*WorkflowTask `protobuf:"bytes,12,rep,name=default_case,json=defaultCase,proto3" json:"default_case,omitempty"` + ForkTasks []*WorkflowTask_WorkflowTaskList `protobuf:"bytes,13,rep,name=fork_tasks,json=forkTasks,proto3" json:"fork_tasks,omitempty"` + StartDelay int32 `protobuf:"varint,14,opt,name=start_delay,json=startDelay,proto3" json:"start_delay,omitempty"` + SubWorkflowParam *SubWorkflowParams `protobuf:"bytes,15,opt,name=sub_workflow_param,json=subWorkflowParam,proto3" json:"sub_workflow_param,omitempty"` + JoinOn []string `protobuf:"bytes,16,rep,name=join_on,json=joinOn,proto3" json:"join_on,omitempty"` + Sink string `protobuf:"bytes,17,opt,name=sink,proto3" json:"sink,omitempty"` + Optional bool `protobuf:"varint,18,opt,name=optional,proto3" json:"optional,omitempty"` + TaskDefinition *TaskDef `protobuf:"bytes,19,opt,name=task_definition,json=taskDefinition,proto3" json:"task_definition,omitempty"` + RateLimited bool `protobuf:"varint,20,opt,name=rate_limited,json=rateLimited,proto3" json:"rate_limited,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } +func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } +func (*WorkflowTask) ProtoMessage() {} +func (*WorkflowTask) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowtask_9ea0dc5eed4f592b, []int{0} +} +func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { + return 
xxx_messageInfo_WorkflowTask.Unmarshal(m, b) +} +func (m *WorkflowTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTask.Marshal(b, m, deterministic) +} +func (dst *WorkflowTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTask.Merge(dst, src) +} +func (m *WorkflowTask) XXX_Size() int { + return xxx_messageInfo_WorkflowTask.Size(m) +} +func (m *WorkflowTask) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTask.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTask proto.InternalMessageInfo + +func (m *WorkflowTask) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WorkflowTask) GetTaskReferenceName() string { + if m != nil { + return m.TaskReferenceName + } + return "" +} + +func (m *WorkflowTask) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *WorkflowTask) GetInputParameters() map[string]*_struct.Value { + if m != nil { + return m.InputParameters + } + return nil +} + +func (m *WorkflowTask) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *WorkflowTask) GetDynamicTaskNameParam() string { + if m != nil { + return m.DynamicTaskNameParam + } + return "" +} + +func (m *WorkflowTask) GetCaseValueParam() string { + if m != nil { + return m.CaseValueParam + } + return "" +} + +func (m *WorkflowTask) GetCaseExpression() string { + if m != nil { + return m.CaseExpression + } + return "" +} + +func (m *WorkflowTask) GetDecisionCases() map[string]*WorkflowTask_WorkflowTaskList { + if m != nil { + return m.DecisionCases + } + return nil +} + +func (m *WorkflowTask) GetDynamicForkTasksParam() string { + if m != nil { + return m.DynamicForkTasksParam + } + return "" +} + +func (m *WorkflowTask) GetDynamicForkTasksInputParamName() string { + if m != nil { + return m.DynamicForkTasksInputParamName + } + return "" +} + +func (m *WorkflowTask) GetDefaultCase() []*WorkflowTask { + if m != nil { + return 
m.DefaultCase + } + return nil +} + +func (m *WorkflowTask) GetForkTasks() []*WorkflowTask_WorkflowTaskList { + if m != nil { + return m.ForkTasks + } + return nil +} + +func (m *WorkflowTask) GetStartDelay() int32 { + if m != nil { + return m.StartDelay + } + return 0 +} + +func (m *WorkflowTask) GetSubWorkflowParam() *SubWorkflowParams { + if m != nil { + return m.SubWorkflowParam + } + return nil +} + +func (m *WorkflowTask) GetJoinOn() []string { + if m != nil { + return m.JoinOn + } + return nil +} + +func (m *WorkflowTask) GetSink() string { + if m != nil { + return m.Sink + } + return "" +} + +func (m *WorkflowTask) GetOptional() bool { + if m != nil { + return m.Optional + } + return false +} + +func (m *WorkflowTask) GetTaskDefinition() *TaskDef { + if m != nil { + return m.TaskDefinition + } + return nil +} + +func (m *WorkflowTask) GetRateLimited() bool { + if m != nil { + return m.RateLimited + } + return false +} + +type WorkflowTask_WorkflowTaskList struct { + Tasks []*WorkflowTask `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTask_WorkflowTaskList) Reset() { *m = WorkflowTask_WorkflowTaskList{} } +func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } +func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} +func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowtask_9ea0dc5eed4f592b, []int{0, 0} +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Marshal(b, m, deterministic) +} +func (dst *WorkflowTask_WorkflowTaskList) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_WorkflowTask_WorkflowTaskList.Merge(dst, src) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Size() int { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Size(m) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTask_WorkflowTaskList.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTask_WorkflowTaskList proto.InternalMessageInfo + +func (m *WorkflowTask_WorkflowTaskList) GetTasks() []*WorkflowTask { + if m != nil { + return m.Tasks + } + return nil +} + +func init() { + proto.RegisterType((*WorkflowTask)(nil), "conductor.proto.WorkflowTask") + proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.DecisionCasesEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowTask.InputParametersEntry") + proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.WorkflowTaskList") +} + +func init() { + proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_9ea0dc5eed4f592b) +} + +var fileDescriptor_workflowtask_9ea0dc5eed4f592b = []byte{ + // 708 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0xd3, 0x3a, + 0x14, 0x56, 0xd7, 0x75, 0x5b, 0x4f, 0xbb, 0xb6, 0xf3, 0x76, 0xef, 0xac, 0xde, 0xbb, 0x7b, 0xcb, + 0x78, 0xa0, 0x0f, 0x28, 0x45, 0x9d, 0x10, 0x68, 0x4f, 0x30, 0x3a, 0x10, 0x62, 0xc0, 0x14, 0x10, + 0x93, 0x26, 0xa1, 0x28, 0x4d, 0x9c, 0x62, 0x9a, 0xc6, 0x91, 0xed, 0xb0, 0xf5, 0xaf, 0xf0, 0x6b, + 0x91, 0x8f, 0x93, 0x36, 0xeb, 0xa6, 0x09, 0xde, 0xec, 0xef, 0x7c, 0xdf, 0x39, 0xe7, 0xb3, 0x7d, + 0x0c, 0x74, 0x26, 0x42, 0x16, 0x0f, 0xae, 0x84, 0x9c, 0x46, 0xb1, 0xb8, 0xd2, 0xbe, 0x9a, 0x3a, + 0xa9, 0x14, 0x5a, 0x90, 0x76, 0x20, 0x92, 0x30, 0x0b, 0xb4, 0x90, 0x16, 0xe8, 0xee, 0x5a, 0xaa, + 0xa1, 0x84, 0x2c, 0xca, 0xc1, 0x03, 0x0b, 0xaa, 0x6c, 0x5c, 0xa4, 0x48, 0x7d, 0xe9, 0xcf, 0x54, + 0x1e, 0xfe, 0x77, 
0x22, 0xc4, 0x24, 0x66, 0x03, 0xdc, 0x8d, 0xb3, 0x68, 0xa0, 0xb4, 0xcc, 0x02, + 0x6d, 0xa3, 0x87, 0x3f, 0x01, 0x9a, 0x17, 0xb9, 0xec, 0xb3, 0xaf, 0xa6, 0x84, 0xc0, 0x7a, 0xe2, + 0xcf, 0x18, 0xad, 0xf4, 0x2a, 0xfd, 0xba, 0x8b, 0x6b, 0xe2, 0xc0, 0xae, 0x29, 0xe9, 0x49, 0x16, + 0x31, 0xc9, 0x92, 0x80, 0x79, 0x48, 0x59, 0x43, 0xca, 0x8e, 0x09, 0xb9, 0x45, 0xe4, 0x83, 0xe1, + 0xf7, 0xa0, 0x11, 0x32, 0x15, 0x48, 0x9e, 0x6a, 0x2e, 0x12, 0x5a, 0x45, 0x5e, 0x19, 0x22, 0x5f, + 0xa1, 0xc3, 0x93, 0x34, 0xd3, 0x1e, 0xb6, 0xca, 0x34, 0x93, 0x8a, 0xae, 0xf7, 0xaa, 0xfd, 0xc6, + 0x70, 0xe8, 0xac, 0x98, 0x76, 0xca, 0xed, 0x39, 0x6f, 0x8d, 0xea, 0x7c, 0x21, 0x3a, 0x4d, 0xb4, + 0x9c, 0xbb, 0x6d, 0x7e, 0x13, 0x35, 0x26, 0xf4, 0x3c, 0x65, 0xb4, 0x66, 0x4d, 0x98, 0x35, 0x79, + 0x0a, 0xfb, 0xe1, 0x3c, 0xf1, 0x67, 0x3c, 0xf0, 0xd0, 0x8c, 0xb1, 0x60, 0xcb, 0xd3, 0x0d, 0xa4, + 0xed, 0xe5, 0x61, 0x53, 0xc7, 0xd8, 0xc0, 0x7c, 0xa4, 0x0f, 0x9d, 0xc0, 0x57, 0xcc, 0xfb, 0xe1, + 0xc7, 0x59, 0xc1, 0xdf, 0x44, 0x7e, 0xcb, 0xe0, 0x5f, 0x0c, 0x6c, 0x99, 0x8f, 0xa0, 0x8d, 0x4c, + 0x76, 0x9d, 0x4a, 0xa6, 0x94, 0x71, 0xbe, 0xb5, 0x24, 0x9e, 0x2e, 0x50, 0x72, 0x01, 0xad, 0x90, + 0x05, 0xdc, 0xac, 0x3d, 0x13, 0x52, 0xb4, 0x8e, 0xd6, 0x9f, 0xdc, 0x6f, 0x7d, 0x94, 0x6b, 0x5e, + 0x19, 0x89, 0x35, 0xbe, 0x1d, 0x96, 0x31, 0xf2, 0x0c, 0x68, 0x61, 0x31, 0x12, 0x72, 0x8a, 0x3e, + 0x55, 0xde, 0x33, 0x60, 0x2b, 0x7f, 0xe5, 0xf1, 0xd7, 0x42, 0x4e, 0x4d, 0x52, 0x65, 0x5b, 0x7f, + 0x07, 0x0f, 0xef, 0x10, 0x96, 0x6e, 0xc8, 0x5e, 0x78, 0x03, 0x73, 0xfc, 0xb7, 0x9a, 0x63, 0x79, + 0x27, 0x78, 0xfb, 0x2f, 0xa0, 0x19, 0xb2, 0xc8, 0xcf, 0x62, 0x8d, 0xee, 0x68, 0x13, 0xcd, 0x1d, + 0xdc, 0x6b, 0xce, 0xbc, 0x0e, 0x94, 0x18, 0x23, 0xe4, 0x3d, 0xc0, 0xb2, 0x0d, 0xba, 0x8d, 0x7a, + 0xe7, 0xfe, 0xc3, 0x29, 0x6f, 0xce, 0xb8, 0xd2, 0x6e, 0x3d, 0x2a, 0xda, 0x23, 0xff, 0x43, 0x43, + 0x69, 0x5f, 0x6a, 0x2f, 0x64, 0xb1, 0x3f, 0xa7, 0xad, 0x5e, 0xa5, 0x5f, 0x73, 0x01, 0xa1, 0x91, + 0x41, 0xc8, 0x39, 0x10, 0x95, 0x8d, 0xbd, 0x62, 0x7c, 
0xf2, 0x13, 0x6b, 0xf7, 0x2a, 0xfd, 0xc6, + 0xf0, 0xf0, 0x56, 0xdd, 0x4f, 0xd9, 0xb8, 0xa8, 0x86, 0xa6, 0x95, 0xdb, 0x51, 0x2b, 0x10, 0xd9, + 0x87, 0xcd, 0xef, 0x82, 0x27, 0x9e, 0x48, 0x68, 0xa7, 0x57, 0xed, 0xd7, 0xdd, 0x0d, 0xb3, 0xfd, + 0x98, 0x98, 0x97, 0xa9, 0x78, 0x32, 0xa5, 0x3b, 0xf6, 0x65, 0x9a, 0x35, 0xe9, 0xc2, 0x96, 0xc0, + 0xb1, 0xf0, 0x63, 0x4a, 0x7a, 0x95, 0xfe, 0x96, 0xbb, 0xd8, 0x93, 0x97, 0xd0, 0xc6, 0xd7, 0x1a, + 0xb2, 0x88, 0x27, 0x1c, 0xc7, 0x69, 0x17, 0xfb, 0xa2, 0xb7, 0xfa, 0x32, 0x66, 0x47, 0x2c, 0x72, + 0x5b, 0xda, 0x2e, 0x72, 0x3e, 0x79, 0x00, 0x4d, 0xe9, 0x6b, 0xe6, 0xc5, 0x7c, 0xc6, 0x35, 0x0b, + 0xe9, 0x1e, 0x96, 0x68, 0x18, 0xec, 0xcc, 0x42, 0xdd, 0x37, 0xd0, 0x59, 0x3d, 0x40, 0x72, 0x04, + 0x35, 0x7b, 0xfe, 0x95, 0xdf, 0xb9, 0x3f, 0xcb, 0xed, 0x5e, 0xc2, 0xde, 0x5d, 0x13, 0x4a, 0x3a, + 0x50, 0x9d, 0xb2, 0x79, 0xfe, 0xa9, 0x98, 0x25, 0x79, 0x0c, 0x35, 0x1c, 0x29, 0xfc, 0x45, 0x1a, + 0xc3, 0xbf, 0x1d, 0xfb, 0x4d, 0x39, 0xc5, 0x37, 0xe5, 0xe0, 0x64, 0xb9, 0x96, 0x74, 0xbc, 0xf6, + 0xbc, 0xd2, 0x4d, 0x81, 0xdc, 0x1e, 0x81, 0x3b, 0x32, 0x8f, 0x6e, 0x66, 0xfe, 0xd3, 0x87, 0xb3, + 0xac, 0x78, 0xc2, 0xe1, 0x9f, 0x40, 0xcc, 0x9c, 0x84, 0xe9, 0x28, 0xe6, 0xd7, 0xab, 0x79, 0x4e, + 0x5a, 0x65, 0xed, 0xf9, 0xf8, 0xf2, 0x78, 0xc2, 0xf5, 0xb7, 0x6c, 0xec, 0x04, 0x62, 0x36, 0xc8, + 0x35, 0x83, 0x85, 0x66, 0x10, 0xc4, 0x9c, 0x25, 0x7a, 0x30, 0x11, 0x13, 0x99, 0x06, 0x25, 0x1c, + 0x7f, 0xef, 0xf1, 0x06, 0xa6, 0x3c, 0xfa, 0x15, 0x00, 0x00, 0xff, 0xff, 0xa9, 0x25, 0x52, 0x75, + 0x0d, 0x06, 0x00, 0x00, +} diff --git a/polyglot-clients/gogrpc/conductor/worker.go b/polyglot-clients/gogrpc/conductor/worker.go new file mode 100644 index 0000000000..63a67fb8bd --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/worker.go @@ -0,0 +1,177 @@ +package conductor + +import ( + "context" + "fmt" + "os" + "runtime" + "sync" + "sync/atomic" + "time" + + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + 
"github.com/netflix/conductor/client/gogrpc/conductor/model" +) + +// An Executor is a struct that executes the logic required to resolve +// a task. Each Worker instance uses an Executor to run the polled tasks. +type Executor interface { + // Execute attempt to resolve the given Task and returns a TaskResult + // with its output. The given Context carries a deadline which must be + // enforced by the implementation. + // This function will be called by the Worker for each incoming Task, + // and must be threadsafe as it can be called by several goroutines + // concurrently. + Execute(context.Context, *model.Task) (*model.TaskResult, error) + + // ConnectionError is called by a Worker whenever there's an error with + // a GRPC connection. The GRPC error is passed in as its only argument. + // If this function returns nil, the Worker will continue retrying the + // connection; if it returns a non-nill error, the Worker will stop its + // execution and return the given error as the result of the Worker.Run + // function. + ConnectionError(error) error +} + +// A Worker uses a TaskClient to poll the Conductor server for new tasks and +// executes them using an Executor instance, returning the result of the task +// to the upstream server. +// The Worker struct must be created manually with the desired settings, and then +// ran with Worker.Run. +// Client implementations usually run a single Worker per process, or one worker per Task Type +// if a process needs to execute tasks of different types. The Concurrency +// field allows the worker to execute tasks concurrently in several goroutines. +type Worker struct { + // TaskType is the identifier for the type of tasks that this worker can + // execute. This will be send to Conductor when polling for new tasks. + TaskType string + + // TaskTimeout is the total duration that a task will be executed for. This + // includes the time required to poll, execute and return the task's results. 
+ // If not set, tasks will not timeout. + TaskTimeout time.Duration + + // Identifier is an unique identifier for this worker. If not set, it defaults + // to the local hostname. + Identifier string + + // Concurrency is the amount of goroutines that wil poll for tasks and execute + // them concurrently. If not set, it defaults to GOMAXPROCS, a sensible default. + Concurrency int + + // Executor is an instance of an Executor that will actually run the logic required + // for each task. See conductor.Executor. + Executor Executor + + // Client is an instance of a conductor.Client that implements a Task service. + // See conductor.Client + Client TasksClient + + waitThreads sync.WaitGroup + active int32 // atomic + shutdown chan struct{} + shutdownFlag sync.Once + result error +} + +// Run executes the main loop of the Worker, spawning several gorutines to poll and +// resolve tasks from a Conductor server. +// This is a blocking call that will not return until Worker.Shutdown is called from +// another goroutine. When shutting down cleanly, this function returns nil; otherwise +// an error is returned if there's been a problem with the GRPC connection and the Worker +// cannot continue running. 
+func (worker *Worker) Run() error { + if worker.TaskType == "" { + return fmt.Errorf("conductor: missing field 'TaskType'") + } + if worker.Executor == nil { + return fmt.Errorf("conductor: missing field 'Executor'") + } + if worker.Client == nil { + return fmt.Errorf("conductor: missing field 'Client'") + } + if worker.Identifier == "" { + hostname, err := os.Hostname() + if err != nil { + return err + } + worker.Identifier = fmt.Sprintf("%s (conductor-go)", hostname) + } + if worker.Concurrency == 0 { + worker.Concurrency = runtime.GOMAXPROCS(0) + } + + worker.active = 0 + worker.result = nil + worker.shutdown = make(chan struct{}) + worker.waitThreads.Add(worker.Concurrency) + + for i := 0; i < worker.Concurrency; i++ { + go worker.thread() + } + + worker.waitThreads.Wait() + return worker.result +} + +// Shutdown stops this worker gracefully. This function is thread-safe and may +// be called from any goroutine. Only the first call to Shutdown will have +// an effect. +func (worker *Worker) Shutdown() { + worker.shutdownOnce(nil) +} + +func (worker *Worker) shutdownOnce(err error) { + worker.shutdownFlag.Do(func() { + worker.result = err + close(worker.shutdown) + worker.waitThreads.Wait() + worker.Client.Shutdown() + }) +} + +func (worker *Worker) onError(err error) { + userErr := worker.Executor.ConnectionError(err) + if userErr != nil { + worker.shutdownOnce(userErr) + } +} + +func (worker *Worker) runTask(req *tasks.PollRequest) error { + ctx, cancel := context.WithTimeout(context.Background(), worker.TaskTimeout) + defer cancel() + + task, err := worker.Client.Tasks().Poll(ctx, req) + if err != nil { + return err + } + + result, err := worker.Executor.Execute(ctx, task.Task) + // TODO: what if the task failed? 
+ if err == nil { + request := tasks.UpdateTaskRequest{Result: result} + _, err := worker.Client.Tasks().UpdateTask(context.Background(), &request) + if err != nil { + return err + } + } + return nil +} + +func (worker *Worker) thread() { + defer worker.waitThreads.Done() + + pollRequest := &tasks.PollRequest{ + TaskType: worker.TaskType, + WorkerId: worker.Identifier, + } + + for range worker.shutdown { + atomic.AddInt32(&worker.active, 1) + err := worker.runTask(pollRequest) + if err != nil { + worker.onError(err) + } + atomic.AddInt32(&worker.active, -1) + } +} diff --git a/polyglot-clients/gogrpc/conductor/worker_test.go b/polyglot-clients/gogrpc/conductor/worker_test.go new file mode 100644 index 0000000000..39e6416f2b --- /dev/null +++ b/polyglot-clients/gogrpc/conductor/worker_test.go @@ -0,0 +1,197 @@ +package conductor + +import ( + "context" + "flag" + "fmt" + "io" + "math/rand" + "sync" + "testing" + "time" + + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + + "github.com/netflix/conductor/client/gogrpc/conductor/model" + "google.golang.org/grpc" + + "github.com/stretchr/testify/assert" +) + +var doTrace = flag.Bool("dotrace", false, "print tracing information") + +func trace(format string, args ...interface{}) { + if *doTrace { + fmt.Printf(format, args...) 
+ } +} + +type fakeTaskService struct { + latency time.Duration + shutdown chan struct{} + mu sync.Mutex + completed map[string]bool + result error +} + +func randomTaskID() string { + return fmt.Sprintf("task-%08x", rand.Int63()) +} + +var ErrNotImplemented = fmt.Errorf("API call not implemented") + +func (s *fakeTaskService) newTask(req *tasks.PollRequest) (*model.Task, error) { + id := randomTaskID() + + s.mu.Lock() + s.completed[id] = false + s.mu.Unlock() + + return &model.Task{ + TaskType: req.GetTaskType(), + Status: model.Task_SCHEDULED, + TaskId: id, + }, nil +} + +func (s *fakeTaskService) updateTask(res *model.TaskResult) (*tasks.UpdateTaskResponse, error) { + id := res.GetTaskId() + + s.mu.Lock() + if _, found := s.completed[id]; !found { + panic("missing task: " + id) + } + s.completed[id] = true + s.mu.Unlock() + + return &tasks.UpdateTaskResponse{ + TaskId: id, + }, nil +} + +func (s *fakeTaskService) Poll(ctx context.Context, in *tasks.PollRequest, opts ...grpc.CallOption) (*tasks.PollResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) BatchPoll(context.Context, *tasks.BatchPollRequest, ...grpc.CallOption) (tasks.TaskService_BatchPollClient, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetPendingTaskForWorkflow(context.Context, *tasks.PendingTaskRequest, ...grpc.CallOption) (*tasks.PendingTaskResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetTasksInProgress(ctx context.Context, in *tasks.TasksInProgressRequest, opts ...grpc.CallOption) (*tasks.TasksInProgressResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) UpdateTask(ctx context.Context, in *tasks.UpdateTaskRequest, opts ...grpc.CallOption) (*tasks.UpdateTaskResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) AckTask(ctx context.Context, in *tasks.AckTaskRequest, opts ...grpc.CallOption) (*tasks.AckTaskResponse, error) { + return nil, 
ErrNotImplemented +} + +func (s *fakeTaskService) AddLog(ctx context.Context, in *tasks.AddLogRequest, opts ...grpc.CallOption) (*tasks.AddLogResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetQueueAllInfo(ctx context.Context, in *tasks.QueueAllInfoRequest, opts ...grpc.CallOption) (*tasks.QueueAllInfoResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetQueueInfo(ctx context.Context, in *tasks.QueueInfoRequest, opts ...grpc.CallOption) (*tasks.QueueInfoResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetTaskLogs(ctx context.Context, in *tasks.GetTaskLogsRequest, opts ...grpc.CallOption) (*tasks.GetTaskLogsResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetTask(ctx context.Context, in *tasks.GetTaskRequest, opts ...grpc.CallOption) (*tasks.GetTaskResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) RemoveTaskFromQueue(ctx context.Context, in *tasks.RemoveTaskRequest, opts ...grpc.CallOption) (*tasks.RemoveTaskResponse, error) { + return nil, ErrNotImplemented +} + +func (s *fakeTaskService) GetQueueSizesForTasks(ctx context.Context, in *tasks.QueueSizesRequest, opts ...grpc.CallOption) (*tasks.QueueSizesResponse, error) { + return nil, ErrNotImplemented +} + + +type fakeTaskClient struct { + tasks *fakeTaskService +} + +func (c *fakeTaskClient) Tasks() tasks.TaskServiceClient { + return c.tasks +} + +func (c *fakeTaskClient) forceShutdown(err error) { + c.tasks.result = err + close(c.tasks.shutdown) +} + +func (c *fakeTaskClient) Shutdown() { + c.tasks.result = io.EOF + close(c.tasks.shutdown) +} + +func newFakeTaskClient(latency time.Duration) *fakeTaskClient { + return &fakeTaskClient{ + tasks: &fakeTaskService{ + shutdown: make(chan struct{}), + latency: latency, + }, + } +} + +type slowExecutor struct { + mu sync.Mutex + recv []*model.Task + delay time.Duration +} + +func (exe *slowExecutor) 
Execute(ctx context.Context, m *model.Task) (*model.TaskResult, error) { + exe.mu.Lock() + exe.recv = append(exe.recv, m) + exe.mu.Unlock() + + time.Sleep(exe.delay) + return &model.TaskResult{ + TaskId: m.GetTaskId(), + Status: model.TaskResult_COMPLETED, + }, nil +} + +func (exe *slowExecutor) ConnectionError(err error) error { + panic(err) +} + +func TestWorkerInterface(t *testing.T) { + mock := newFakeTaskClient(200 * time.Millisecond) + exec := &slowExecutor{ + delay: 100 * time.Millisecond, + } + + worker := &Worker{ + TaskType: "fake-task", + Concurrency: 4, + Executor: exec, + Client: mock, + } + + time.AfterFunc(1*time.Second, func() { + worker.Shutdown() + }) + + assert.NoError(t, worker.Run()) + + for id, completed := range mock.tasks.completed { + assert.Truef(t, completed, "task %s was not reported as completed", id) + } + assert.Equal(t, len(mock.tasks.completed), len(exec.recv)) +} diff --git a/polyglot-clients/gogrpc/go.mod b/polyglot-clients/gogrpc/go.mod new file mode 100644 index 0000000000..ff2ebf1cfb --- /dev/null +++ b/polyglot-clients/gogrpc/go.mod @@ -0,0 +1,6 @@ +module github.com/netflix/conductor/client/gogrpc + +require ( + github.com/stretchr/testify v1.2.1 + google.golang.org/grpc v1.15.0 +) diff --git a/polyglot-clients/gogrpc/go.sum b/polyglot-clients/gogrpc/go.sum new file mode 100644 index 0000000000..5db68e5cb1 --- /dev/null +++ b/polyglot-clients/gogrpc/go.sum @@ -0,0 +1,21 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/golang/protobuf v1.1.0 h1:0iH4Ffd/meGoXqF2lSAhZHt8X+cPgkfn/cb6Cce5Vpc= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/netflix/conductor v1.11.5 h1:HxVBurFWxFtz56wepyiXFkLunRISlXTKfBlkuAnXPd0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod 
h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/square/goprotowrap v0.0.0-20180504135057-6f414ea4a80c h1:iIIx5xujWT8vyoW+umx0/JSpNvZQgQUv9krqlN2AOTA= +github.com/square/goprotowrap v0.0.0-20180504135057-6f414ea4a80c/go.mod h1:ss+tcSDAsyytwf1fIIsDTBbLS5uMvktdl8DvEZwELx4= +github.com/stretchr/testify v1.2.1 h1:52QO5WkIUcHGIR7EnGagH88x1bUzqGXTC5/1bDTUQ7U= +github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d h1:g9qWBGx4puODJTMVyoPrpoxPFgVGd+z1DZwjfRu4d0I= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33 h1:I6FyU15t786LL7oL/hn43zqTuEGr4PN7F4XJ1p4E3Y8= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b h1:lohp5blsw53GBXtLyLNaTXPXS9pJ1tiTw61ZHUoE9Qw= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/grpc v1.14.0 h1:ArxJuB1NWfPY6r9Gp9gqwplT0Ge7nqv9msgu03lHLmo= +google.golang.org/grpc v1.14.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= diff --git a/polyglot-clients/gogrpc/tools.go b/polyglot-clients/gogrpc/tools.go new file mode 100644 index 0000000000..5b4c8025ad --- /dev/null +++ b/polyglot-clients/gogrpc/tools.go @@ -0,0 +1,9 @@ +// +build tools + +package tools + +import ( + _ "github.com/golang/protobuf/protoc-gen-go" + _ "github.com/square/goprotowrap/cmd/protowrap" + _ "github.com/kazegusuri/grpcurl" +) diff --git a/polyglot-clients/python/.gitignore b/polyglot-clients/python/.gitignore new file mode 100644 index 0000000000..3ce1d246a4 --- /dev/null +++ b/polyglot-clients/python/.gitignore 
@@ -0,0 +1 @@ +.pytest_cache diff --git a/polyglot-clients/python/README.md b/polyglot-clients/python/README.md new file mode 100644 index 0000000000..da64aeac37 --- /dev/null +++ b/polyglot-clients/python/README.md @@ -0,0 +1,68 @@ +# Python client for Conductor +Python client for Conductor provides two sets of functions: + +1. Workflow management APIs (start, terminate, get workflow status etc.) +2. Worker execution framework + +## Install + +```Using virtualenv + virtualenv conductorclient + source conductorclient/bin/activate + cd ../conductor/client/python + python setup.py install +``` + +## Using Workflow Management API +Python class ```WorkflowClient``` provides client API calls to the conductor server to start manage the workflows. + +### Example + +```python +import sys +from conductor import conductor +import json + +def getStatus(workflowId): + + workflowClient = conductor.WorkflowClient('http://localhost:8080/api') + + workflow_json = workflowClient.getWorkflow(workflowId) + print json.dumps(workflow_json, indent=True, separators=(',', ': ')) + + return workflow_json + +``` + +## Task Worker Execution +Task worker execution APIs facilitates execution of a task worker using python client. +The API provides necessary mechanism to poll for task work at regular interval and executing the python worker in a separate threads. + +### Example +The following python script demonstrates workers for the kitchensink workflow. 
+ +```python +from __future__ import print_function +from conductor.ConductorWorker import ConductorWorker + +def execute(task): + return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0}, 'logs': ['one', 'two']} + +def execute4(task): + forkTasks = [{"name": "task_1", "taskReferenceName": "task_1_1", "type": "SIMPLE"},{"name": "sub_workflow_4", "taskReferenceName": "wf_dyn", "type": "SUB_WORKFLOW", "subWorkflowParam": {"name": "sub_flow_1"}}]; + input = {'task_1_1': {}, 'wf_dyn': {}} + return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0, 'dynamicTasks': forkTasks, 'inputs': input}, 'logs': ['one','two']} + +def main(): + print('Starting Kitchensink workflows') + cc = ConductorWorker('http://localhost:8080/api', 1, 0.1) + for x in range(1, 30): + if(x == 4): + cc.start('task_{0}'.format(x), execute4, False) + else: + cc.start('task_{0}'.format(x), execute, False) + cc.start('task_30', execute, True) + +if __name__ == '__main__': + main() +``` diff --git a/polyglot-clients/python/conductor/ConductorWorker.py b/polyglot-clients/python/conductor/ConductorWorker.py new file mode 100644 index 0000000000..6bae62ea11 --- /dev/null +++ b/polyglot-clients/python/conductor/ConductorWorker.py @@ -0,0 +1,182 @@ +# +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import print_function, absolute_import +import sys +import time +from conductor.conductor import WFClientMgr +from threading import Thread +import socket +from enum import Enum + +hostname = socket.gethostname() + +class TaskStatus(Enum): + IN_PROGRESS = 'IN_PROGRESS' + FAILED = 'FAILED' + FAILED_WITH_TERMINAL_ERROR = 'FAILED_WITH_TERMINAL_ERROR' + COMPLETED = 'COMPLETED' + + def __str__(self): + return str(self.value) + + + +class ConductorWorker: + """ + Main class for implementing Conductor Workers + + A conductor worker is a separate system that executes the various + tasks that the conductor server queues up for execution. The worker + can run on the same instance as the server or on a remote instance. + + The worker generally provides a wrapper around some function that + performs the actual execution of the task. The function that is + being executed must return a `dict` with the `status`, `output` and + `log` keys. If these keys are not present, the worker will raise an + Exception after completion of the task. + + The start method is used to begin continous polling and execution + of the tasks that the conductor server makes available. The same + script can run multiple workers using the wait argument. For more + details, view the start method + """ + def __init__(self, server_url, thread_count, polling_interval, worker_id=None): + """ + Parameters + ---------- + server_url: str + The url to the server hosting the conductor api. + Ex: 'http://localhost:8080/api' + thread_count: int + The number of threads that will be polling for and + executing tasks in case of using the start method. + polling_interval: float + The number of seconds that each worker thread will wait + between polls to the conductor server. + worker_id: str, optional + The worker_id of the worker that is going to execute the + task. 
For further details, refer to the documentation + By default, it is set to hostname of the machine + """ + wfcMgr = WFClientMgr(server_url) + self.workflowClient = wfcMgr.workflowClient + self.taskClient = wfcMgr.taskClient + self.thread_count = thread_count + self.polling_interval = polling_interval + self.worker_id = worker_id or hostname + + @staticmethod + def task_result(status: TaskStatus, output=None, logs=None, reasonForIncompletion=None): + """ + Get task result + Parameters + ---------- + status: TaskStatus + The status of the task + Ex: TaskStatus.COMPLETED + output: dict + results of task processing + logs: list + log list + reasonForIncompletion: str, optional + the reason for not completing the task if any + """ + if logs is None: + logs = [] + if output is None: + output = {} + ret = { + 'status': status.__str__(), + 'output': output, + 'logs': logs + } + if reasonForIncompletion: + ret['reasonForIncompletion'] = reasonForIncompletion + return ret + + def execute(self, task, exec_function): + try: + resp = exec_function(task) + if type(resp) is not dict or not all(key in resp for key in ('status', 'output', 'logs')): + raise Exception('Task execution function MUST return a response as a dict with status, output and logs fields') + task['status'] = resp['status'] + task['outputData'] = resp['output'] + task['logs'] = resp['logs'] + if 'reasonForIncompletion' in resp: + task['reasonForIncompletion'] = resp['reasonForIncompletion'] + self.taskClient.updateTask(task) + except Exception as err: + print(f'Error executing task: {exec_function.__name__} with error: {str(err)}') + task['status'] = 'FAILED' + self.taskClient.updateTask(task) + + def poll_and_execute(self, taskType, exec_function, domain=None): + while True: + time.sleep(float(self.polling_interval)) + polled = self.taskClient.pollForTask(taskType, self.worker_id, domain) + if polled is not None: + self.taskClient.ackTask(polled['taskId'], self.worker_id) + self.execute(polled, exec_function) + 
+ def start(self, taskType, exec_function, wait, domain=None): + """ + start begins the continuous polling of the conductor server + + Parameters + ---------- + taskType: str + The name of the task that the worker is looking to execute + exec_function: function + The function that the worker will execute. The function + must return a dict with the `status`, `output` and `logs` + keys present. If this is not present, an Exception will be + raised + wait: bool + Whether the worker will block execution of further code. + Since the workers are being run in daemon threads, when the + program completes execution, all the threads are destroyed. + Setting wait to True prevents the program from ending. + If multiple workers are being called from the same program, + all but the last start call but have wait set to False. + The last start call must always set wait to True. If a + single worker is being called, set wait to True. + domain: str, optional + The domain of the task under which the worker will run. 
For + further details refer to the conductor server documentation + By default, it is set to None + """ + print('Polling for task %s at a %f ms interval with %d threads for task execution, with worker id as %s' % (taskType, self.polling_interval * 1000, self.thread_count, self.worker_id)) + for x in range(0, int(self.thread_count)): + thread = Thread(target=self.poll_and_execute, args=(taskType, exec_function, domain,)) + thread.daemon = True + thread.start() + if wait: + while 1: + time.sleep(1) + + +def exc(taskType, inputData, startTime, retryCount, status, callbackAfterSeconds, pollCount): + print('Executing the function') + return {'status': 'COMPLETED', 'output': {}, 'logs': []} + + +def main(): + cc = ConductorWorker('http://localhost:8080/api', 5, 0.1) + cc.start(sys.argv[1], exc, False) + cc.start(sys.argv[2], exc, True) + + +if __name__ == '__main__': + main() diff --git a/polyglot-clients/python/conductor/__init__.py b/polyglot-clients/python/conductor/__init__.py new file mode 100644 index 0000000000..137d70b6b0 --- /dev/null +++ b/polyglot-clients/python/conductor/__init__.py @@ -0,0 +1,19 @@ +# +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +__version__ = '1.0.0' +VERSION = tuple(map(int, __version__.split('.'))) + +__all__ = ['conductor','ConductorWorker'] diff --git a/polyglot-clients/python/conductor/conductor.py b/polyglot-clients/python/conductor/conductor.py new file mode 100644 index 0000000000..603e9d06f2 --- /dev/null +++ b/polyglot-clients/python/conductor/conductor.py @@ -0,0 +1,381 @@ +# +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import print_function +import requests +import json +import sys +import socket +import warnings + + +hostname = socket.gethostname() + + +class BaseClient(object): + printUrl = False + headers = {'Content-Type': 'application/json', 'Accept': 'application/json'} + + def __init__(self, baseURL, baseResource): + self.baseURL = baseURL + self.baseResource = baseResource + + def get(self, resPath, queryParams=None): + theUrl = "{}/{}".format(self.baseURL, resPath) + resp = requests.get(theUrl, params=queryParams) + self.__checkForSuccess(resp) + if(resp.content == b''): + return None + else: + return resp.json() + + def post(self, resPath, queryParams, body, headers=None): + theUrl = "{}/{}".format(self.baseURL, resPath) + theHeader = self.headers + if headers is not None: + theHeader = self.mergeTwoDicts(self.headers, headers) + if body is not None: + jsonBody = json.dumps(body, ensure_ascii=False).encode('utf8') + resp = requests.post(theUrl, params=queryParams, data=jsonBody, headers=theHeader) + else: + resp = 
requests.post(theUrl, params=queryParams, headers=theHeader) + + self.__checkForSuccess(resp) + return self.__return(resp, theHeader) + + def put(self, resPath, queryParams=None, body=None, headers=None): + theUrl = "{}/{}".format(self.baseURL, resPath) + theHeader = self.headers + if headers is not None: + theHeader = self.mergeTwoDicts(self.headers, headers) + + if body is not None: + jsonBody = json.dumps(body, ensure_ascii=False).encode('utf8') + resp = requests.put(theUrl, params=queryParams, data=jsonBody, headers=theHeader) + else: + resp = requests.put(theUrl, params=queryParams, headers=theHeader) + + self.__print(resp) + self.__checkForSuccess(resp) + + def delete(self, resPath, queryParams): + theUrl = "{}/{}".format(self.baseURL, resPath) + resp = requests.delete(theUrl, params=queryParams) + self.__print(resp) + self.__checkForSuccess(resp) + + def makeUrl(self, urlformat=None, *argv): + url = self.baseResource + '/' + if urlformat: + url += urlformat.format(*argv) + return url + + def makeParams(self, **kwargs): + return dict((k, v) for k, v in kwargs.items() if v is not None) or None + + def mergeTwoDicts(self, x, y): + z = x.copy() + z.update(y) + return z + + def __print(self, resp): + if self.printUrl: + print(resp.url) + + def __return(self, resp, header): + retval = '' + if len(resp.text) > 0: + if header['Accept'] == 'text/plain': + retval = resp.text + elif header['Accept'] == 'application/json': + retval = resp.json() + else: + retval = resp.text + return retval + + def __checkForSuccess(self, resp): + try: + resp.raise_for_status() + except requests.HTTPError: + print("ERROR: " + resp.text) + raise + + +class MetadataClient(BaseClient): + BASE_RESOURCE = 'metadata' + + def __init__(self, baseURL): + BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) + + def getWorkflowDef(self, wfname, version=None): + url = self.makeUrl('workflow/{}', wfname) + return self.get(url, self.makeParams(version=version)) + + def createWorkflowDef(self, 
wfdObj): + url = self.makeUrl('workflow') + return self.post(url, None, wfdObj) + + def updateWorkflowDefs(self, listOfWfdObj): + url = self.makeUrl('workflow') + self.put(url, None, listOfWfdObj) + + def getAllWorkflowDefs(self): + url = self.makeUrl('workflow') + return self.get(url) + + def unRegisterWorkflowDef(self, wfname, version): + url = self.makeUrl("workflow/{name}/{version}".format(name=wfname, version=version)) + self.delete(url, None) + + def getTaskDef(self, tdName): + url = self.makeUrl('taskdefs/{}', tdName) + return self.get(url) + + def registerTaskDefs(self, listOfTaskDefObj): + url = self.makeUrl('taskdefs') + return self.post(url, None, listOfTaskDefObj) + + def registerTaskDef(self, taskDefObj): + """registerTaskDef is deprecated since PUT /metadata/taskdefs does not + register but updates a task definition. Use updateTaskDef function + instead. + """ + warnings.warn(self.registerTaskDef.__doc__, DeprecationWarning) + url = self.makeUrl('taskdefs') + self.put(url, None, taskDefObj) + + def updateTaskDef(self, taskDefObj): + url = self.makeUrl('taskdefs') + self.put(url, None, taskDefObj) + + def unRegisterTaskDef(self, tdName, reason=None): + url = self.makeUrl('taskdefs/{}', tdName) + self.delete(url, self.makeParams(reason=reason)) + + def getAllTaskDefs(self): + url = self.makeUrl('taskdefs') + return self.get(url) + + +class TaskClient(BaseClient): + BASE_RESOURCE = 'tasks' + + def __init__(self, baseURL): + BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) + + def getTask(self, taskId): + url = self.makeUrl('{}', taskId) + return self.get(url) + + def updateTask(self, taskObj): + url = self.makeUrl('') + headers = {'Accept': 'text/plain'} + self.post(url, None, taskObj, headers) + + def pollForTask(self, taskType, workerid, domain=None): + url = self.makeUrl('poll/{}', taskType) + params = {} + params['workerid'] = workerid + if domain is not None: + params['domain'] = domain + + try: + return self.get(url, params) + except 
Exception as err: + print('Error while polling ' + str(err)) + return None + + def pollForBatch(self, taskType, count, timeout, workerid, domain=None): + url = self.makeUrl('poll/batch/{}', taskType) + params = {} + params['workerid'] = workerid + params['count'] = count + params['timeout'] = timeout + + if domain is not None: + params['domain'] = domain + + try: + return self.get(url, params) + except Exception as err: + print('Error while polling ' + str(err)) + return None + + def ackTask(self, taskId, workerid): + url = self.makeUrl('{}/ack', taskId) + params = {} + params['workerid'] = workerid + headers = {'Accept': 'application/json'} + value = self.post(url, params, None, headers) + return value in ['true', True] + + def getTasksInQueue(self, taskName): + url = self.makeUrl('queue/{}', taskName) + return self.get(url) + + def removeTaskFromQueue(self, taskId, reason=None): + url = self.makeUrl('queue/{}', taskId) + params = {} + params['reason'] = reason + self.delete(url, params) + + def getTaskQueueSizes(self, listOfTaskName): + url = self.makeUrl('queue/sizes') + return self.post(url, None, listOfTaskName) + + +class WorkflowClient(BaseClient): + BASE_RESOURCE = 'workflow' + + def __init__(self, baseURL): + BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) + + def getWorkflow(self, wfId, includeTasks=True): + url = self.makeUrl('{}', wfId) + params = {} + params['includeTasks'] = includeTasks + return self.get(url, params) + + def getRunningWorkflows(self, wfName, version=None, startTime=None, endTime=None): + url = self.makeUrl('running/{}', wfName) + params = {} + params['version'] = version + params['startTime'] = startTime + params['endTime'] = endTime + return self.get(url, params) + + def startWorkflow(self, wfName, inputjson, version=None, correlationId=None): + url = self.makeUrl('{}', wfName) + params = {} + params['version'] = version + params['correlationId'] = correlationId + headers = {'Accept': 'text/plain'} + return self.post(url, 
params, inputjson, headers) + + def terminateWorkflow(self, wfId, reason=None): + url = self.makeUrl('{}', wfId) + params = {} + params['reason'] = reason + self.delete(url, params) + + def removeWorkflow(self, wfId, archiveWorkflow, reason=None): + url = self.makeUrl('{}/remove', wfId) + self.delete(url, self.makeParams(archiveWorkflow=archiveWorkflow, reason=reason)) + + def pauseWorkflow(self, wfId): + url = self.makeUrl('{}/pause', wfId) + self.put(url) + + def resumeWorkflow(self, wfId): + url = self.makeUrl('{}/resume', wfId) + self.put(url) + + def skipTaskFromWorkflow(self, wfId, taskRefName, skipTaskRequest): + url = self.makeUrl('{}/skiptask/{}', wfId, taskRefName) + self.post(url, None, skipTaskRequest) + + def rerunWorkflow(self, wfId, taskRefName, rerunWorkflowRequest): + url = self.makeUrl('{}/rerun', wfId) + return self.post(url, None, rerunWorkflowRequest) + + def restartWorkflow(self, wfId, taskRefName, fromTaskRef): + url = self.makeUrl('{}/restart', wfId) + params = {} + params['from'] = fromTaskRef + self.post(url, params, None) + +class EventServicesClient(BaseClient): + BASE_RESOURCE = 'event' + + def __init__(self, baseURL): + BaseClient.__init__(self, baseURL, self.BASE_RESOURCE) + + def getEventHandlerDef(self, event, activeOnly=True): + url = self.makeUrl('{}', event) + params = {} + params['activeOnly'] = activeOnly + return self.get(url, params) + + def getEventHandlerDefs(self): + url = self.makeUrl() + return self.get(url) + + def createEventHandlerDef(self, ehObj): + url = self.makeUrl() + return self.post(url, None, ehObj) + + def updateEventHandlerDef(self, ehObj): + url = self.makeUrl() + return self.put(url, None, ehObj) + + def removeEventHandler(self, ehName): + url = self.makeUrl('{}', ehName) + self.delete(url, {}) + + def getEventHandlerQueues(self): + url = self.makeUrl('queues') + return self.get(url) + + def getEventHandlerQueuesProviders(self): + url = self.makeUrl('queues/providers') + return self.get(url) + +class 
WFClientMgr: + def __init__(self, server_url='http://localhost:8080/api/'): + self.workflowClient = WorkflowClient(server_url) + self.taskClient = TaskClient(server_url) + self.metadataClient = MetadataClient(server_url) + + +def main(): + if len(sys.argv) < 3: + print("Usage - python conductor server_url command parameters...") + return None + + server_url = sys.argv[1] + command = sys.argv[2] + wfcMgr = WFClientMgr(server_url) + wfc = wfcMgr.workflowClient + if command == 'start': + if len(sys.argv) < 7: + print('python conductor server_url start workflow_name input_json [version] [correlationId]') + return None + wfName = sys.argv[3] + input = json.loads(sys.argv[5]) + correlationId = sys.argv[6] + workflowId = wfc.startWorkflow(wfName, input, 1, correlationId) + print(workflowId) + return workflowId + elif command == 'get': + if len(sys.argv) < 4: + print('python conductor server_url get workflow_id') + return None + wfId = sys.argv[3] + wfjson = wfc.getWorkflow(wfId) + print(json.dumps(wfjson, indent=True, separators=(',', ': '))) + return wfjson + elif command == 'terminate': + if len(sys.argv) < 4: + print('python conductor server_url terminate workflow_id') + return None + wfId = sys.argv[3] + wfc.terminateWorkflow(wfId) + print('OK') + return wfId + + +if __name__ == '__main__': + main() + diff --git a/polyglot-clients/python/conductor_shell.py b/polyglot-clients/python/conductor_shell.py new file mode 100644 index 0000000000..33e0673fe2 --- /dev/null +++ b/polyglot-clients/python/conductor_shell.py @@ -0,0 +1,56 @@ +# +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import print_function +import sys +from conductor import conductor +import json + + +def main(): + if(len(sys.argv) < 3): + print("Usage - python conductor server_url command parameters...") + return None + + wfc = conductor.WorkflowClient(sys.argv[1]) + command = sys.argv[2] + if command == 'start': + if len(sys.argv) < 5: + print('python conductor server_url start workflow_name input_json [version] [correlationId]') + return None + wfName = sys.argv[3] + input = json.loads(sys.argv[4]) + workflowId = wfc.startWorkflow(wfName, input, 1, None) + print(workflowId) + return workflowId + elif command == 'get': + if len(sys.argv) < 4: + print('python conductor server_url get workflow_id') + return None + wfId = sys.argv[3] + wfjson = wfc.getWorkflow(wfId) + print(json.dumps(wfjson, indent=True, separators=(',', ': '))) + return wfjson + elif command == 'terminate': + if len(sys.argv) < 4: + print('python conductor server_url terminate workflow_id') + return None + wfId = sys.argv[3] + wfjson = wfc.terminateWorkflow(wfId) + print('OK') + return wfId + +if __name__ == '__main__': + main() diff --git a/polyglot-clients/python/kitchensink_workers.py b/polyglot-clients/python/kitchensink_workers.py new file mode 100644 index 0000000000..31e95072e8 --- /dev/null +++ b/polyglot-clients/python/kitchensink_workers.py @@ -0,0 +1,27 @@ +from __future__ import print_function +from conductor.ConductorWorker import ConductorWorker,TaskStatus + +def execute(task): + return ConductorWorker.task_result( + status=TaskStatus.COMPLETED, + output= {'mod': 5, 
'taskToExecute': 'task_1', 'oddEven': 0}, + logs=['one','two'] + ) + +def execute4(task): + forkTasks = [{"name": "task_1", "taskReferenceName": "task_1_1", "type": "SIMPLE"},{"name": "sub_workflow_4", "taskReferenceName": "wf_dyn", "type": "SUB_WORKFLOW", "subWorkflowParam": {"name": "sub_flow_1"}}]; + input = {'task_1_1': {}, 'wf_dyn': {}} + return {'status': 'COMPLETED', 'output': {'mod': 5, 'taskToExecute': 'task_1', 'oddEven': 0, 'dynamicTasks': forkTasks, 'inputs': input}, 'logs': ['one','two']} + +def main(): + print('Starting Kitchensink workflows') + cc = ConductorWorker('http://localhost:8080/api', 1, 0.1) + for x in range(1, 30): + if(x == 4): + cc.start('task_{0}'.format(x), execute4, False) + else: + cc.start('task_{0}'.format(x), execute, False) + cc.start('task_30', execute, True) + +if __name__ == '__main__': + main() diff --git a/polyglot-clients/python/setup.cfg b/polyglot-clients/python/setup.cfg new file mode 100644 index 0000000000..b88034e414 --- /dev/null +++ b/polyglot-clients/python/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +description-file = README.md diff --git a/polyglot-clients/python/setup.py b/polyglot-clients/python/setup.py new file mode 100644 index 0000000000..053d27bede --- /dev/null +++ b/polyglot-clients/python/setup.py @@ -0,0 +1,45 @@ +# +# Copyright 2017 Netflix, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from setuptools import setup + +setup( + name = 'conductor', + packages = ['conductor'], # this must be the same as the name above + version = '1.0.0', + description = 'Conductor python client', + author = 'Viren Baraiya', + author_email = 'vbaraiya@netflix.com', + url = 'https://github.com/netflix/conductor', + download_url = 'https://github.com/Netflix/conductor/releases', + keywords = ['conductor'], + license = 'Apache 2.0', + install_requires = [ + 'requests', + ], + classifiers = [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'Intended Audience :: System Administrators', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python :: 2.7', + 'Topic :: Workflow', + 'Topic :: Microservices', + 'Topic :: Orchestration', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + 'Topic :: System :: Networking' + ], +) diff --git a/polyglot-clients/python/test_conductor.py b/polyglot-clients/python/test_conductor.py new file mode 100644 index 0000000000..0130f86557 --- /dev/null +++ b/polyglot-clients/python/test_conductor.py @@ -0,0 +1,89 @@ +import threading +import mock +import json +from conductor.conductor import TaskClient +from conductor.ConductorWorker import ConductorWorker + + +@mock.patch('requests.get') +def test_pollForTask(requests_get): + task_client = TaskClient('base') + task_client.pollForTask('fooType', 'barWorker') + requests_get.assert_called_with('base/tasks/poll/fooType', params={'workerid': 'barWorker'}) + + task_client.pollForTask('fooType', 'barWorker', 'bazDomain') + requests_get.assert_called_with('base/tasks/poll/fooType', + params={'workerid': 'barWorker', 'domain': 'bazDomain'}) + + +@mock.patch('requests.get') +def test_pollForBatch(requests_get): + task_client = TaskClient('base') + task_client.pollForBatch('fooType', 20, 100, 'barWorker') + requests_get.assert_called_with( + 'base/tasks/poll/batch/fooType', + params={'workerid': 
'barWorker', 'count': 20, 'timeout': 100}) + + task_client.pollForBatch('fooType', 20, 100, 'barWorker', 'a_domain') + requests_get.assert_called_with( + 'base/tasks/poll/batch/fooType', + params={'workerid': 'barWorker', 'count': 20, 'timeout': 100, 'domain': 'a_domain'}) + + +@mock.patch('requests.post') +def test_ackTask(requests_post): + task_client = TaskClient('base') + task_client.ackTask('42', 'myWorker') + requests_post.assert_called_with( + 'base/tasks/42/ack', + headers={'Content-Type': 'application/json', 'Accept': 'application/json'}, + params={'workerid': 'myWorker'}) + + +@mock.patch('requests.post') +def test_updateTask(post): + task_client = TaskClient('base') + task_obj = {'task_id': '123', 'result': 'fail'} + task_client.updateTask(task_obj) + post.assert_called_with( + 'base/tasks/', + data=json.dumps(task_obj), + headers={'Accept': 'application/json', 'Content-Type': 'application/json'}, params=None) + + +def test_conductor_worker(): + num_threads = 2 + worker = ConductorWorker('http://server_url', num_threads, 0.1, 'wid') + num_tasks = num_threads * 3 + id_range = range(123, 123 + num_tasks) + events = [threading.Event() for _ in id_range] + return_val = {'status': '', 'output': 'out', 'logs': []} + + tasks = [{'taskId': str(n)} for n in id_range] + + # output is named outputData in the resulting task + out_tasks = [{'status': '', 'outputData': 'out', 'logs': [], 'taskId': task['taskId']} for task in tasks] + + def exec_function(task): + assert task in tasks + tasks.remove(task) + for ev in events: + if not ev.is_set(): + ev.set() + break + return return_val + + # verify conductor worker call the appropriate method in TaskClient, acks the task, and updates the output + poll = mock.Mock() + ack = mock.Mock() + update = mock.Mock() + with mock.patch.multiple('conductor.conductor.TaskClient', pollForTask=poll, updateTask=update, ackTask=ack): + poll.side_effect = tasks + [None] * num_threads + worker.start('task_a', exec_function, False, 
'my_domain') + for ev in events: + assert ev.wait(2) is True + + poll.assert_has_calls([mock.call('task_a', 'wid', 'my_domain')] * num_tasks) + ack.assert_has_calls([mock.call(str(i), 'wid') for i in id_range]) + update.assert_has_calls([mock.call(t) for t in out_tasks]) + diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java index 325e48887d..7159cfcd23 100644 --- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java +++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresDataSourceProvider.java @@ -15,14 +15,15 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; -import java.nio.file.Paths; -import java.util.concurrent.ThreadFactory; -import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.sql.DataSource; +import java.nio.file.Paths; +import java.util.concurrent.ThreadFactory; + public class PostgresDataSourceProvider { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresDataSourceProvider.class); @@ -63,9 +64,9 @@ private HikariConfig createConfiguration() { hikariConfig.setAutoCommit(properties.isAutoCommit()); ThreadFactory tf = new ThreadFactoryBuilder() - .setDaemon(true) - .setNameFormat("hikari-postgres-%d") - .build(); + .setDaemon(true) + .setNameFormat("hikari-postgres-%d") + .build(); hikariConfig.setThreadFactory(tf); return hikariConfig; @@ -79,17 +80,16 @@ private void flywayMigrate(DataSource dataSource) { return; } - FluentConfiguration fluentConfiguration = Flyway.configure() - .locations(Paths.get("db", "migration_postgres").toString()) - 
.dataSource(dataSource) - .placeholderReplacement(false); + String flywayTable = properties.getFlywayTable(); + LOGGER.debug("Using Flyway migration table '{}'", flywayTable); - properties.getFlywayTable().ifPresent(tableName -> { - LOGGER.debug("Using Flyway migration table '{}'", tableName); - fluentConfiguration.table(tableName); - }); + FluentConfiguration fluentConfiguration = Flyway.configure() + .table(flywayTable) + .locations(Paths.get("db", "migration_postgres").toString()) + .dataSource(dataSource) + .placeholderReplacement(false); - Flyway flyway = new Flyway(fluentConfiguration); + Flyway flyway = fluentConfiguration.load(); flyway.migrate(); } } diff --git a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java index e9f19da88e..6d0dba6468 100644 --- a/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java +++ b/postgres-persistence/src/main/java/com/netflix/conductor/postgres/config/PostgresProperties.java @@ -12,12 +12,12 @@ */ package com.netflix.conductor.postgres.config; +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.boot.convert.DurationUnit; + import java.sql.Connection; import java.time.Duration; import java.time.temporal.ChronoUnit; -import java.util.Optional; -import org.springframework.boot.context.properties.ConfigurationProperties; -import org.springframework.boot.convert.DurationUnit; @ConfigurationProperties("conductor.postgres") public class PostgresProperties { @@ -45,7 +45,7 @@ public class PostgresProperties { /** * Used to override the default flyway migration table */ - private String flywayTable = null; + private String flywayTable = "schema_version"; // The defaults are currently in line with the HikariConfig defaults, which are unfortunately private. 
/** @@ -125,8 +125,8 @@ public void setFlywayEnabled(boolean flywayEnabled) { this.flywayEnabled = flywayEnabled; } - public Optional getFlywayTable() { - return Optional.ofNullable(flywayTable); + public String getFlywayTable() { + return flywayTable; } public void setFlywayTable(String flywayTable) { diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/performance/PerformanceTest.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/performance/PerformanceTest.java index b874cac7b1..01171be6ae 100644 --- a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/performance/PerformanceTest.java +++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/performance/PerformanceTest.java @@ -417,10 +417,13 @@ // } // // private void flywayMigrate(DataSource dataSource) { -// Flyway flyway = new Flyway(); -// flyway.setDataSource(dataSource); -// flyway.setPlaceholderReplacement(false); -// flyway.setLocations(Paths.get("db", "migration_postgres").toString()); +// FluentConfiguration flywayConfiguration = Flyway.configure() +// .table(configuration.getFlywayTable()) +// .locations(Paths.get("db","migration_postgres").toString()) +// .dataSource(dataSource) +// .placeholderReplacement(false); +// +// Flyway flyway = flywayConfiguration.load(); // try { // flyway.migrate(); // } catch (FlywayException e) { diff --git a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java index 49f6b2cb4a..20182aa042 100644 --- a/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java +++ b/postgres-persistence/src/test/java/com/netflix/conductor/postgres/util/PostgresDAOTestUtil.java @@ -12,18 +12,9 @@ */ package com.netflix.conductor.postgres.util; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - import 
com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.postgres.config.PostgresProperties; import com.zaxxer.hikari.HikariDataSource; -import java.nio.file.Paths; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.SQLException; -import java.time.Duration; -import javax.sql.DataSource; import org.flywaydb.core.Flyway; import org.flywaydb.core.api.configuration.FluentConfiguration; import org.postgresql.ds.PGSimpleDataSource; @@ -31,6 +22,16 @@ import org.slf4j.LoggerFactory; import org.testcontainers.containers.PostgreSQLContainer; +import javax.sql.DataSource; +import java.nio.file.Paths; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.time.Duration; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + public class PostgresDAOTestUtil { private static final Logger LOGGER = LoggerFactory.getLogger(PostgresDAOTestUtil.class); @@ -76,11 +77,12 @@ private HikariDataSource getDataSource(PostgresProperties properties) { private void flywayMigrate(DataSource dataSource) { FluentConfiguration fluentConfiguration = Flyway.configure() - .locations(Paths.get("db", "migration_postgres").toString()) - .dataSource(dataSource) - .placeholderReplacement(false); + .table("schema_version") + .locations(Paths.get("db", "migration_postgres").toString()) + .dataSource(dataSource) + .placeholderReplacement(false); - Flyway flyway = new Flyway(fluentConfiguration); + Flyway flyway = fluentConfiguration.load(); flyway.migrate(); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java index c6a2ce497c..249a85d7cc 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java +++ 
b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/DynomiteClusterConfiguration.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 Netflix, Inc. + * Copyright 2021 Netflix, Inc. *

* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at @@ -35,8 +35,9 @@ protected JedisCommands createJedisCommands(RedisProperties properties, Conducto .setSocketTimeout(0) .setConnectTimeout(0) .setMaxConnsPerHost( - properties.getMaxConnectionsPerHost() - ); + properties.getMaxConnectionsPerHost()) + .setMaxTimeoutWhenExhausted((int)properties.getMaxTimeoutWhenExhausted().toMillis()) + .setRetryPolicyFactory(properties.getConnectionRetryPolicy()); return new DynoJedisClient.Builder() .withHostSupplier(hostSupplier) diff --git a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java index a199970a1b..ae4280726d 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/redis/config/RedisProperties.java @@ -1,5 +1,5 @@ /* - * Copyright 2020 Netflix, Inc. + * Copyright 2021 Netflix, Inc. *

* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at @@ -14,12 +14,16 @@ import com.netflix.conductor.core.config.ConductorProperties; import com.netflix.conductor.redis.dynoqueue.RedisQueuesShardingStrategyProvider; -import java.time.Duration; -import java.time.temporal.ChronoUnit; +import com.netflix.dyno.connectionpool.RetryPolicy.RetryPolicyFactory; +import com.netflix.dyno.connectionpool.impl.RetryNTimes; +import com.netflix.dyno.connectionpool.impl.RunOnce; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.convert.DurationUnit; +import java.time.Duration; +import java.time.temporal.ChronoUnit; + @ConfigurationProperties("conductor.redis") public class RedisProperties { @@ -71,6 +75,16 @@ public RedisProperties(ConductorProperties conductorProperties) { */ private int maxConnectionsPerHost = 10; + /** + * The maximum amount of time to wait for a connection to become available from the connection pool + */ + private Duration maxTimeoutWhenExhausted = Duration.ofMillis(800); + + /** + * The maximum retry attempts to use with this connection pool + */ + private int maxRetryAttempts = 0; + /** * The read connection port to be used for connecting to dyno-queues */ @@ -157,6 +171,22 @@ public void setMaxConnectionsPerHost(int maxConnectionsPerHost) { this.maxConnectionsPerHost = maxConnectionsPerHost; } + public Duration getMaxTimeoutWhenExhausted() { + return maxTimeoutWhenExhausted; + } + + public void setMaxTimeoutWhenExhausted(Duration maxTimeoutWhenExhausted) { + this.maxTimeoutWhenExhausted = maxTimeoutWhenExhausted; + } + + public int getMaxRetryAttempts() { + return maxRetryAttempts; + } + + public void setMaxRetryAttempts(int maxRetryAttempts) { + this.maxRetryAttempts = maxRetryAttempts; + } + public int 
getQueuesNonQuorumPort() { return queuesNonQuorumPort; } @@ -196,4 +226,12 @@ public String getQueuePrefix() { } return prefix; } + + public RetryPolicyFactory getConnectionRetryPolicy() { + if (getMaxRetryAttempts() == 0) { + return RunOnce::new; + } else { + return () -> new RetryNTimes(maxRetryAttempts, false); + } + } }