From 972d9feb96c05a53627d69926f93a39be5892ff1 Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Thu, 17 May 2018 12:06:57 +0200
Subject: [PATCH 001/163] Start migrating setup code that is currently inside
 Modules into Providers.

---
 .../conductor/dao/DynoProxyProvider.java      | 24 ++++++++
 .../conductor/dao/DynoQueueDAOProvider.java   | 34 +++++++++++
 .../ConfigurationHostSupplierProvider.java    | 55 ++++++++++++++++++
 .../server/DynomiteJedisProvider.java         | 58 +++++++++++++++++++
 .../server/InMemoryJedisProvider.java         | 14 +++++
 .../server/LocalHostSupplierProvider.java     | 25 ++++++++
 .../conductor/server/RedisJedisProvider.java  | 36 ++++++++++++
 .../server/TokenMapSupplierProvider.java      | 44 ++++++++++++++
 8 files changed, 290 insertions(+)
 create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java
 create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java
 create mode 100644 server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java

diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java
new file mode 100644
index 0000000000..c47e947af0
--- /dev/null
+++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java
@@ -0,0 +1,24 @@
+package com.netflix.conductor.dao;
+
+
+import com.netflix.conductor.dao.dynomite.DynoProxy;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+import redis.clients.jedis.JedisCommands;
+
+public class DynoProxyProvider implements Provider<DynoProxy> {
+
+    private final JedisCommands dynomiteConnection;
+
+    @Inject
+    public DynoProxyProvider(JedisCommands dynomiteConnection) {
+        this.dynomiteConnection = dynomiteConnection;
+    }
+
+    @Override
+    public DynoProxy get() {
+        return new DynoProxy(dynomiteConnection);
+    }
+}
diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java
new file mode 100644
index 0000000000..a246c745b2
--- /dev/null
+++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java
@@ -0,0 +1,34 @@
+package com.netflix.conductor.dao;
+
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO;
+import com.netflix.dyno.connectionpool.HostSupplier;
+import com.netflix.dyno.queues.redis.DynoShardSupplier;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+import redis.clients.jedis.JedisCommands;
+
+public class DynoQueueDAOProvider implements Provider<QueueDAO> {
+    private final Configuration config;
+    private final JedisCommands dynomiteConnection;
+    private final HostSupplier hostSupplier;
+
+    @Inject
+    public DynoQueueDAOProvider(Configuration config, JedisCommands dynomiteConnection, HostSupplier hostSupplier) {
+        this.config = config;
+        this.dynomiteConnection = dynomiteConnection;
+        this.hostSupplier = hostSupplier;
+    }
+
+    @Override
+    public QueueDAO get() {
+        String localDC = config.getAvailabilityZone();
+        localDC = localDC.replaceAll(config.getRegion(), "");
+        DynoShardSupplier ss = new DynoShardSupplier(hostSupplier, config.getRegion(), localDC);
+
+        return new DynoQueueDAO(dynomiteConnection, dynomiteConnection, ss, config);
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java b/server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java
new file mode 100644
index 0000000000..21b9175f0a
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java
@@ -0,0 +1,55 @@
+package com.netflix.conductor.server;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.dyno.connectionpool.Host;
+import com.netflix.dyno.connectionpool.HostSupplier;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+public class ConfigurationHostSupplierProvider implements Provider<HostSupplier> {
+    private static Logger logger = LoggerFactory.getLogger(ConfigurationHostSupplierProvider.class);
+
+    private final Configuration configuration;
+
+    @Inject
+    public ConfigurationHostSupplierProvider(Configuration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public HostSupplier get() {
+        return () -> parseHostsFromConfig(configuration);
+    }
+
+    private List<Host> parseHostsFromConfig(Configuration configuration) {
+        String hosts = configuration.getProperty("workflow.dynomite.cluster.hosts", null);
+        if(hosts == null) {
+            System.err.println("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
+            logger.error("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
+            System.exit(1);
+        }
+        return parseHostsFrom(hosts);
+    }
+
+    private List<Host> parseHostsFrom(String hostConfig){
+        List<String> hostConfigs = Arrays.asList(hostConfig.split(";"));
+
+        List<Host> hosts = hostConfigs.stream().map(hc -> {
+            String[] hostConfigValues = hc.split(":");
+            String host = hostConfigValues[0];
+            int port = Integer.parseInt(hostConfigValues[1]);
+            String rack = hostConfigValues[2];
+            return new Host(host, port, rack, Host.Status.Up);
+        }).collect(Collectors.toList());
+
+        return hosts;
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java b/server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java
new file mode 100644
index 0000000000..aed951c4c1
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java
@@ -0,0 +1,58 @@
+package com.netflix.conductor.server;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.dyno.connectionpool.HostSupplier;
+import com.netflix.dyno.connectionpool.TokenMapSupplier;
+import com.netflix.dyno.connectionpool.impl.ConnectionPoolConfigurationImpl;
+import com.netflix.dyno.jedis.DynoJedisClient;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+import redis.clients.jedis.JedisCommands;
+
+public class DynomiteJedisProvider implements Provider<JedisCommands> {
+    public static final String DYNOMITE_CLUSTER_NAME_PROPERTY_NAME = "workflow.dynomite.cluster.name";
+    public static final String DYNOMITE_MAX_CONNECTIONS_PROPERTY_NAME = "workflow.dynomite.connection.maxConnsPerHost";
+    public static final int DYNOMITE_MAX_CONNECTIONS_DEFAULT_VALUE = 10;
+
+    private final HostSupplier hostSupplier;
+    private final TokenMapSupplier tokenMapSupplier;
+    private final Configuration configuration;
+    private final String clusterName;
+
+    @Inject
+    public DynomiteJedisProvider(
+            Configuration configuration,
+            HostSupplier hostSupplier,
+            TokenMapSupplier tokenMapSupplier
+    ){
+        this.configuration = configuration;
+        this.hostSupplier = hostSupplier;
+        this.tokenMapSupplier = tokenMapSupplier;
+        this.clusterName = configuration.getProperty(DYNOMITE_CLUSTER_NAME_PROPERTY_NAME, "");
+    }
+
+    @Override
+    public JedisCommands get() {
+        ConnectionPoolConfigurationImpl connectionPoolConfiguration = new ConnectionPoolConfigurationImpl(clusterName)
+                .withTokenSupplier(tokenMapSupplier)
+                .setLocalRack(configuration.getAvailabilityZone())
+                .setLocalDataCenter(configuration.getRegion())
+                .setSocketTimeout(0)
+                .setConnectTimeout(0)
+                .setMaxConnsPerHost(
+                        configuration.getIntProperty(
+                                DYNOMITE_MAX_CONNECTIONS_PROPERTY_NAME,
+                                DYNOMITE_MAX_CONNECTIONS_DEFAULT_VALUE
+                        )
+                );
+
+        return new DynoJedisClient.Builder()
+                .withHostSupplier(hostSupplier)
+                .withApplicationName(configuration.getAppId())
+                .withDynomiteClusterName(clusterName)
+                .withCPConfig(connectionPoolConfiguration)
+                .build();
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java b/server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java
new file mode 100644
index 0000000000..8a0405b796
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java
@@ -0,0 +1,14 @@
+package com.netflix.conductor.server;
+
+import com.netflix.conductor.redis.utils.JedisMock;
+
+import javax.inject.Provider;
+
+import redis.clients.jedis.JedisCommands;
+
+public class InMemoryJedisProvider implements Provider<JedisCommands> {
+    @Override
+    public JedisCommands get() {
+        return new JedisMock();
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java b/server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java
new file mode 100644
index 0000000000..ca81e5a1e7
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java
@@ -0,0 +1,25 @@
+package com.netflix.conductor.server;
+
+import com.google.common.collect.Lists;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.dyno.connectionpool.Host;
+import com.netflix.dyno.connectionpool.HostSupplier;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+public class LocalHostSupplierProvider implements Provider<HostSupplier> {
+    private final Configuration configuration;
+
+    @Inject
+    public LocalHostSupplierProvider(Configuration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public HostSupplier get() {
+        Host dynoHost = new Host("localhost", 0, configuration.getAvailabilityZone(), Host.Status.Up);
+        return ()-> Lists.newArrayList(dynoHost);
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java b/server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java
new file mode 100644
index 0000000000..000b106b9a
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java
@@ -0,0 +1,36 @@
+package com.netflix.conductor.server;
+
+import com.google.common.collect.Lists;
+
+import com.netflix.dyno.connectionpool.Host;
+import com.netflix.dyno.connectionpool.HostSupplier;
+
+import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+import redis.clients.jedis.HostAndPort;
+import redis.clients.jedis.JedisCluster;
+import redis.clients.jedis.JedisCommands;
+
+public class RedisJedisProvider implements Provider<JedisCommands> {
+    private final HostSupplier hostSupplier;
+
+    @Inject
+    public RedisJedisProvider(HostSupplier hostSupplier) {
+        this.hostSupplier = hostSupplier;
+    }
+
+    @Override
+    public JedisCommands get() {
+        // FIXME Do we really want to ignore all additional hosts?
+        Host host = Lists.newArrayList(hostSupplier.getHosts()).get(0);
+
+        GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
+        poolConfig.setMinIdle(5);
+        poolConfig.setMaxTotal(1000);
+        logger.info("Starting conductor server using redis_cluster " + dynoClusterName);
+        return new JedisCluster(new HostAndPort(host.getHostName(), host.getPort()), poolConfig);
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java b/server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java
new file mode 100644
index 0000000000..8d60866b2b
--- /dev/null
+++ b/server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java
@@ -0,0 +1,44 @@
+package com.netflix.conductor.server;
+
+import com.google.common.collect.Lists;
+
+import com.netflix.dyno.connectionpool.Host;
+import com.netflix.dyno.connectionpool.HostSupplier;
+import com.netflix.dyno.connectionpool.TokenMapSupplier;
+import com.netflix.dyno.connectionpool.impl.lb.HostToken;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Set;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+public class TokenMapSupplierProvider implements Provider<TokenMapSupplier> {
+    private final HostSupplier hostSupplier;
+
+    @Inject
+    public TokenMapSupplierProvider(HostSupplier hostSupplier) {
+        this.hostSupplier = hostSupplier;
+    }
+
+    @Override
+    public TokenMapSupplier get() {
+        return new TokenMapSupplier() {
+
+            // FIXME This isn't particularly safe, but it is equivalent to the existing code.
+            // FIXME It seems like we should be supply tokens for more than one host?
+            HostToken token = new HostToken(1L, Lists.newArrayList(hostSupplier.getHosts()).get(0));
+
+            @Override
+            public List<HostToken> getTokens(Set<Host> activeHosts) {
+                return Arrays.asList(token);
+            }
+
+            @Override
+            public HostToken getTokenForHost(Host host, Set<Host> activeHosts) {
+                return token;
+            }
+        };
+    }
+}

From c7cf3738acbb6c37461569060145ecd89ffb464d Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Wed, 23 May 2018 11:52:08 +0200
Subject: [PATCH 002/163] * Refactor Providers related to dynomite and redis
 into the redis-persistence module. * Add a DynomiteConfiguration extension to
 the core Configuration in order to help manage dynomite/redis configuration
 options. * Refactor the base ConductorConfig into the core package and rename
 it to SystemPropertiesConfiguration, since that is how it is implemented. *
 Extend the core configuration in order to implement the DynomiteConfiguration.
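A rough sketch of how the pieces in this series are meant to be consumed, assuming a
Guice module wires them up (the module name and the exact binding set below are
illustrative assumptions, not code introduced by this patch):

    // Hypothetical wiring; RedisWorkflowModule is a placeholder name.
    import com.google.inject.AbstractModule;
    import com.google.inject.Singleton;
    import com.netflix.conductor.dyno.DynomiteConfiguration;
    import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration;
    import com.netflix.conductor.jedis.ConfigurationHostSupplierProvider;
    import com.netflix.conductor.jedis.DynomiteJedisProvider;
    import com.netflix.conductor.jedis.TokenMapSupplierProvider;
    import com.netflix.dyno.connectionpool.HostSupplier;
    import com.netflix.dyno.connectionpool.TokenMapSupplier;
    import redis.clients.jedis.JedisCommands;

    public class RedisWorkflowModule extends AbstractModule {
        @Override
        protected void configure() {
            // Typed accessors (getHosts(), getClusterName(), ...) replace raw property lookups.
            bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class);
            // Connection setup moves out of module bodies and into injectable providers.
            bind(HostSupplier.class).toProvider(ConfigurationHostSupplierProvider.class);
            bind(TokenMapSupplier.class).toProvider(TokenMapSupplierProvider.class);
            // One shared connection rather than per-injection construction.
            bind(JedisCommands.class).toProvider(DynomiteJedisProvider.class).in(Singleton.class);
        }
    }

The point of the refactor is that each provider owns exactly one piece of setup, so a
deployment can swap DynomiteJedisProvider for RedisJedisProvider or InMemoryJedisProvider
by changing a single binding.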
---
 .../conductor/core/config/Configuration.java  | 218 ++++++++++--------
 .../config/SystemPropertiesConfiguration.java |  37 ++-
 redis-persistence/build.gradle                |   5 +-
 .../conductor/dyno/DynomiteConfiguration.java |  16 ++
 ...SystemPropertiesDynomiteConfiguration.java |  35 +++
 .../ConfigurationHostSupplierProvider.java    |  21 +-
 .../jedis}/DynomiteJedisProvider.java         |  23 +-
 .../jedis}/InMemoryJedisProvider.java         |   4 +-
 .../netflix/conductor/jedis}/JedisMock.java   |  10 +-
 .../jedis}/LocalHostSupplierProvider.java     |   2 +-
 .../conductor/jedis}/RedisJedisProvider.java  |  13 +-
 .../jedis}/TokenMapSupplierProvider.java      |   2 +-
 server/build.gradle                           |   3 -
 .../conductor/server/ConductorServer.java     |  66 +++---
 .../com/netflix/conductor/server/Main.java    |   4 +-
 .../conductor/server/ServerModule.java        |  36 ++-
 .../server/ServletContextListner.java         |   4 +-
 .../tests/integration/End2EndTests.java       |   4 +-
 .../conductor/tests/utils/TestModule.java     |   6 +-
 19 files changed, 287 insertions(+), 222 deletions(-)
 rename server/src/main/java/com/netflix/conductor/server/ConductorConfig.java => core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java (77%)
 create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java
 create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/ConfigurationHostSupplierProvider.java (63%)
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/DynomiteJedisProvider.java (60%)
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/InMemoryJedisProvider.java (72%)
 rename {server/src/main/java/com/netflix/conductor/redis/utils => redis-persistence/src/main/java/com/netflix/conductor/jedis}/JedisMock.java (99%)
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/LocalHostSupplierProvider.java (94%)
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/RedisJedisProvider.java (69%)
 rename {server/src/main/java/com/netflix/conductor/server => redis-persistence/src/main/java/com/netflix/conductor/jedis}/TokenMapSupplierProvider.java (97%)

diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
index e43bc1d7cf..a436f72e68 100644
--- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
+++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
@@ -1,121 +1,137 @@
 /**
  * Copyright 2016 Netflix, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- *     http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
  */
 /**
- *
+ *
  */
 package com.netflix.conductor.core.config;
 
+import com.google.inject.AbstractModule;
+
 import java.util.List;
 import java.util.Map;
 
-import com.google.inject.AbstractModule;
-
 /**
  * @author Viren
  *
  */
 public interface Configuration {
-
-    /**
-     *
-     * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows.
-     */
-    public int getSweepFrequency();
-
-    /**
-     *
-     * @return when set to true, the sweep is disabled
-     */
-    public boolean disableSweep();
-
-
-    /**
-     *
-     * @return when set to true, the background task workers executing async system tasks (eg HTTP) are disabled
-     *
-     */
-    public boolean disableAsyncWorkers();
-
-    /**
-     *
-     * @return ID of the server. Can be host name, IP address or any other meaningful identifier. Used for logging
-     */
-    public String getServerId();
-
-    /**
-     *
-     * @return Current environment. e.g. test, prod
-     */
-    public String getEnvironment();
-
-    /**
-     *
-     * @return name of the stack under which the app is running. e.g. devint, testintg, staging, prod etc.
-     */
-    public String getStack();
-
-    /**
-     *
-     * @return APP ID. Used for logging
-     */
-    public String getAppId();
-
-    /**
-     *
-     * @return Data center region. if hosting on Amazon the value is something like us-east-1, us-west-2 etc.
-     */
-    public String getRegion();
-
-    /**
-     *
-     * @return Availability zone / rack. for AWS deployments, the value is something like us-east-1a, etc.
-     */
-    public String getAvailabilityZone();
-
-    /**
-     *
-     * @param name Name of the property
-     * @param defaultValue Default value when not specified
-     * @return User defined integer property.
-     */
-    public int getIntProperty(String name, int defaultValue);
-
-    /**
-     *
-     * @param name Name of the property
-     * @param defaultValue Default value when not specified
-     * @return User defined string property.
-     */
-    public String getProperty(String name, String defaultValue);
-
-
-    /**
-     *
-     * @return Returns all the configurations in a map.
-     */
-    public Map<String, Object> getAll();
-
-    /**
-     *
-     * @return Provides a list of additional modules to configure.
-     * Use this to inject additional modules that should be loaded as part of the Conductor server initialization
-     * If you are creating custom tasks (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask) then initialize them as part of the custom modules.
-     */
-    public default List<AbstractModule> getAdditionalModules() {
-        return null;
-    }
+    String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds";
+    int SWEEP_FREQUENCY_DEFAULT_VALUE = 30;
+    String SWEEP_DISABLE_PROPERTY_NAME = "decider.sweep.disable";
+    // FIXME This really should be typed correctly.
+    String SWEEP_DISABLE_DEFAULT_VALUE = "false";
+    String DISABLE_ASYNC_WORKERS_PROPERTY_NAME = "conductor.disable.async.workers";
+    // FIXME This really should be typed correctly.
+    String DISABLE_ASYNC_WORKERS_DEFAULT_VALUE = "false";
+    String ENVIRONMENT_PROPERTY_NAME = "environment";
+    String ENVIRONMENT_DEFAULT_VALUE = "test";
+    String STACK_PROPERTY_NAME = "STACK";
+    String STACK_DEFAULT_VALUE = "test";
+    String APP_ID_PROPERTY_NAME = "APP_ID";
+    String APP_ID_DEFAULT_VALUE = "conductor";
+    String REGION_PROPERTY_NAME = "EC2_REGION";
+    String REGION_DEFAULT_VALUE = "us-east-1";
+    String AVAILABILITY_ZONE_PROPERTY_NAME = "EC2_AVAILABILITY_ZONE";
+    String AVAILABILITY_ZONE_DEFAULT_VALUE = "us-east-1c";
+    String ADDITIONAL_MODULES_PROPERTY_NAME = "conductor.additional.modules";
+
+    /**
+     *
+     * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows.
+     */
+    public int getSweepFrequency();
+
+    /**
+     *
+     * @return when set to true, the sweep is disabled
+     */
+    public boolean disableSweep();
+
+
+    /**
+     *
+     * @return when set to true, the background task workers executing async system tasks (eg HTTP) are disabled
+     *
+     */
+    public boolean disableAsyncWorkers();
+
+    /**
+     *
+     * @return ID of the server. Can be host name, IP address or any other meaningful identifier. Used for logging
+     */
+    public String getServerId();
+
+    /**
+     *
+     * @return Current environment. e.g. test, prod
+     */
+    public String getEnvironment();
+
+    /**
+     *
+     * @return name of the stack under which the app is running. e.g. devint, testintg, staging, prod etc.
+     */
+    public String getStack();
+
+    /**
+     *
+     * @return APP ID. Used for logging
+     */
+    public String getAppId();
+
+    /**
+     *
+     * @return Data center region. if hosting on Amazon the value is something like us-east-1, us-west-2 etc.
+     */
+    public String getRegion();
+
+    /**
+     *
+     * @return Availability zone / rack. for AWS deployments, the value is something like us-east-1a, etc.
+     */
+    public String getAvailabilityZone();
+
+    /**
+     *
+     * @param name Name of the property
+     * @param defaultValue Default value when not specified
+     * @return User defined integer property.
+     */
+    public int getIntProperty(String name, int defaultValue);
+
+    /**
+     *
+     * @param name Name of the property
+     * @param defaultValue Default value when not specified
+     * @return User defined string property.
+     */
+    public String getProperty(String name, String defaultValue);
+
+
+    /**
+     *
+     * @return Returns all the configurations in a map.
+     */
+    public Map<String, Object> getAll();
+
+    /**
+     *
+     * @return Provides a list of additional modules to configure.
+     * Use this to inject additional modules that should be loaded as part of the Conductor server initialization
+     * If you are creating custom tasks (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask) then initialize them as part of the custom modules.
+     */
+    public default List<AbstractModule> getAdditionalModules() {
+        return null;
+    }
 }
diff --git a/server/src/main/java/com/netflix/conductor/server/ConductorConfig.java b/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java
similarity index 77%
rename from server/src/main/java/com/netflix/conductor/server/ConductorConfig.java
rename to core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java
index c8956366f6..1b18a6f111 100644
--- a/server/src/main/java/com/netflix/conductor/server/ConductorConfig.java
+++ b/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java
@@ -16,7 +16,13 @@
 /**
  *
  */
-package com.netflix.conductor.server;
+package com.netflix.conductor.core.config;
+
+import com.google.inject.AbstractModule;
+
+import org.apache.commons.lang3.StringUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.net.InetAddress;
 import java.net.UnknownHostException;
@@ -27,35 +33,28 @@
 import java.util.Optional;
 import java.util.Properties;
 
-import org.apache.commons.lang3.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.inject.AbstractModule;
-import com.netflix.conductor.core.config.Configuration;
-
 /**
  * @author Viren
  *
 
 */
-public class ConductorConfig implements Configuration {
+public class SystemPropertiesConfiguration implements Configuration {
 
-    private static Logger logger = LoggerFactory.getLogger(ConductorConfig.class);
+    private static Logger logger = LoggerFactory.getLogger(SystemPropertiesConfiguration.class);
 
     @Override
     public int getSweepFrequency() {
-        return getIntProperty("decider.sweep.frequency.seconds", 30);
+        return getIntProperty(SWEEP_FREQUENCY_PROPERTY_NAME, SWEEP_FREQUENCY_DEFAULT_VALUE);
     }
 
     @Override
     public boolean disableSweep() {
-        String disable = getProperty("decider.sweep.disable", "false");
+        String disable = getProperty(SWEEP_DISABLE_PROPERTY_NAME, SWEEP_DISABLE_DEFAULT_VALUE);
         return Boolean.getBoolean(disable);
     }
 
     @Override
     public boolean disableAsyncWorkers() {
-        String disable = getProperty("conductor.disable.async.workers", "false");
+        String disable = getProperty(DISABLE_ASYNC_WORKERS_PROPERTY_NAME, DISABLE_ASYNC_WORKERS_DEFAULT_VALUE);
         return Boolean.getBoolean(disable);
     }
 
@@ -70,27 +69,27 @@ public String getServerId() {
 
     @Override
     public String getEnvironment() {
-        return getProperty("environment", "test");
+        return getProperty(ENVIRONMENT_PROPERTY_NAME, ENVIRONMENT_DEFAULT_VALUE);
     }
 
     @Override
     public String getStack() {
-        return getProperty("STACK", "test");
+        return getProperty(STACK_PROPERTY_NAME, STACK_DEFAULT_VALUE);
     }
 
     @Override
     public String getAppId() {
-        return getProperty("APP_ID", "conductor");
+        return getProperty(APP_ID_PROPERTY_NAME, APP_ID_DEFAULT_VALUE);
    }
 
     @Override
     public String getRegion() {
-        return getProperty("EC2_REGION", "us-east-1");
+        return getProperty(REGION_PROPERTY_NAME, REGION_DEFAULT_VALUE);
     }
 
     @Override
     public String getAvailabilityZone() {
-        return getProperty("EC2_AVAILABILITY_ZONE", "us-east-1c");
+        return getProperty(AVAILABILITY_ZONE_PROPERTY_NAME, AVAILABILITY_ZONE_DEFAULT_VALUE);
     }
 
     @Override
@@ -128,7 +127,7 @@ public Map<String, Object> getAll() {
 
     @Override
     public List<AbstractModule> getAdditionalModules() {
-        String additionalModuleClasses = getProperty("conductor.additional.modules", null);
+        String additionalModuleClasses = getProperty(ADDITIONAL_MODULES_PROPERTY_NAME, null);
 
         if(!StringUtils.isEmpty(additionalModuleClasses)) {
             try {
                 List<AbstractModule> modules = new LinkedList<>();
diff --git a/redis-persistence/build.gradle b/redis-persistence/build.gradle
index 580e142be2..9f014a38a2 100644
--- a/redis-persistence/build.gradle
+++ b/redis-persistence/build.gradle
@@ -8,7 +8,6 @@ dependencies {
     compile "com.netflix.dyno-queues:dyno-queues-redis:${revDynoQueues}"
     compile "org.elasticsearch:elasticsearch:${revElasticSearch2}"
 
-    //In memory redis for unit testing
-    testCompile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}"
-
+    //In memory
+    compile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}"
 }
diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java
new file mode 100644
index 0000000000..76875fa491
--- /dev/null
+++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java
@@ -0,0 +1,16 @@
+package com.netflix.conductor.dyno;
+
+import com.netflix.conductor.core.config.Configuration;
+
+public interface DynomiteConfiguration extends Configuration {
+    String CLUSTER_NAME_PROPERTY_NAME = "workflow.dynomite.cluster.name";
+    String HOSTS_PROPERTY_NAME = "workflow.dynomite.cluster.hosts";
+    String MAX_CONNECTIONS_PER_HOST_PROPERTY_NAME = "workflow.dynomite.connection.maxConnsPerHost";
+    int MAX_CONNECTIONS_PER_HOST_DEFAULT_VALUE = 10;
+
+    String getClusterName();
+
+    String getHosts();
+
+    int getMaxConnectionsPerHost();
+}
diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java
new file mode 100644
index 0000000000..483269b8a9
--- /dev/null
+++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java
@@ -0,0 +1,35 @@
+package com.netflix.conductor.dyno;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
+
+import javax.inject.Inject;
+
+public class SystemPropertiesDynomiteConfiguration extends SystemPropertiesConfiguration
+        implements DynomiteConfiguration {
+
+    private final Configuration configuration;
+
+    @Inject
+    public SystemPropertiesDynomiteConfiguration(Configuration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public String getClusterName() {
+        return configuration.getProperty(CLUSTER_NAME_PROPERTY_NAME, "");
+    }
+
+    @Override
+    public String getHosts() {
+        return configuration.getProperty(HOSTS_PROPERTY_NAME, null);
+    }
+
+    @Override
+    public int getMaxConnectionsPerHost() {
+        return configuration.getIntProperty(
+            MAX_CONNECTIONS_PER_HOST_PROPERTY_NAME,
+            MAX_CONNECTIONS_PER_HOST_DEFAULT_VALUE
+        );
+    }
+}
diff --git a/server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java
similarity index 63%
rename from server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java
index 21b9175f0a..c2ca3adf38 100644
--- a/server/src/main/java/com/netflix/conductor/server/ConfigurationHostSupplierProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java
@@ -1,6 +1,6 @@
-package com.netflix.conductor.server;
+package com.netflix.conductor.jedis;
 
-import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.dyno.DynomiteConfiguration;
 import com.netflix.dyno.connectionpool.Host;
 import com.netflix.dyno.connectionpool.HostSupplier;
 
@@ -17,10 +17,10 @@ public class ConfigurationHostSupplierProvider implements Provider<HostSupplier> {
     private static Logger logger = LoggerFactory.getLogger(ConfigurationHostSupplierProvider.class);
 
-    private final Configuration configuration;
+    private final DynomiteConfiguration configuration;
 
     @Inject
-    public ConfigurationHostSupplierProvider(Configuration configuration) {
+    public ConfigurationHostSupplierProvider(DynomiteConfiguration configuration) {
         this.configuration = configuration;
     }
 
@@ -29,11 +29,16 @@ public HostSupplier get() {
         return () -> parseHostsFromConfig(configuration);
     }
 
-    private List<Host> parseHostsFromConfig(Configuration configuration) {
-        String hosts = configuration.getProperty("workflow.dynomite.cluster.hosts", null);
+    private List<Host> parseHostsFromConfig(DynomiteConfiguration configuration) {
+        String hosts = configuration.getHosts();
         if(hosts == null) {
-            System.err.println("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
-            logger.error("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
+            // FIXME This type of validation probably doesn't belong here.
+            String message = String.format(
+                    "Missing dynomite/redis hosts. Ensure '%s' has been set in the supplied configuration.",
+                    DynomiteConfiguration.HOSTS_PROPERTY_NAME
+            );
+            System.err.println(message);
+            logger.error(message);
             System.exit(1);
         }
         return parseHostsFrom(hosts);
diff --git a/server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/DynomiteJedisProvider.java
similarity index 60%
rename from server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/DynomiteJedisProvider.java
index aed951c4c1..9652e98e8b 100644
--- a/server/src/main/java/com/netflix/conductor/server/DynomiteJedisProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/DynomiteJedisProvider.java
@@ -1,6 +1,6 @@
-package com.netflix.conductor.server;
+package com.netflix.conductor.jedis;
 
-import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.dyno.DynomiteConfiguration;
 import com.netflix.dyno.connectionpool.HostSupplier;
 import com.netflix.dyno.connectionpool.TokenMapSupplier;
 import com.netflix.dyno.connectionpool.impl.ConnectionPoolConfigurationImpl;
@@ -12,46 +12,39 @@
 import redis.clients.jedis.JedisCommands;
 
 public class DynomiteJedisProvider implements Provider<JedisCommands> {
-    public static final String DYNOMITE_CLUSTER_NAME_PROPERTY_NAME = "workflow.dynomite.cluster.name";
-    public static final String DYNOMITE_MAX_CONNECTIONS_PROPERTY_NAME = "workflow.dynomite.connection.maxConnsPerHost";
-    public static final int DYNOMITE_MAX_CONNECTIONS_DEFAULT_VALUE = 10;
 
     private final HostSupplier hostSupplier;
     private final TokenMapSupplier tokenMapSupplier;
-    private final Configuration configuration;
-    private final String clusterName;
+    private final DynomiteConfiguration configuration;
 
     @Inject
     public DynomiteJedisProvider(
-            Configuration configuration,
+            DynomiteConfiguration configuration,
             HostSupplier hostSupplier,
             TokenMapSupplier tokenMapSupplier
     ){
         this.configuration = configuration;
        this.hostSupplier = hostSupplier;
         this.tokenMapSupplier = tokenMapSupplier;
-        this.clusterName = configuration.getProperty(DYNOMITE_CLUSTER_NAME_PROPERTY_NAME, "");
     }
 
     @Override
     public JedisCommands get() {
-        ConnectionPoolConfigurationImpl connectionPoolConfiguration = new ConnectionPoolConfigurationImpl(clusterName)
+        ConnectionPoolConfigurationImpl connectionPoolConfiguration =
+                new ConnectionPoolConfigurationImpl(configuration.getClusterName())
                 .withTokenSupplier(tokenMapSupplier)
                 .setLocalRack(configuration.getAvailabilityZone())
                 .setLocalDataCenter(configuration.getRegion())
                 .setSocketTimeout(0)
                 .setConnectTimeout(0)
                 .setMaxConnsPerHost(
-                        configuration.getIntProperty(
-                                DYNOMITE_MAX_CONNECTIONS_PROPERTY_NAME,
-                                DYNOMITE_MAX_CONNECTIONS_DEFAULT_VALUE
-                        )
+                        configuration.getMaxConnectionsPerHost()
                 );
 
         return new DynoJedisClient.Builder()
                 .withHostSupplier(hostSupplier)
                 .withApplicationName(configuration.getAppId())
-                .withDynomiteClusterName(clusterName)
+                .withDynomiteClusterName(configuration.getClusterName())
                 .withCPConfig(connectionPoolConfiguration)
                 .build();
     }
diff --git a/server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java
similarity index 72%
rename from server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java
index 8a0405b796..4af6ff42db 100644
--- a/server/src/main/java/com/netflix/conductor/server/InMemoryJedisProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java
@@ -1,6 +1,4 @@
-package com.netflix.conductor.server;
-
-import com.netflix.conductor.redis.utils.JedisMock;
+package com.netflix.conductor.jedis;
 
 import javax.inject.Provider;
 
diff --git a/server/src/main/java/com/netflix/conductor/redis/utils/JedisMock.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/JedisMock.java
similarity index 99%
rename from server/src/main/java/com/netflix/conductor/redis/utils/JedisMock.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/JedisMock.java
index 0e3d4b67cd..5e7bac8a61 100644
--- a/server/src/main/java/com/netflix/conductor/redis/utils/JedisMock.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/JedisMock.java
@@ -16,7 +16,11 @@
 /**
  *
  */
-package com.netflix.conductor.redis.utils;
+package com.netflix.conductor.jedis;
+
+import org.rarefiedredis.redis.IRedisClient;
+import org.rarefiedredis.redis.IRedisSortedSet.ZsetPair;
+import org.rarefiedredis.redis.RedisMock;
 
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -27,10 +31,6 @@
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.rarefiedredis.redis.IRedisClient;
-import org.rarefiedredis.redis.IRedisSortedSet.ZsetPair;
-import org.rarefiedredis.redis.RedisMock;
-
 import redis.clients.jedis.Jedis;
 import redis.clients.jedis.ScanParams;
 import redis.clients.jedis.ScanResult;
diff --git a/server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/LocalHostSupplierProvider.java
similarity index 94%
rename from server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/LocalHostSupplierProvider.java
index ca81e5a1e7..1280bd424f 100644
--- a/server/src/main/java/com/netflix/conductor/server/LocalHostSupplierProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/LocalHostSupplierProvider.java
@@ -1,4 +1,4 @@
-package com.netflix.conductor.server;
+package com.netflix.conductor.jedis;
 
 import com.google.common.collect.Lists;
 
diff --git a/server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisJedisProvider.java
similarity index 69%
rename from server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisJedisProvider.java
index 000b106b9a..7c347d4227 100644
--- a/server/src/main/java/com/netflix/conductor/server/RedisJedisProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisJedisProvider.java
@@ -1,11 +1,14 @@
-package com.netflix.conductor.server;
+package com.netflix.conductor.jedis;
 
 import com.google.common.collect.Lists;
 
+import com.netflix.conductor.dyno.DynomiteConfiguration;
 import com.netflix.dyno.connectionpool.Host;
 import com.netflix.dyno.connectionpool.HostSupplier;
 
 import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.inject.Inject;
 import javax.inject.Provider;
@@ -15,11 +18,15 @@
 import redis.clients.jedis.JedisCommands;
 
 public class RedisJedisProvider implements Provider<JedisCommands> {
+    private static Logger logger = LoggerFactory.getLogger(RedisJedisProvider.class);
+
     private final HostSupplier hostSupplier;
+    private final DynomiteConfiguration configuration;
 
     @Inject
-    public RedisJedisProvider(HostSupplier hostSupplier) {
+    public RedisJedisProvider(HostSupplier hostSupplier, DynomiteConfiguration configuration) {
         this.hostSupplier = hostSupplier;
+        this.configuration = configuration;
     }
 
     @Override
@@ -30,7 +37,7 @@ public JedisCommands get() {
 
         GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig();
         poolConfig.setMinIdle(5);
         poolConfig.setMaxTotal(1000);
-        logger.info("Starting conductor server using redis_cluster " + dynoClusterName);
+        logger.info("Starting conductor server using redis_cluster " + configuration.getClusterName());
         return new JedisCluster(new HostAndPort(host.getHostName(), host.getPort()), poolConfig);
     }
 }
diff --git a/server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/TokenMapSupplierProvider.java
similarity index 97%
rename from server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java
rename to redis-persistence/src/main/java/com/netflix/conductor/jedis/TokenMapSupplierProvider.java
index 8d60866b2b..627fed0729 100644
--- a/server/src/main/java/com/netflix/conductor/server/TokenMapSupplierProvider.java
+++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/TokenMapSupplierProvider.java
@@ -1,4 +1,4 @@
-package com.netflix.conductor.server;
+package com.netflix.conductor.jedis;
 
 import com.google.common.collect.Lists;
 
diff --git a/server/build.gradle b/server/build.gradle
index 6edf0c067f..f98a8ff630 100644
--- a/server/build.gradle
+++ b/server/build.gradle
@@ -37,9 +37,6 @@ dependencies {
 
     //Swagger
     compile "io.swagger:swagger-jersey-jaxrs:${revSwagger}"
-
-    //In memory
-    compile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}"
 }
 
 shadowJar {
diff --git a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
index 33f3f00b5b..941e879da8 100644
--- a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
+++ b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java
@@ -18,30 +18,14 @@
  */
 package com.netflix.conductor.server;
 
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Set;
-
-import javax.servlet.DispatcherType;
-import javax.ws.rs.core.MediaType;
-
-import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.servlet.DefaultServlet;
-import org.eclipse.jetty.servlet.ServletContextHandler;
-import org.eclipse.jetty.servlet.ServletHolder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.inject.Guice;
 import com.google.inject.servlet.GuiceFilter;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.netflix.conductor.common.metadata.tasks.TaskDef;
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
 import com.netflix.conductor.dao.es.EmbeddedElasticSearch;
-import com.netflix.conductor.redis.utils.JedisMock;
+import com.netflix.conductor.jedis.JedisMock;
 import com.netflix.dyno.connectionpool.Host;
 import com.netflix.dyno.connectionpool.Host.Status;
 import com.netflix.dyno.connectionpool.HostSupplier;
@@ -51,6 +35,24 @@
 import com.netflix.dyno.jedis.DynoJedisClient;
 import com.sun.jersey.api.client.Client;
 
+import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.EnumSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+import javax.servlet.DispatcherType;
+import javax.ws.rs.core.MediaType;
+
 import redis.clients.jedis.HostAndPort;
 import redis.clients.jedis.JedisCluster;
 import redis.clients.jedis.JedisCommands;
@@ -71,16 +73,16 @@ enum DB {
 
     private Server server;
 
-    private ConductorConfig conductorConfig;
+    private SystemPropertiesConfiguration systemPropertiesConfiguration;
 
     private DB database;
 
-    public ConductorServer(ConductorConfig conductorConfig) {
-        this.conductorConfig = conductorConfig;
-        String dynoClusterName = conductorConfig.getProperty("workflow.dynomite.cluster.name", "");
+    public ConductorServer(SystemPropertiesConfiguration systemPropertiesConfiguration) {
+        this.systemPropertiesConfiguration = systemPropertiesConfiguration;
+        String dynoClusterName = systemPropertiesConfiguration.getProperty("workflow.dynomite.cluster.name", "");
 
         List<Host> dynoHosts = new LinkedList<>();
-        String dbstring = conductorConfig.getProperty("db", "memory");
+        String dbstring = systemPropertiesConfiguration.getProperty("db", "memory");
         try {
             database = DB.valueOf(dbstring);
         }catch(IllegalArgumentException ie) {
@@ -89,7 +91,7 @@ public ConductorServer(ConductorConfig conductorConfig) {
         }
 
         if(!(database.equals(DB.memory) || database.equals(DB.mysql))) {
-            String hosts = conductorConfig.getProperty("workflow.dynomite.cluster.hosts", null);
+            String hosts = systemPropertiesConfiguration.getProperty("workflow.dynomite.cluster.hosts", null);
             if(hosts == null) {
                 System.err.println("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
                 logger.error("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration.");
@@ -108,7 +110,7 @@ public ConductorServer(ConductorConfig conductorConfig) {
 
         }else {
             //Create a single shard host supplier
-            Host dynoHost = new Host("localhost", 0, conductorConfig.getAvailabilityZone(), Status.Up);
+            Host dynoHost = new Host("localhost", 0, systemPropertiesConfiguration.getAvailabilityZone(), Status.Up);
             dynoHosts.add(dynoHost);
         }
         init(dynoClusterName, dynoHosts);
@@ -124,15 +126,15 @@ private void init(String dynoClusterName, List<Host> dynoHosts) {
         case dynomite:
             ConnectionPoolConfigurationImpl connectionPoolConfiguration = new ConnectionPoolConfigurationImpl(dynoClusterName)
                     .withTokenSupplier(getTokenMapSupplier(dynoHosts))
-                    .setLocalRack(conductorConfig.getAvailabilityZone())
-                    .setLocalDataCenter(conductorConfig.getRegion())
+                    .setLocalRack(systemPropertiesConfiguration.getAvailabilityZone())
+                    .setLocalDataCenter(systemPropertiesConfiguration.getRegion())
                     .setSocketTimeout(0)
                     .setConnectTimeout(0)
-                    .setMaxConnsPerHost(conductorConfig.getIntProperty("workflow.dynomite.connection.maxConnsPerHost", 10));
+                    .setMaxConnsPerHost(systemPropertiesConfiguration.getIntProperty("workflow.dynomite.connection.maxConnsPerHost", 10));
 
             jedis = new DynoJedisClient.Builder()
                     .withHostSupplier(hostSupplier)
-                    .withApplicationName(conductorConfig.getAppId())
+                    .withApplicationName(systemPropertiesConfiguration.getAppId())
                     .withDynomiteClusterName(dynoClusterName)
                     .withCPConfig(connectionPoolConfiguration)
                     .build();
@@ -170,7 +172,7 @@ private void init(String dynoClusterName, List<Host> dynoHosts) {
             break;
         }
 
-        this.serverModule = new ServerModule(jedis, hostSupplier, conductorConfig, database);
+        this.serverModule = new ServerModule(jedis, hostSupplier, systemPropertiesConfiguration, database);
     }
 
     private TokenMapSupplier getTokenMapSupplier(List<Host> dynoHosts) {
diff --git a/server/src/main/java/com/netflix/conductor/server/Main.java b/server/src/main/java/com/netflix/conductor/server/Main.java
index 9e7b90214f..c4b515c5c5 100644
--- a/server/src/main/java/com/netflix/conductor/server/Main.java
+++ b/server/src/main/java/com/netflix/conductor/server/Main.java
@@ -18,6 +18,8 @@
  */
 package com.netflix.conductor.server;
 
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
+
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.IOException;
@@ -40,7 +42,7 @@ public static void main(String[] args) throws Exception {
             PropertyConfigurator.configure(new FileInputStream(new File(args[1])));
         }
 
-        ConductorConfig config = new ConductorConfig();
+        SystemPropertiesConfiguration config = new SystemPropertiesConfiguration();
         ConductorServer server = new ConductorServer(config);
 
         System.out.println("\n\n\n");
diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
index 7a1f8cefb7..3ab3f416cd 100644
--- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java
+++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
@@ -18,31 +18,25 @@
 */
 package com.netflix.conductor.server;
 
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.atomic.AtomicInteger;
-
 import com.google.inject.AbstractModule;
 import com.google.inject.Provides;
+
 import com.netflix.conductor.contribs.http.HttpTask;
 import com.netflix.conductor.contribs.http.RestClientManager;
 import com.netflix.conductor.contribs.json.JsonJqTransform;
 import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.core.config.CoreModule;
-import com.netflix.conductor.dao.ExecutionDAO;
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
 import com.netflix.conductor.dao.IndexDAO;
-import com.netflix.conductor.dao.MetadataDAO;
-import com.netflix.conductor.dao.QueueDAO;
 import com.netflix.conductor.dao.RedisESWorkflowModule;
-import com.netflix.conductor.dao.dynomite.DynoProxy;
-import com.netflix.conductor.dao.dynomite.RedisExecutionDAO;
-import com.netflix.conductor.dao.dynomite.RedisMetadataDAO;
-import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO;
 import com.netflix.conductor.dao.index.ElasticSearchDAO;
 import com.netflix.conductor.dao.index.ElasticsearchModule;
 import com.netflix.conductor.dao.mysql.MySQLWorkflowModule;
 import com.netflix.dyno.connectionpool.HostSupplier;
-import com.netflix.dyno.queues.redis.DynoShardSupplier;
+
+import java.util.List;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.atomic.AtomicInteger;
 
 import redis.clients.jedis.JedisCommands;
 
@@ -64,16 +58,16 @@ public class ServerModule extends AbstractModule {
 
     private String localRack;
 
-    private ConductorConfig conductorConfig;
+    private SystemPropertiesConfiguration systemPropertiesConfiguration;
 
     private ConductorServer.DB db;
 
-    public ServerModule(JedisCommands jedis, HostSupplier hostSupplier, ConductorConfig conductorConfig, ConductorServer.DB db) {
+    public ServerModule(JedisCommands jedis, HostSupplier hostSupplier, SystemPropertiesConfiguration systemPropertiesConfiguration, ConductorServer.DB db) {
         this.dynoConn = jedis;
         this.hostSupplier = hostSupplier;
-        this.conductorConfig = conductorConfig;
-        this.region = conductorConfig.getRegion();
-        this.localRack = conductorConfig.getAvailabilityZone();
+        this.systemPropertiesConfiguration = systemPropertiesConfiguration;
+        this.region = systemPropertiesConfiguration.getRegion();
+        this.localRack = systemPropertiesConfiguration.getAvailabilityZone();
         this.db = db;
     }
 
@@ -83,12 +77,12 @@ protected void configure() {
 
         configureExecutorService();
 
-        bind(Configuration.class).toInstance(conductorConfig);
+        bind(Configuration.class).toInstance(systemPropertiesConfiguration);
 
         if (db == ConductorServer.DB.mysql) {
             install(new MySQLWorkflowModule());
         } else {
-            install(new RedisESWorkflowModule(conductorConfig, dynoConn, hostSupplier));
+            install(new RedisESWorkflowModule(systemPropertiesConfiguration, dynoConn, hostSupplier));
         }
 
         install(new ElasticsearchModule());
@@ -97,10 +91,10 @@ protected void configure() {
         install(new CoreModule());
         install(new JerseyModule());
 
-        new HttpTask(new RestClientManager(), conductorConfig);
+        new HttpTask(new RestClientManager(), systemPropertiesConfiguration);
         new JsonJqTransform();
 
-        List<AbstractModule> additionalModules = conductorConfig.getAdditionalModules();
+        List<AbstractModule> additionalModules = systemPropertiesConfiguration.getAdditionalModules();
         if(additionalModules != null) {
             for(AbstractModule additionalModule : additionalModules) {
                 install(additionalModule);
diff --git a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java
index 6cd437704d..9c772acaf3 100644
--- a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java
+++ b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java
@@ -32,6 +32,8 @@
 import com.google.inject.servlet.GuiceServletContextListener;
 import com.google.inject.servlet.ServletModule;
 
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
+
 /**
  * @author Viren
  *
@@ -43,7 +45,7 @@ protected Injector getInjector() {
 
         loadProperties();
 
-        ConductorConfig config = new ConductorConfig();
+        SystemPropertiesConfiguration config = new SystemPropertiesConfiguration();
         ConductorServer server = new ConductorServer(config);
 
         return Guice.createInjector(server.getGuiceModule(), getSwagger());
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
index 972bf59b1e..9b6d13edb7 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
@@ -43,7 +43,7 @@
 import com.netflix.conductor.common.run.Workflow;
 import com.netflix.conductor.common.run.Workflow.WorkflowStatus;
 import com.netflix.conductor.common.run.WorkflowSummary;
-import com.netflix.conductor.server.ConductorConfig;
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
 import com.netflix.conductor.server.ConductorServer;
 
 /**
@@ -68,7 +68,7 @@ public class End2EndTests {
 
     @BeforeClass
     public static void setup() throws Exception {
-        ConductorServer server = new ConductorServer(new ConductorConfig());
+        ConductorServer server = new ConductorServer(new SystemPropertiesConfiguration());
         server.start(8080, false);
 
         tc = new TaskClient();
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
index 2d4137d855..0ad980fe44 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
@@ -36,8 +36,8 @@
 import com.netflix.conductor.dao.dynomite.RedisExecutionDAO;
 import com.netflix.conductor.dao.dynomite.RedisMetadataDAO;
 import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO;
-import com.netflix.conductor.redis.utils.JedisMock;
-import com.netflix.conductor.server.ConductorConfig;
+import com.netflix.conductor.jedis.JedisMock;
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
 import com.netflix.dyno.queues.ShardSupplier;
 
 import redis.clients.jedis.JedisCommands;
@@ -61,7 +61,7 @@ protected void configure() {
 
         configureExecutorService();
 
-        ConductorConfig config = new ConductorConfig();
+        SystemPropertiesConfiguration config = new SystemPropertiesConfiguration();
         bind(Configuration.class).toInstance(config);
 
         JedisCommands jedisMock = new JedisMock();

From a668894a335a8258072ef46867760a310d49b91e Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Thu, 31 May 2018 15:51:57 +0200
Subject: [PATCH 003/163] Remove System.* calls.
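The effect of the diff below is easier to see assembled. As a sketch (context lines
reconstructed from the surrounding patches, not new behavior), the host parsing now
fails fast with an exception instead of printing to stderr and killing the JVM, so the
error propagates to whatever constructed the provider:

    private List<Host> parseHostsFromConfig(DynomiteConfiguration configuration) {
        String hosts = configuration.getHosts();
        if (hosts == null) {
            String message = String.format(
                    "Missing dynomite/redis hosts. Ensure '%s' has been set in the supplied configuration.",
                    DynomiteConfiguration.HOSTS_PROPERTY_NAME);
            logger.error(message);
            // Previously: System.err.println(message); System.exit(1);
            throw new RuntimeException(message);
        }
        return parseHostsFrom(hosts);
    }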
--- .../conductor/jedis/ConfigurationHostSupplierProvider.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java index c2ca3adf38..57e0a3f0be 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/ConfigurationHostSupplierProvider.java @@ -37,9 +37,8 @@ private List parseHostsFromConfig(DynomiteConfiguration configuration) { "Missing dynomite/redis hosts. Ensure '%s' has been set in the supplied configuration.", DynomiteConfiguration.HOSTS_PROPERTY_NAME ); - System.err.println(message); logger.error(message); - System.exit(1); + throw new RuntimeException(message); } return parseHostsFrom(hosts); } From fb9c61fd8b66024fe07079e0f143eac11c73fccd Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 4 Jun 2018 12:10:00 +0200 Subject: [PATCH 004/163] * Add more providers. * Hack in singleton enforcement. * Centralize more configuration. --- .../conductor/dao/DynoProxyProvider.java | 24 ---- .../conductor/dao/RedisESWorkflowModule.java | 2 +- .../conductor/dao/dynomite/BaseDynoDAO.java | 1 + .../dao/dynomite/RedisExecutionDAO.java | 1 + .../dao/dynomite/RedisMetadataDAO.java | 1 + .../queue}/DynoQueueDAOProvider.java | 14 ++- .../{dao/dynomite => dyno}/DynoProxy.java | 21 ++-- .../dyno/DynoProxyDiscoveryProvider.java | 33 ++++++ .../conductor/dyno/DynomiteConfiguration.java | 55 ++++++++- .../dyno/RedisQueuesDiscoveryProvider.java | 105 ++++++++++++++++++ .../conductor/dyno/RedisQueuesProvider.java | 52 +++++++++ ...SystemPropertiesDynomiteConfiguration.java | 31 +----- .../dao/dynomite/RedisExecutionDAOTest.java | 1 + .../dao/dynomite/RedisMetadataDAOTest.java | 1 + .../conductor/tests/utils/TestModule.java | 17 +-- 15 files changed, 279 insertions(+), 80 deletions(-) delete mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java rename redis-persistence/src/main/java/com/netflix/conductor/dao/{ => dynomite/queue}/DynoQueueDAOProvider.java (75%) rename redis-persistence/src/main/java/com/netflix/conductor/{dao/dynomite => dyno}/DynoProxy.java (99%) create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java deleted file mode 100644 index c47e947af0..0000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoProxyProvider.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.netflix.conductor.dao; - - -import com.netflix.conductor.dao.dynomite.DynoProxy; - -import javax.inject.Inject; -import javax.inject.Provider; - -import redis.clients.jedis.JedisCommands; - -public class DynoProxyProvider implements Provider { - - private final JedisCommands dynomiteConnection; - - @Inject - public DynoProxyProvider(JedisCommands dynomiteConnection) { - this.dynomiteConnection = dynomiteConnection; - } - - @Override - public DynoProxy get() { - return new DynoProxy(dynomiteConnection); - 
} -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java index b0cb0c8736..2378ae1d15 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java @@ -18,11 +18,11 @@ import com.google.inject.AbstractModule; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.dao.dynomite.DynoProxy; import com.netflix.conductor.dao.dynomite.RedisExecutionDAO; import com.netflix.conductor.dao.dynomite.RedisMetadataDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; import com.netflix.conductor.dao.index.ElasticSearchDAO; +import com.netflix.conductor.dyno.DynoProxy; import com.netflix.dyno.connectionpool.HostSupplier; import com.netflix.dyno.queues.redis.DynoShardSupplier; diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/BaseDynoDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/BaseDynoDAO.java index 6cc2d6258f..fa8c8d9220 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/BaseDynoDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/BaseDynoDAO.java @@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.dyno.DynoProxy; public class BaseDynoDAO { diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index 138d1d72f1..7b478d8ea5 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -50,6 +50,7 @@ import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.MetadataDAO; +import com.netflix.conductor.dyno.DynoProxy; import com.netflix.conductor.metrics.Monitors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java index fbce4a854a..a84f13256d 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java @@ -39,6 +39,7 @@ import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.MetadataDAO; +import com.netflix.conductor.dyno.DynoProxy; @Singleton @Trace diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java similarity index 75% rename from redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java rename to redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java index a246c745b2..04f7064440 100644 --- 
a/redis-persistence/src/main/java/com/netflix/conductor/dao/DynoQueueDAOProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java @@ -15,20 +15,26 @@ public class DynoQueueDAOProvider implements Provider<QueueDAO> { private final Configuration config; private final JedisCommands dynomiteConnection; private final HostSupplier hostSupplier; + private final QueueDAO dao; @Inject public DynoQueueDAOProvider(Configuration config, JedisCommands dynomiteConnection, HostSupplier hostSupplier) { this.config = config; this.dynomiteConnection = dynomiteConnection; this.hostSupplier = hostSupplier; - } - @Override - public QueueDAO get() { + // FIXME: This is a hacky way to force a single instance. It would be better for Guice to enforce this by using + // an @Inject constructor on DynoQueueDAO and a binding rather than a Provider. + String localDC = config.getAvailabilityZone(); localDC = localDC.replaceAll(config.getRegion(), ""); DynoShardSupplier ss = new DynoShardSupplier(hostSupplier, config.getRegion(), localDC); - return new DynoQueueDAO(dynomiteConnection, dynomiteConnection, ss, config); + this.dao = new DynoQueueDAO(dynomiteConnection, dynomiteConnection, ss, config); + } + + @Override + public QueueDAO get() { + return this.dao; + } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/DynoProxy.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java similarity index 99% rename from redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/DynoProxy.java rename to redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java index ab59da9bc7..7a01680216 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/DynoProxy.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java @@ -13,7 +13,17 @@ * See the License for the specific language governing permissions and * limitations under the License.
*/ -package com.netflix.conductor.dao.dynomite; +package com.netflix.conductor.dyno; + +import com.google.inject.Singleton; + +import com.netflix.conductor.core.config.Configuration; +import com.netflix.discovery.DiscoveryClient; +import com.netflix.dyno.connectionpool.exception.DynoException; +import com.netflix.dyno.jedis.DynoJedisClient; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import java.util.HashMap; import java.util.HashSet; @@ -26,15 +36,6 @@ import javax.inject.Inject; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.inject.Singleton; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.discovery.DiscoveryClient; -import com.netflix.dyno.connectionpool.exception.DynoException; -import com.netflix.dyno.jedis.DynoJedisClient; - import redis.clients.jedis.JedisCommands; import redis.clients.jedis.ScanParams; import redis.clients.jedis.ScanResult; diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java new file mode 100644 index 0000000000..2dbc08c168 --- /dev/null +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java @@ -0,0 +1,33 @@ +package com.netflix.conductor.dyno; + +import com.netflix.conductor.core.config.Configuration; +import com.netflix.discovery.DiscoveryClient; +import com.netflix.dyno.jedis.DynoJedisClient; + +import javax.inject.Inject; +import javax.inject.Provider; + +import redis.clients.jedis.JedisCommands; + +public class DynoProxyDiscoveryProvider implements Provider { + private final DiscoveryClient discoveryClient; + private final Configuration configuration; + + @Inject + public DynoProxyDiscoveryProvider(DiscoveryClient discoveryClient, Configuration configuration) { + this.discoveryClient = discoveryClient; + this.configuration = configuration; + } + + @Override + public JedisCommands get() { + String cluster = configuration.getProperty("workflow.dynomite.cluster", null); + String applicationName = configuration.getAppId(); + return new DynoJedisClient + .Builder() + .withApplicationName(applicationName) + .withDynomiteClusterName(cluster) + .withDiscoveryClient(discoveryClient) + .build(); + } +} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java index 76875fa491..9607b24acf 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynomiteConfiguration.java @@ -3,14 +3,63 @@ import com.netflix.conductor.core.config.Configuration; public interface DynomiteConfiguration extends Configuration { + // FIXME Are cluster and cluster name really different things? 
+ String CLUSTER_PROPERTY_NAME = "workflow.dynomite.cluster"; + String CLUSTER_DEFAULT_VALUE = null; + String CLUSTER_NAME_PROPERTY_NAME = "workflow.dynomite.cluster.name"; String HOSTS_PROPERTY_NAME = "workflow.dynomite.cluster.hosts"; + String MAX_CONNECTIONS_PER_HOST_PROPERTY_NAME = "workflow.dynomite.connection.maxConnsPerHost"; int MAX_CONNECTIONS_PER_HOST_DEFAULT_VALUE = 10; - String getClusterName(); + String ROOT_NAMESPACE_PROPERTY_NAME = "workflow.namespace.queue.prefix"; + String ROOT_NAMESPACE_DEFAULT_VALUE = null; + + String DOMAIN_PROPERTY_NAME = "workflow.dyno.keyspace.domain"; + String DOMAIN_DEFAULT_VALUE = null; + + String NON_QUORUM_PORT_PROPERTY_NAME = "queues.dynomite.nonQuorum.port"; + int NON_QUORUM_PORT_DEFAULT_VALUE = 22122; + + default String getCluster() { + return getProperty(CLUSTER_PROPERTY_NAME, CLUSTER_DEFAULT_VALUE); + } + + default String getClusterName() { + return getProperty(CLUSTER_NAME_PROPERTY_NAME, ""); + } + + default String getHosts() { + return getProperty(HOSTS_PROPERTY_NAME, null); + } + + default String getRootNamespace() { + return getProperty(ROOT_NAMESPACE_PROPERTY_NAME, ROOT_NAMESPACE_DEFAULT_VALUE); + } + + default String getDomain() { + return getProperty(DOMAIN_PROPERTY_NAME, DOMAIN_DEFAULT_VALUE); + } + + default int getMaxConnectionsPerHost() { + return getIntProperty( + MAX_CONNECTIONS_PER_HOST_PROPERTY_NAME, + MAX_CONNECTIONS_PER_HOST_DEFAULT_VALUE + ); + } + + default int getNonQuorumPort() { + return getIntProperty(NON_QUORUM_PORT_PROPERTY_NAME, NON_QUORUM_PORT_DEFAULT_VALUE); + } + + default String getQueuePrefix() { + String prefix = getRootNamespace() + "." + getStack(); - String getHosts(); + if (getDomain() != null) { + prefix = prefix + "." + getDomain(); + } - int getMaxConnectionsPerHost(); + return prefix; + } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java new file mode 100644 index 0000000000..c5629a929c --- /dev/null +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java @@ -0,0 +1,105 @@ +package com.netflix.conductor.dyno; + +import com.netflix.discovery.DiscoveryClient; +import com.netflix.dyno.connectionpool.Host; +import com.netflix.dyno.contrib.EurekaHostsSupplier; +import com.netflix.dyno.jedis.DynoJedisClient; +import com.netflix.dyno.queues.ShardSupplier; +import com.netflix.dyno.queues.redis.DynoShardSupplier; +import com.netflix.dyno.queues.redis.RedisQueues; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +import javax.inject.Inject; +import javax.inject.Provider; + +public class RedisQueuesDiscoveryProvider implements Provider { + + private static final Logger logger = LoggerFactory.getLogger(RedisQueuesDiscoveryProvider.class); + + private final DiscoveryClient discoveryClient; + private final DynomiteConfiguration configuration; + + @Inject + RedisQueuesDiscoveryProvider(DiscoveryClient discoveryClient, DynomiteConfiguration configuration){ + this.discoveryClient = discoveryClient; + this.configuration = configuration; + } + + @Override + public RedisQueues get() { + + logger.info("DynoQueueDAO::INIT"); + + String domain = configuration.getDomain(); + String cluster = configuration.getCluster(); + final int readConnPort = configuration.getNonQuorumPort(); + + EurekaHostsSupplier hostSupplier = new EurekaHostsSupplier(cluster, 
discoveryClient) { + @Override + public List getHosts() { + List hosts = super.getHosts(); + List updatedHosts = new ArrayList<>(hosts.size()); + hosts.forEach(host -> { + updatedHosts.add( + new Host( + host.getHostName(), + host.getIpAddress(), + readConnPort, + host.getRack(), + host.getDatacenter(), + host.isUp() ? Host.Status.Up : Host.Status.Down + ) + ); + }); + return updatedHosts; + } + }; + + DynoJedisClient dynoClient = new DynoJedisClient + .Builder() + .withApplicationName(configuration.getAppId()) + .withDynomiteClusterName(cluster) + .withDiscoveryClient(discoveryClient) + .build(); + + DynoJedisClient dynoClientRead = new DynoJedisClient + .Builder() + .withApplicationName(configuration.getAppId()) + .withDynomiteClusterName(cluster) + .withHostSupplier(hostSupplier) + .build(); + + String region = configuration.getRegion(); + String localDC = configuration.getAvailabilityZone(); + + if(localDC == null) { + throw new Error("Availability zone is not defined. " + + "Ensure Configuration.getAvailabilityZone() returns a non-null and non-empty value."); + } + + localDC = localDC.replaceAll(region, ""); + ShardSupplier ss = new DynoShardSupplier( + dynoClient.getConnPool().getConfiguration().getHostSupplier(), + region, + localDC + ); + + RedisQueues queues = new RedisQueues( + dynoClient, + dynoClientRead, + configuration.getQueuePrefix(), + ss, + 60_000, + 60_000 + ); + + logger.info("DynoQueueDAO initialized with prefix " + configuration.getQueuePrefix() + "!"); + + return queues; + } +} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java new file mode 100644 index 0000000000..9be2a551f3 --- /dev/null +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java @@ -0,0 +1,52 @@ +package com.netflix.conductor.dyno; + +import com.netflix.dyno.queues.ShardSupplier; +import com.netflix.dyno.queues.redis.RedisQueues; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; +import javax.inject.Provider; + +import redis.clients.jedis.JedisCommands; + +public class RedisQueuesProvider implements Provider { + + private static final Logger logger = LoggerFactory.getLogger(RedisQueuesProvider.class); + + private final JedisCommands dynoClient; + private final JedisCommands dynoClientRead; + private final ShardSupplier shardSupplier; + private final DynomiteConfiguration configuration; + + @Inject + public RedisQueuesProvider( + JedisCommands dynoClient, + JedisCommands dynoClientRead, + ShardSupplier ss, + DynomiteConfiguration config + ) { + this.dynoClient = dynoClient; + // FIXME: This was in the original code, but seems like a bug? 
+ this.dynoClientRead = dynoClient; + this.shardSupplier = ss; + this.configuration = config; + } + + @Override + public RedisQueues get() { + RedisQueues queues = new RedisQueues( + dynoClient, + dynoClientRead, + configuration.getQueuePrefix(), + shardSupplier, + 60_000, + 60_000 + ); + + logger.info("DynoQueueDAO initialized with prefix " + configuration.getQueuePrefix() + "!"); + + return queues; + } +} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java index 483269b8a9..029bcc10c7 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/SystemPropertiesDynomiteConfiguration.java @@ -1,35 +1,6 @@ package com.netflix.conductor.dyno; -import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import javax.inject.Inject; - public class SystemPropertiesDynomiteConfiguration extends SystemPropertiesConfiguration - implements DynomiteConfiguration { - - private final Configuration configuration; - - @Inject - public SystemPropertiesDynomiteConfiguration(Configuration configuration) { - this.configuration = configuration; - } - - @Override - public String getClusterName() { - return configuration.getProperty(CLUSTER_NAME_PROPERTY_NAME, ""); - } - - @Override - public String getHosts() { - return configuration.getProperty(HOSTS_PROPERTY_NAME, null); - } - - @Override - public int getMaxConnectionsPerHost() { - return configuration.getIntProperty( - MAX_CONNECTIONS_PER_HOST_PROPERTY_NAME, - MAX_CONNECTIONS_PER_HOST_DEFAULT_VALUE - ); - } -} + implements DynomiteConfiguration {} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java index ffa4a164ae..6e5368c81d 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java @@ -67,6 +67,7 @@ import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.index.ElasticSearchDAO; import com.netflix.conductor.dao.redis.JedisMock; +import com.netflix.conductor.dyno.DynoProxy; import redis.clients.jedis.JedisCommands; diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java index eb0091ff2d..d971e44413 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java @@ -47,6 +47,7 @@ import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.dao.redis.JedisMock; +import com.netflix.conductor.dyno.DynoProxy; import redis.clients.jedis.JedisCommands; diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java index 0ad980fe44..3d166fb2e5 100644 --- 
a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java @@ -18,28 +18,29 @@ */ package com.netflix.conductor.tests.utils; -import java.util.Arrays; -import java.util.Set; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; - import com.google.inject.AbstractModule; import com.google.inject.Provides; + import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; +import com.netflix.conductor.core.config.SystemPropertiesConfiguration; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.dao.dynomite.DynoProxy; import com.netflix.conductor.dao.dynomite.RedisExecutionDAO; import com.netflix.conductor.dao.dynomite.RedisMetadataDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; +import com.netflix.conductor.dyno.DynoProxy; import com.netflix.conductor.jedis.JedisMock; -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; import com.netflix.dyno.queues.ShardSupplier; +import java.util.Arrays; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + import redis.clients.jedis.JedisCommands; /** From 6b961862e39d82ce3a7295d013795ab6309f0586 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Tue, 5 Jun 2018 16:52:12 +0200 Subject: [PATCH 005/163] This completely refactors and updates how the application is bootstrapped. * Create more modules to represent the various datastore configurations. * Remove Module constructor arguments in favor of using Providers from the previous PRs. This is more in line with Guice best practices and recommendations (see the sketch below). * Move all of the logic related to composing Modules into ConductorServer and have it happen when the Injector is created. This is also more in line with Guice best practices and recommendations. * Add a configuration option to disable Jersey (although this won't be completely implemented until we bring in gRPC). * Start to minimize redundant code between ServletContext and ConductorServer. * Deprecate some Dyno constructors in favor of simpler injection-based models, especially in the DynoQueueDAO.
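For illustration, here is a minimal sketch of the Provider-based wiring this change moves toward. ExampleBootstrap and ExampleQueueModule are names invented for this sketch; the RedisQueues binding mirrors the one RedisESWorkflowModule declares later in this patch, and a runnable setup would also need bindings for the JedisCommands connections and ShardSupplier that RedisQueuesProvider injects.

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;

    import com.netflix.conductor.dyno.RedisQueuesProvider;
    import com.netflix.dyno.queues.redis.RedisQueues;

    public final class ExampleBootstrap {

        // Hypothetical module: it takes no constructor arguments, so it can be
        // composed freely at the point where the Injector is created.
        static class ExampleQueueModule extends AbstractModule {
            @Override
            protected void configure() {
                // The Provider pulls in its own dependencies via @Inject, and
                // asEagerSingleton() has Guice enforce the single instance that
                // DynoQueueDAOProvider previously had to cache by hand.
                bind(RedisQueues.class)
                        .toProvider(RedisQueuesProvider.class)
                        .asEagerSingleton();
            }
        }

        public static void main(String[] args) {
            // Module composition happens in one place, when the Injector is
            // created, rather than inside module constructors.
            Injector injector = Guice.createInjector(new ExampleQueueModule());
            RedisQueues queues = injector.getInstance(RedisQueues.class);
        }
    }

Scoping the binding instead of caching the instance inside the Provider keeps the singleton decision where Guice expects it, which is what the FIXME in DynoQueueDAOProvider asks for.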
--- .../conductor/core/config/Configuration.java | 32 ++ .../config/SystemPropertiesConfiguration.java | 249 +++++----- .../core/execution/TestConfiguration.java | 13 +- .../conductor/config/TestConfiguration.java | 12 +- .../conductor/dao/RedisESWorkflowModule.java | 36 +- .../dao/dynomite/queue/DynoQueueDAO.java | 378 +++++++-------- .../dynomite/queue/DynoQueueDAOProvider.java | 40 -- .../dao/index/ElasticsearchModule.java | 63 ++- .../com/netflix/conductor/dyno/DynoProxy.java | 435 +++++++++--------- .../dyno/DynoProxyDiscoveryProvider.java | 11 +- .../dyno/DynoShardSupplierProvider.java | 36 ++ .../conductor/dyno/RedisQueuesProvider.java | 6 +- .../jedis/InMemoryJedisProvider.java | 6 +- .../jedis/RedisClusterJedisProvider.java | 35 ++ .../conductor/config/TestConfiguration.java | 13 +- .../dao/dynomite/DynoQueueDAOTest.java | 28 +- .../conductor/server/ConductorServer.java | 389 +++++++--------- .../server/DynomiteClusterModule.java | 32 ++ .../server/ExecutorServiceProvider.java | 32 ++ .../conductor/server/LocalRedisModule.java | 28 ++ .../conductor/server/RedisClusterModule.java | 20 + .../conductor/server/ServerModule.java | 108 +---- .../server/ServletContextListner.java | 124 ++--- .../conductor/server/SwaggerModule.java | 25 + .../conductor/tests/utils/TestModule.java | 120 +++-- 25 files changed, 1150 insertions(+), 1121 deletions(-) delete mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java create mode 100644 redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisClusterJedisProvider.java create mode 100644 server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java create mode 100644 server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java create mode 100644 server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java create mode 100644 server/src/main/java/com/netflix/conductor/server/RedisClusterModule.java create mode 100644 server/src/main/java/com/netflix/conductor/server/SwaggerModule.java diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index a436f72e68..6f3c625906 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -25,26 +25,48 @@ * */ public interface Configuration { + String DB_PROPERTY_NAME = "db"; + String DB_DEFAULT_VALUE = "memory"; + String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds"; int SWEEP_FREQUENCY_DEFAULT_VALUE = 30; + String SWEEP_DISABLE_PROPERTY_NAME = "decider.sweep.disable"; // FIXME This really should be typed correctly. String SWEEP_DISABLE_DEFAULT_VALUE = "false"; + String DISABLE_ASYNC_WORKERS_PROPERTY_NAME = "conductor.disable.async.workers"; // FIXME This really should be typed correctly. 
String DISABLE_ASYNC_WORKERS_DEFAULT_VALUE = "false"; + String ENVIRONMENT_PROPERTY_NAME = "environment"; String ENVIRONMENT_DEFAULT_VALUE = "test"; + String STACK_PROPERTY_NAME = "STACK"; String STACK_DEFAULT_VALUE = "test"; + String APP_ID_PROPERTY_NAME = "APP_ID"; String APP_ID_DEFAULT_VALUE = "conductor"; + String REGION_PROPERTY_NAME = "EC2_REGION"; String REGION_DEFAULT_VALUE = "us-east-1"; + String AVAILABILITY_ZONE_PROPERTY_NAME = "EC2_AVAILABILITY_ZONE"; String AVAILABILITY_ZONE_DEFAULT_VALUE = "us-east-1c"; + + String JERSEY_ENABLED_PROPERTY_NAME = "conductor.jersey.enabled"; + boolean JERSEY_ENABLED_DEFAULT_VALUE = true; + String ADDITIONAL_MODULES_PROPERTY_NAME = "conductor.additional.modules"; + default DB getDB(){ + return DB.valueOf(getDBString()); + } + + default String getDBString() { + return getProperty(DB_PROPERTY_NAME, DB_DEFAULT_VALUE).toUpperCase(); + } + /** * * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows. @@ -101,6 +123,10 @@ public interface Configuration { */ public String getAvailabilityZone(); + default boolean getJerseyEnabled(){ + return getBooleanProperty(JERSEY_ENABLED_PROPERTY_NAME, JERSEY_ENABLED_DEFAULT_VALUE); + } + /** * * @param name Name of the property @@ -117,6 +143,7 @@ public interface Configuration { */ public String getProperty(String name, String defaultValue); + boolean getBooleanProperty(String name, boolean defaultValue); /** * @@ -134,4 +161,9 @@ public default List getAdditionalModules() { return null; } + + enum DB { + REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL + } + } diff --git a/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java b/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java index 1b18a6f111..65314a4465 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/SystemPropertiesConfiguration.java @@ -1,20 +1,17 @@ /** * Copyright 2017 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.core.config; @@ -39,113 +36,127 @@ */ public class SystemPropertiesConfiguration implements Configuration { - private static Logger logger = LoggerFactory.getLogger(SystemPropertiesConfiguration.class); - - @Override - public int getSweepFrequency() { - return getIntProperty(SWEEP_FREQUENCY_PROPERTY_NAME, SWEEP_FREQUENCY_DEFAULT_VALUE); - } - - @Override - public boolean disableSweep() { - String disable = getProperty(SWEEP_DISABLE_PROPERTY_NAME, SWEEP_DISABLE_DEFAULT_VALUE); - return Boolean.getBoolean(disable); - } - - @Override - public boolean disableAsyncWorkers() { - String disable = getProperty(DISABLE_ASYNC_WORKERS_PROPERTY_NAME, DISABLE_ASYNC_WORKERS_DEFAULT_VALUE); - return Boolean.getBoolean(disable); - } - - @Override - public String getServerId() { - try { - return InetAddress.getLocalHost().getHostName(); - } catch (UnknownHostException e) { - return "unknown"; - } - } - - @Override - public String getEnvironment() { - return getProperty(ENVIRONMENT_PROPERTY_NAME, ENVIRONMENT_DEFAULT_VALUE); - } - - @Override - public String getStack() { - return getProperty(STACK_PROPERTY_NAME, STACK_DEFAULT_VALUE); - } - - @Override - public String getAppId() { - return getProperty(APP_ID_PROPERTY_NAME, APP_ID_DEFAULT_VALUE); - } - - @Override - public String getRegion() { - return getProperty(REGION_PROPERTY_NAME, REGION_DEFAULT_VALUE); - } - - @Override - public String getAvailabilityZone() { - return getProperty(AVAILABILITY_ZONE_PROPERTY_NAME, AVAILABILITY_ZONE_DEFAULT_VALUE); - } - - @Override - public int getIntProperty(String key, int defaultValue) { - String val = getProperty(key, Integer.toString(defaultValue)); - try{ - defaultValue = Integer.parseInt(val); - }catch(NumberFormatException e){} - return defaultValue; - } - - @Override - public String getProperty(String key, String defaultValue) { - - String val = null; - try{ - val = System.getenv(key.replace('.','_')); - if (val == null || val.isEmpty()) { - val = Optional.ofNullable(System.getProperty(key)).orElse(defaultValue); - } - }catch(Exception e){ - logger.error(e.getMessage(), e); - } - return val; - } - - @Override - public Map getAll() { - Map map = new HashMap<>(); - Properties props = System.getProperties(); - props.entrySet().forEach(entry -> map.put(entry.getKey().toString(), entry.getValue())); - return map; - } - - @Override - public List getAdditionalModules() { - - String additionalModuleClasses = getProperty(ADDITIONAL_MODULES_PROPERTY_NAME, null); - if(!StringUtils.isEmpty(additionalModuleClasses)) { - try { - List modules = new LinkedList<>(); - String[] classes = additionalModuleClasses.split(","); - for(String clazz : classes) { - Object moduleObj = Class.forName(clazz).newInstance(); - if(moduleObj instanceof AbstractModule) { - AbstractModule abstractModule = (AbstractModule)moduleObj; - modules.add(abstractModule); - } else { - logger.error(clazz + " does not implement " + AbstractModule.class.getName() + ", skipping..."); - } - } - return modules; - }catch(Exception e) { - logger.warn(e.getMessage(), e); - } - } - return null; - } + private static Logger logger = LoggerFactory.getLogger(SystemPropertiesConfiguration.class); + + @Override + public int getSweepFrequency() { + return getIntProperty(SWEEP_FREQUENCY_PROPERTY_NAME, SWEEP_FREQUENCY_DEFAULT_VALUE); + } + + @Override + public boolean disableSweep() { + String disable = getProperty(SWEEP_DISABLE_PROPERTY_NAME, SWEEP_DISABLE_DEFAULT_VALUE); + return Boolean.getBoolean(disable); + } + + @Override + 
public boolean disableAsyncWorkers() { + String disable = getProperty(DISABLE_ASYNC_WORKERS_PROPERTY_NAME, DISABLE_ASYNC_WORKERS_DEFAULT_VALUE); + return Boolean.getBoolean(disable); + } + + @Override + public String getServerId() { + try { + return InetAddress.getLocalHost().getHostName(); + } catch (UnknownHostException e) { + return "unknown"; + } + } + + @Override + public String getEnvironment() { + return getProperty(ENVIRONMENT_PROPERTY_NAME, ENVIRONMENT_DEFAULT_VALUE); + } + + @Override + public String getStack() { + return getProperty(STACK_PROPERTY_NAME, STACK_DEFAULT_VALUE); + } + + @Override + public String getAppId() { + return getProperty(APP_ID_PROPERTY_NAME, APP_ID_DEFAULT_VALUE); + } + + @Override + public String getRegion() { + return getProperty(REGION_PROPERTY_NAME, REGION_DEFAULT_VALUE); + } + + @Override + public String getAvailabilityZone() { + return getProperty(AVAILABILITY_ZONE_PROPERTY_NAME, AVAILABILITY_ZONE_DEFAULT_VALUE); + } + + @Override + public int getIntProperty(String key, int defaultValue) { + String val = getProperty(key, Integer.toString(defaultValue)); + try { + defaultValue = Integer.parseInt(val); + } catch (NumberFormatException e) { + } + return defaultValue; + } + + @Override + public String getProperty(String key, String defaultValue) { + + String val = null; + try { + val = System.getenv(key.replace('.', '_')); + if (val == null || val.isEmpty()) { + val = Optional.ofNullable(System.getProperty(key)).orElse(defaultValue); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + return val; + } + + @Override + public boolean getBooleanProperty(String name, boolean defaultValue) { + String val = getProperty(name, null); + + if (val != null) { + return Boolean.parseBoolean(val); + } else { + return defaultValue; + } + } + + @Override + public Map getAll() { + Map map = new HashMap<>(); + Properties props = System.getProperties(); + props.entrySet().forEach(entry -> map.put(entry.getKey().toString(), entry.getValue())); + return map; + } + + @Override + public List getAdditionalModules() { + + String additionalModuleClasses = getProperty(ADDITIONAL_MODULES_PROPERTY_NAME, null); + + List modules = new LinkedList<>(); + + if (!StringUtils.isEmpty(additionalModuleClasses)) { + try { + String[] classes = additionalModuleClasses.split(","); + for (String clazz : classes) { + Object moduleObj = Class.forName(clazz).newInstance(); + if (moduleObj instanceof AbstractModule) { + AbstractModule abstractModule = (AbstractModule) moduleObj; + modules.add(abstractModule); + } else { + logger.error(clazz + " does not implement " + AbstractModule.class.getName() + ", skipping..."); + } + } + } catch (Exception e) { + logger.warn(e.getMessage(), e); + } + } + + return modules; + } } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestConfiguration.java b/core/src/test/java/com/netflix/conductor/core/execution/TestConfiguration.java index d589f20e65..0067e97330 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestConfiguration.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestConfiguration.java @@ -19,10 +19,10 @@ */ -import java.util.Map; - import com.netflix.conductor.core.config.Configuration; +import java.util.Map; + /** * @author Viren * @@ -68,8 +68,13 @@ public String getAppId() { public String getProperty(String string, String def) { return "dummy"; } - - @Override + + @Override + public boolean getBooleanProperty(String name, boolean defaultValue) { + return false; + } + + 
@Override public String getAvailabilityZone() { return "us-east-1a"; } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java b/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java index b78e5f8ce8..0c48f58cfd 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java @@ -15,12 +15,13 @@ */ package com.netflix.conductor.config; -import java.util.Map; - import com.google.common.collect.ImmutableMap; import com.google.common.collect.Maps; + import com.netflix.conductor.core.config.Configuration; +import java.util.Map; + /** * @author Viren * @@ -70,7 +71,12 @@ public String getProperty(String string, String def) { return val != null ? val : def; } - public void setProperty(String key, String value) { + @Override + public boolean getBooleanProperty(String name, boolean defaultValue) { + return false; + } + + public void setProperty(String key, String value) { testProperties.put(key, value); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java index 2378ae1d15..eb72434eb2 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/RedisESWorkflowModule.java @@ -11,59 +11,33 @@ * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.dao; -/** - * - */ import com.google.inject.AbstractModule; -import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.dynomite.RedisExecutionDAO; import com.netflix.conductor.dao.dynomite.RedisMetadataDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; import com.netflix.conductor.dao.index.ElasticSearchDAO; import com.netflix.conductor.dyno.DynoProxy; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.queues.redis.DynoShardSupplier; - -import javax.inject.Inject; - -import redis.clients.jedis.JedisCommands; +import com.netflix.conductor.dyno.RedisQueuesProvider; +import com.netflix.dyno.queues.redis.RedisQueues; /** * @author Viren */ public class RedisESWorkflowModule extends AbstractModule { - private final Configuration config; - private final JedisCommands dynomiteConnection; - private final HostSupplier hostSupplier; - - @Inject - public RedisESWorkflowModule(Configuration config, JedisCommands dynomiteConnection, HostSupplier hostSupplier) { - this.config = config; - this.dynomiteConnection = dynomiteConnection; - this.hostSupplier = hostSupplier; - } @Override protected void configure() { + bind(MetadataDAO.class).to(RedisMetadataDAO.class); bind(ExecutionDAO.class).to(RedisExecutionDAO.class); bind(QueueDAO.class).to(DynoQueueDAO.class); bind(IndexDAO.class).to(ElasticSearchDAO.class); - bind(DynoQueueDAO.class).toInstance(createQueueDAO()); - bind(DynoProxy.class).toInstance(new DynoProxy(dynomiteConnection)); - - } - - private DynoQueueDAO createQueueDAO() { - - String localDC = config.getAvailabilityZone(); - localDC = localDC.replaceAll(config.getRegion(), ""); - DynoShardSupplier ss = new DynoShardSupplier(hostSupplier, config.getRegion(), localDC); + bind(RedisQueues.class).toProvider(RedisQueuesProvider.class).asEagerSingleton(); + bind(DynoProxy.class).asEagerSingleton(); - return new 
DynoQueueDAO(dynomiteConnection, dynomiteConnection, ss, config); } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java index 0cc1eb3509..cb1ee72a37 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java @@ -1,38 +1,21 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.dao.dynomite.queue; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import javax.inject.Inject; -import javax.inject.Singleton; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.QueueDAO; import com.netflix.discovery.DiscoveryClient; import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.Host.Status; import com.netflix.dyno.contrib.EurekaHostsSupplier; import com.netflix.dyno.jedis.DynoJedisClient; import com.netflix.dyno.queues.DynoQueue; @@ -42,172 +25,193 @@ import com.netflix.dyno.queues.redis.RedisDynoQueue; import com.netflix.dyno.queues.redis.RedisQueues; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import javax.inject.Inject; +import javax.inject.Singleton; + import redis.clients.jedis.JedisCommands; @Singleton public class DynoQueueDAO implements QueueDAO { - private static Logger logger = LoggerFactory.getLogger(DynoQueueDAO.class); - - private RedisQueues queues; - - private JedisCommands dynoClient; - - private JedisCommands dynoClientRead; - - private ShardSupplier ss; - - private String domain; - - private Configuration config; - - @Inject - public DynoQueueDAO(DiscoveryClient dc, Configuration config) { - - logger.info("DynoQueueDAO::INIT"); - - this.config = config; - this.domain = config.getProperty("workflow.dyno.keyspace.domain", null); - String cluster = config.getProperty("workflow.dynomite.cluster", null); - final int readConnPort = 
config.getIntProperty("queues.dynomite.nonQuorum.port", 22122); - - EurekaHostsSupplier hostSupplier = new EurekaHostsSupplier(cluster, dc) { - @Override - public List getHosts() { - List hosts = super.getHosts(); - List updatedHosts = new ArrayList<>(hosts.size()); - hosts.forEach(host -> { - updatedHosts.add(new Host(host.getHostName(), host.getIpAddress(), readConnPort, host.getRack(), host.getDatacenter(), host.isUp() ? Status.Up : Status.Down)); - }); - return updatedHosts; - } - }; - - this.dynoClientRead = new DynoJedisClient.Builder().withApplicationName(config.getAppId()).withDynomiteClusterName(cluster).withHostSupplier(hostSupplier).build(); - DynoJedisClient dyno = new DynoJedisClient.Builder().withApplicationName(config.getAppId()).withDynomiteClusterName(cluster).withDiscoveryClient(dc).build(); - - this.dynoClient = dyno; - - String region = config.getRegion(); - String localDC = config.getAvailabilityZone(); - - if(localDC == null) { - throw new Error("Availability zone is not defined. Ensure Configuration.getAvailabilityZone() returns a non-null and non-empty value."); - } - - localDC = localDC.replaceAll(region, ""); - this.ss = new DynoShardSupplier(dyno.getConnPool().getConfiguration().getHostSupplier(), region, localDC); - init(); - } - - public DynoQueueDAO(JedisCommands dynoClient, JedisCommands dynoClientRead, ShardSupplier ss, Configuration config) { - this.dynoClient = dynoClient; - this.dynoClientRead = dynoClient; - this.ss = ss; - this.config = config; - init(); - } - - public void init() { - - String rootNamespace = config.getProperty("workflow.namespace.queue.prefix", null); - String stack = config.getStack(); - String prefix = rootNamespace + "." + stack; - if (domain != null) { - prefix = prefix + "." + domain; - } - queues = new RedisQueues(dynoClient, dynoClientRead, prefix, ss, 60_000, 60_000); - logger.info("DynoQueueDAO initialized with prefix " + prefix + "!"); - } - - @Override - public void push(String queueName, String id, long offsetTimeInSecond) { - Message msg = new Message(id, null); - msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); - queues.get(queueName).push(Arrays.asList(msg)); - } - - @Override - public void push(String queueName, List messages) { - List msgs = messages.stream().map(msg -> new Message(msg.getId(), msg.getPayload())).collect(Collectors.toList()); - queues.get(queueName).push(msgs); - } - - @Override - public boolean pushIfNotExists(String queueName, String id, long offsetTimeInSecond) { - DynoQueue queue = queues.get(queueName); - if (queue.get(id) != null) { - return false; - } - Message msg = new Message(id, null); - msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); - queue.push(Arrays.asList(msg)); - return true; - } - - @Override - public List pop(String queueName, int count, int timeout) { - List msg = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); - return msg.stream().map(m -> m.getId()).collect(Collectors.toList()); - } - - @Override - public List pollMessages(String queueName, int count, int timeout) { - List msgs = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); - return msgs.stream().map(msg -> new com.netflix.conductor.core.events.queue.Message(msg.getId(), msg.getPayload(), null)).collect(Collectors.toList()); - } - - @Override - public void remove(String queueName, String messageId) { - queues.get(queueName).remove(messageId); - } - - @Override - public int getSize(String queueName) { - return (int) queues.get(queueName).size(); - } - - @Override - public boolean 
ack(String queueName, String messageId) { - return queues.get(queueName).ack(messageId); - - } - - @Override - public boolean setUnackTimeout(String queueName, String messageId, long timeout) { - return queues.get(queueName).setUnackTimeout(messageId, timeout); - } - - @Override - public void flush(String queueName) { - DynoQueue queue = queues.get(queueName); - if (queue != null) { - queue.clear(); - } - } - - @Override - public Map queuesDetail() { - Map map = queues.queues().stream().collect(Collectors.toMap(queue -> queue.getName(), q -> q.size())); - return map; - } - - @Override - public Map>> queuesDetailVerbose() { - Map>> map = queues.queues().stream() - .collect(Collectors.toMap(queue -> queue.getName(), q -> q.shardSizes())); - return map; - } - - public void processUnacks(String queueName) { - ((RedisDynoQueue)queues.get(queueName)).processUnacks();; - } - - @Override - public boolean setOffsetTime(String queueName, String id, long offsetTimeInSecond) { - DynoQueue queue = queues.get(queueName); - return queue.setTimeout(id, offsetTimeInSecond); - - } - -} \ No newline at end of file + private static Logger logger = LoggerFactory.getLogger(DynoQueueDAO.class); + + private RedisQueues queues; + + private JedisCommands dynoClient; + + private JedisCommands dynoClientRead; + + private ShardSupplier ss; + + private String domain; + + private Configuration config; + + @Inject + public DynoQueueDAO(RedisQueues queues) { + this.queues = queues; + } + + @Deprecated + public DynoQueueDAO(DiscoveryClient dc, Configuration config) { + + logger.info("DynoQueueDAO::INIT"); + + this.config = config; + this.domain = config.getProperty("workflow.dyno.keyspace.domain", null); + String cluster = config.getProperty("workflow.dynomite.cluster", null); + final int readConnPort = config.getIntProperty("queues.dynomite.nonQuorum.port", 22122); + + EurekaHostsSupplier hostSupplier = new EurekaHostsSupplier(cluster, dc) { + @Override + public List getHosts() { + List hosts = super.getHosts(); + List updatedHosts = new ArrayList<>(hosts.size()); + hosts.forEach(host -> { + updatedHosts.add(new Host(host.getHostName(), host.getIpAddress(), readConnPort, host.getRack(), host.getDatacenter(), host.isUp() ? Host.Status.Up : Host.Status.Down)); + }); + return updatedHosts; + } + }; + + this.dynoClientRead = new DynoJedisClient.Builder().withApplicationName(config.getAppId()).withDynomiteClusterName(cluster).withHostSupplier(hostSupplier).build(); + DynoJedisClient dyno = new DynoJedisClient.Builder().withApplicationName(config.getAppId()).withDynomiteClusterName(cluster).withDiscoveryClient(dc).build(); + + this.dynoClient = dyno; + + String region = config.getRegion(); + String localDC = config.getAvailabilityZone(); + + if (localDC == null) { + throw new Error("Availability zone is not defined. Ensure Configuration.getAvailabilityZone() returns a non-null and non-empty value."); + } + + localDC = localDC.replaceAll(region, ""); + this.ss = new DynoShardSupplier(dyno.getConnPool().getConfiguration().getHostSupplier(), region, localDC); + init(); + } + + @Deprecated + public DynoQueueDAO(JedisCommands dynoClient, JedisCommands dynoClientRead, ShardSupplier ss, Configuration config) { + this.dynoClient = dynoClient; + this.dynoClientRead = dynoClient; + this.ss = ss; + this.config = config; + init(); + } + + @Deprecated + private void init() { + + String rootNamespace = config.getProperty("workflow.namespace.queue.prefix", null); + String stack = config.getStack(); + String prefix = rootNamespace + "." 
+ stack; + if (domain != null) { + prefix = prefix + "." + domain; + } + queues = new RedisQueues(dynoClient, dynoClientRead, prefix, ss, 60_000, 60_000); + logger.info("DynoQueueDAO initialized with prefix " + prefix + "!"); + } + + @Override + public void push(String queueName, String id, long offsetTimeInSecond) { + Message msg = new Message(id, null); + msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); + queues.get(queueName).push(Arrays.asList(msg)); + } + + @Override + public void push(String queueName, List<com.netflix.conductor.core.events.queue.Message> messages) { + List<Message> msgs = messages.stream().map(msg -> new Message(msg.getId(), msg.getPayload())).collect(Collectors.toList()); + queues.get(queueName).push(msgs); + } + + @Override + public boolean pushIfNotExists(String queueName, String id, long offsetTimeInSecond) { + DynoQueue queue = queues.get(queueName); + if (queue.get(id) != null) { + return false; + } + Message msg = new Message(id, null); + msg.setTimeout(offsetTimeInSecond, TimeUnit.SECONDS); + queue.push(Arrays.asList(msg)); + return true; + } + + @Override + public List<String> pop(String queueName, int count, int timeout) { + List<Message> msg = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); + return msg.stream().map(m -> m.getId()).collect(Collectors.toList()); + } + + @Override + public List<com.netflix.conductor.core.events.queue.Message> pollMessages(String queueName, int count, int timeout) { + List<Message> msgs = queues.get(queueName).pop(count, timeout, TimeUnit.MILLISECONDS); + return msgs.stream().map(msg -> new com.netflix.conductor.core.events.queue.Message(msg.getId(), msg.getPayload(), null)).collect(Collectors.toList()); + } + + @Override + public void remove(String queueName, String messageId) { + queues.get(queueName).remove(messageId); + } + + @Override + public int getSize(String queueName) { + return (int) queues.get(queueName).size(); + } + + @Override + public boolean ack(String queueName, String messageId) { + return queues.get(queueName).ack(messageId); + + } + + @Override + public boolean setUnackTimeout(String queueName, String messageId, long timeout) { + return queues.get(queueName).setUnackTimeout(messageId, timeout); + } + + @Override + public void flush(String queueName) { + DynoQueue queue = queues.get(queueName); + if (queue != null) { + queue.clear(); + } + } + + @Override + public Map<String, Long> queuesDetail() { + Map<String, Long> map = queues.queues().stream().collect(Collectors.toMap(queue -> queue.getName(), q -> q.size())); + return map; + } + + @Override + public Map<String, Map<String, Map<String, Long>>> queuesDetailVerbose() { + Map<String, Map<String, Map<String, Long>>> map = queues.queues().stream() + .collect(Collectors.toMap(queue -> queue.getName(), q -> q.shardSizes())); + return map; + } + + public void processUnacks(String queueName) { + ((RedisDynoQueue) queues.get(queueName)).processUnacks(); + } + + @Override + public boolean setOffsetTime(String queueName, String id, long offsetTimeInSecond) { + DynoQueue queue = queues.get(queueName); + return queue.setTimeout(id, offsetTimeInSecond); + + } + +} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java deleted file mode 100644 index 04f7064440..0000000000 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAOProvider.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.netflix.conductor.dao; - - -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; -import com.netflix.dyno.connectionpool.HostSupplier;
-import com.netflix.dyno.queues.redis.DynoShardSupplier; - -import javax.inject.Inject; -import javax.inject.Provider; - -import redis.clients.jedis.JedisCommands; - -public class DynoQueueDAOProvider implements Provider<QueueDAO> { - private final Configuration config; - private final JedisCommands dynomiteConnection; - private final HostSupplier hostSupplier; - private final QueueDAO dao; - - @Inject - public DynoQueueDAOProvider(Configuration config, JedisCommands dynomiteConnection, HostSupplier hostSupplier) { - this.config = config; - this.dynomiteConnection = dynomiteConnection; - this.hostSupplier = hostSupplier; - - // FIXME: This is a hacky way to force a single instance. It would be better for Guice to enforce this by using - // an @Inject constructor on DynoQueueDAO and a binding rather than a Provider. - - String localDC = config.getAvailabilityZone(); - localDC = localDC.replaceAll(config.getRegion(), ""); - DynoShardSupplier ss = new DynoShardSupplier(hostSupplier, config.getRegion(), localDC); - - this.dao = new DynoQueueDAO(dynomiteConnection, dynomiteConnection, ss, config); - } - - @Override - public QueueDAO get() { - return this.dao; - } -} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/index/ElasticsearchModule.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/index/ElasticsearchModule.java index 681b87e2a8..6cb40dbcb0 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/index/ElasticsearchModule.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/index/ElasticsearchModule.java @@ -1,26 +1,25 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License.
*/ /** - * + * */ package com.netflix.conductor.dao.index; -import java.net.InetAddress; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; -import javax.inject.Singleton; +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.dao.IndexDAO; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -29,9 +28,9 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.netflix.conductor.core.config.Configuration; +import java.net.InetAddress; + +import javax.inject.Singleton; /** @@ -40,21 +39,21 @@ */ public class ElasticsearchModule extends AbstractModule { - private static Logger log = LoggerFactory.getLogger(ElasticSearchDAO.class); - - @Provides - @Singleton - public Client getClient(Configuration config) throws Exception { + private static Logger log = LoggerFactory.getLogger(ElasticsearchModule.class); - String clusterAddress = config.getProperty("workflow.elasticsearch.url", ""); - if(clusterAddress.equals("")) { - log.warn("workflow.elasticsearch.url is not set. Indexing will remain DISABLED."); - } - - Settings.Builder settings = Settings.settingsBuilder(); + @Provides + @Singleton + public Client getClient(Configuration config) throws Exception { + + String clusterAddress = config.getProperty("workflow.elasticsearch.url", ""); + if (clusterAddress.equals("")) { + log.warn("workflow.elasticsearch.url is not set. Indexing will remain DISABLED."); + } + + Settings.Builder settings = Settings.settingsBuilder(); settings.put("client.transport.ignore_cluster_name", true); settings.put("client.transport.sniff", true); - + TransportClient tc = TransportClient.builder().settings(settings).build(); String[] hosts = clusterAddress.split(","); for (String host : hosts) { @@ -65,11 +64,11 @@ public Client getClient(Configuration config) throws Exception { tc.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostname), hostport)); } return tc; - + } - @Override - protected void configure() { - - } + @Override + protected void configure() { + bind(IndexDAO.class).to(ElasticSearchDAO.class); + } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java index 9a45b98fdd..f7063d2c61 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxy.java @@ -1,17 +1,14 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.dyno; @@ -43,218 +40,222 @@ import redis.clients.jedis.params.sortedset.ZAddParams; /** - * + * * @author Viren Proxy for the Dynomite client */ @Singleton public class DynoProxy { - private static Logger logger = LoggerFactory.getLogger(DynoProxy.class); - - protected DiscoveryClient dc; - - protected JedisCommands dynoClient; - - @Inject - public DynoProxy(DiscoveryClient dc, Configuration config) throws DynoException, InterruptedException, ExecutionException { - this.dc = dc; - String cluster = config.getProperty("workflow.dynomite.cluster", null); - String applicationName = config.getAppId(); - this.dynoClient = new DynoJedisClient.Builder() - .withApplicationName(applicationName) - .withDynomiteClusterName(cluster) - .withDiscoveryClient(dc) - .build(); - } - - public DynoProxy(JedisCommands dynoClient) { - this.dynoClient = dynoClient; - } - - public Set zrange(String key, long start, long end) { - return dynoClient.zrange(key, start, end); - } - - public Set zrangeByScoreWithScores(String key, double maxScore, int count) { - return dynoClient.zrangeByScoreWithScores(key, 0, maxScore, 0, count); - } - - public Set zrangeByScore(String key, double maxScore, int count) { - return dynoClient.zrangeByScore(key, 0, maxScore, 0, count); - } - - public Set zrangeByScore(String key, double minScore, double maxScore, int count) { - return dynoClient.zrangeByScore(key, minScore, maxScore, 0, count); - } - - public ScanResult zscan(String key, int cursor) { - return dynoClient.zscan(key, "" + cursor); - } - - public String get(String key) { - return dynoClient.get(key); - } - - public Long zcard(String key) { - return dynoClient.zcard(key); - } - - public Long del(String key) { - return dynoClient.del(key); - } - - public Long zrem(String key, String member) { - return dynoClient.zrem(key, member); - } - - public String set(String key, String value) { - String retVal = dynoClient.set(key, value); - return retVal; - } - - public Long setnx(String key, String value) { - Long added = dynoClient.setnx(key, value); - return added; - } - - public Long zadd(String key, double score, String member) { - Long retVal = dynoClient.zadd(key, score, member); - return retVal; - } - - public Long zaddnx(String key, double score, String member) { - ZAddParams params = ZAddParams.zAddParams().nx(); - Long retVal = dynoClient.zadd(key, score, member, params); - return retVal; - } - - public Long hset(String key, String field, String value) { - Long retVal = dynoClient.hset(key, field, value); - return retVal; - } - - public Long hsetnx(String key, String field, String value) { - Long retVal = dynoClient.hsetnx(key, field, value); - return retVal; - } - - public Long hlen(String key) { - Long retVal = dynoClient.hlen(key); - return retVal; - } - - public String hget(String key, String field) { - return dynoClient.hget(key, field); - } - - public Optional optionalHget(String key, String field) { - return Optional.ofNullable(dynoClient.hget(key, field)); - } - - public Map hscan(String key, int count) { - Map m = new HashMap<>(); - int cursor = 0; - do { - ScanResult> sr = dynoClient.hscan(key, "" + cursor); - cursor = Integer.parseInt(sr.getStringCursor()); - for (Entry 
r : sr.getResult()) { - m.put(r.getKey(), r.getValue()); - } - if(m.size() > count) { - break; - } - } while (cursor > 0); - - return m; - } - - public Map hgetAll(String key) { - Map m = new HashMap<>(); - JedisCommands dyno = dynoClient; - int cursor = 0; - do { - ScanResult> sr = dyno.hscan(key, "" + cursor); - cursor = Integer.parseInt(sr.getStringCursor()); - for (Entry r : sr.getResult()) { - m.put(r.getKey(), r.getValue()); - } - } while (cursor > 0); - - return m; - } - - public List hvals(String key) { - logger.trace("hvals {}", key); - return dynoClient.hvals(key); - } - - public Set hkeys(String key) { - logger.trace("hkeys {}", key); - JedisCommands client = dynoClient; - Set keys = new HashSet<>(); - int cursor = 0; - do { - ScanResult> sr = client.hscan(key, "" + cursor); - cursor = Integer.parseInt(sr.getStringCursor()); - List> result = sr.getResult(); - for (Entry e : result) { - keys.add(e.getKey()); - } - } while (cursor > 0); - - return keys; - } - - public Long hdel(String key, String... fields) { - logger.trace("hdel {} {}", key, fields[0]); - return dynoClient.hdel(key, fields); - } - - public Long expire(String key, int seconds) { - return dynoClient.expire(key, seconds); - } - - public Boolean hexists(String key, String field) { - return dynoClient.hexists(key, field); - } - - public Long sadd(String key, String value) { - logger.trace("sadd {} {}", key, value); - Long retVal = dynoClient.sadd(key, value); - return retVal; - } - - public Long srem(String key, String member) { - logger.trace("srem {} {}", key, member); - Long retVal = dynoClient.srem(key, member); - return retVal; - } - - public boolean sismember(String key, String member) { - return dynoClient.sismember(key, member); - } - - public Set smembers(String key) { - logger.trace("smembers {}", key); - JedisCommands client = dynoClient; - Set r = new HashSet<>(); - int cursor = 0; - ScanParams sp = new ScanParams(); - sp.count(50); - - do { - ScanResult sr = client.sscan(key, "" + cursor, sp); - cursor = Integer.parseInt(sr.getStringCursor()); - r.addAll(sr.getResult()); - - } while (cursor > 0); - - return r; - - } - - public Long scard(String key) { - return dynoClient.scard(key); - } + private static Logger logger = LoggerFactory.getLogger(DynoProxy.class); + + protected DiscoveryClient dc; + + protected JedisCommands dynoClient; + + @Inject + public DynoProxy(JedisCommands dynoClient) { + this.dynoClient = dynoClient; + } + + @Deprecated + /** + * @deprecated The preferred method of construction for this use case is via DynoProxyDiscoveryProvider. 
+ */ + public DynoProxy(DiscoveryClient dc, Configuration config) throws DynoException, InterruptedException, ExecutionException { + this.dc = dc; + String cluster = config.getProperty("workflow.dynomite.cluster", null); + String applicationName = config.getAppId(); + this.dynoClient = new DynoJedisClient.Builder() + .withApplicationName(applicationName) + .withDynomiteClusterName(cluster) + .withDiscoveryClient(dc) + .build(); + } + + public Set zrange(String key, long start, long end) { + return dynoClient.zrange(key, start, end); + } + + public Set zrangeByScoreWithScores(String key, double maxScore, int count) { + return dynoClient.zrangeByScoreWithScores(key, 0, maxScore, 0, count); + } + + public Set zrangeByScore(String key, double maxScore, int count) { + return dynoClient.zrangeByScore(key, 0, maxScore, 0, count); + } + + public Set zrangeByScore(String key, double minScore, double maxScore, int count) { + return dynoClient.zrangeByScore(key, minScore, maxScore, 0, count); + } + + public ScanResult zscan(String key, int cursor) { + return dynoClient.zscan(key, "" + cursor); + } + + public String get(String key) { + return dynoClient.get(key); + } + + public Long zcard(String key) { + return dynoClient.zcard(key); + } + + public Long del(String key) { + return dynoClient.del(key); + } + + public Long zrem(String key, String member) { + return dynoClient.zrem(key, member); + } + + public String set(String key, String value) { + String retVal = dynoClient.set(key, value); + return retVal; + } + + public Long setnx(String key, String value) { + Long added = dynoClient.setnx(key, value); + return added; + } + + public Long zadd(String key, double score, String member) { + Long retVal = dynoClient.zadd(key, score, member); + return retVal; + } + + public Long zaddnx(String key, double score, String member) { + ZAddParams params = ZAddParams.zAddParams().nx(); + Long retVal = dynoClient.zadd(key, score, member, params); + return retVal; + } + + public Long hset(String key, String field, String value) { + Long retVal = dynoClient.hset(key, field, value); + return retVal; + } + + public Long hsetnx(String key, String field, String value) { + Long retVal = dynoClient.hsetnx(key, field, value); + return retVal; + } + + public Long hlen(String key) { + Long retVal = dynoClient.hlen(key); + return retVal; + } + + public String hget(String key, String field) { + return dynoClient.hget(key, field); + } + + public Optional optionalHget(String key, String field) { + return Optional.ofNullable(dynoClient.hget(key, field)); + } + + public Map hscan(String key, int count) { + Map m = new HashMap<>(); + int cursor = 0; + do { + ScanResult> sr = dynoClient.hscan(key, "" + cursor); + cursor = Integer.parseInt(sr.getStringCursor()); + for (Entry r : sr.getResult()) { + m.put(r.getKey(), r.getValue()); + } + if (m.size() > count) { + break; + } + } while (cursor > 0); + + return m; + } + + public Map hgetAll(String key) { + Map m = new HashMap<>(); + JedisCommands dyno = dynoClient; + int cursor = 0; + do { + ScanResult> sr = dyno.hscan(key, "" + cursor); + cursor = Integer.parseInt(sr.getStringCursor()); + for (Entry r : sr.getResult()) { + m.put(r.getKey(), r.getValue()); + } + } while (cursor > 0); + + return m; + } + + public List hvals(String key) { + logger.trace("hvals {}", key); + return dynoClient.hvals(key); + } + + public Set hkeys(String key) { + logger.trace("hkeys {}", key); + JedisCommands client = dynoClient; + Set keys = new HashSet<>(); + int cursor = 0; + do { + ScanResult> sr = 
client.hscan(key, "" + cursor); + cursor = Integer.parseInt(sr.getStringCursor()); + List> result = sr.getResult(); + for (Entry e : result) { + keys.add(e.getKey()); + } + } while (cursor > 0); + + return keys; + } + + public Long hdel(String key, String... fields) { + logger.trace("hdel {} {}", key, fields[0]); + return dynoClient.hdel(key, fields); + } + + public Long expire(String key, int seconds) { + return dynoClient.expire(key, seconds); + } + + public Boolean hexists(String key, String field) { + return dynoClient.hexists(key, field); + } + + public Long sadd(String key, String value) { + logger.trace("sadd {} {}", key, value); + Long retVal = dynoClient.sadd(key, value); + return retVal; + } + + public Long srem(String key, String member) { + logger.trace("srem {} {}", key, member); + Long retVal = dynoClient.srem(key, member); + return retVal; + } + + public boolean sismember(String key, String member) { + return dynoClient.sismember(key, member); + } + + public Set smembers(String key) { + logger.trace("smembers {}", key); + JedisCommands client = dynoClient; + Set r = new HashSet<>(); + int cursor = 0; + ScanParams sp = new ScanParams(); + sp.count(50); + + do { + ScanResult sr = client.sscan(key, "" + cursor, sp); + cursor = Integer.parseInt(sr.getStringCursor()); + r.addAll(sr.getResult()); + + } while (cursor > 0); + + return r; + + } + + public Long scard(String key) { + return dynoClient.scard(key); + } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java index 2dbc08c168..753321d5f6 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java @@ -1,6 +1,5 @@ package com.netflix.conductor.dyno; -import com.netflix.conductor.core.config.Configuration; import com.netflix.discovery.DiscoveryClient; import com.netflix.dyno.jedis.DynoJedisClient; @@ -11,22 +10,20 @@ public class DynoProxyDiscoveryProvider implements Provider { private final DiscoveryClient discoveryClient; - private final Configuration configuration; + private final DynomiteConfiguration configuration; @Inject - public DynoProxyDiscoveryProvider(DiscoveryClient discoveryClient, Configuration configuration) { + public DynoProxyDiscoveryProvider(DiscoveryClient discoveryClient, DynomiteConfiguration configuration) { this.discoveryClient = discoveryClient; this.configuration = configuration; } @Override public JedisCommands get() { - String cluster = configuration.getProperty("workflow.dynomite.cluster", null); - String applicationName = configuration.getAppId(); return new DynoJedisClient .Builder() - .withApplicationName(applicationName) - .withDynomiteClusterName(cluster) + .withApplicationName(configuration.getAppId()) + .withDynomiteClusterName(configuration.getCluster()) .withDiscoveryClient(discoveryClient) .build(); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java new file mode 100644 index 0000000000..9e309859b6 --- /dev/null +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java @@ -0,0 +1,36 @@ +package com.netflix.conductor.dyno; + +import com.google.inject.ProvisionException; + +import com.netflix.dyno.connectionpool.HostSupplier; 
+import com.netflix.dyno.queues.ShardSupplier; +import com.netflix.dyno.queues.redis.DynoShardSupplier; + +import javax.inject.Inject; +import javax.inject.Provider; + +public class DynoShardSupplierProvider implements Provider { + + private final HostSupplier hostSupplier; + private final DynomiteConfiguration configuration; + + @Inject + public DynoShardSupplierProvider(HostSupplier hostSupplier, DynomiteConfiguration dynomiteConfiguration) { + this.hostSupplier = hostSupplier; + this.configuration = dynomiteConfiguration; + } + + @Override + public ShardSupplier get() { + if(configuration.getAvailabilityZone() == null) + throw new ProvisionException( + "Availability zone is not defined. Ensure Configuration.getAvailabilityZone() returns a non-null " + + "and non-empty value." + ); + + + String localDC = configuration.getAvailabilityZone().replaceAll(configuration.getRegion(), ""); + + return new DynoShardSupplier(hostSupplier, configuration.getRegion(), localDC); + } +} diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java index 9be2a551f3..4b037a19f4 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java @@ -7,6 +7,7 @@ import org.slf4j.LoggerFactory; import javax.inject.Inject; +import javax.inject.Named; import javax.inject.Provider; import redis.clients.jedis.JedisCommands; @@ -23,13 +24,12 @@ public class RedisQueuesProvider implements Provider { @Inject public RedisQueuesProvider( JedisCommands dynoClient, - JedisCommands dynoClientRead, + @Named("DynoReadClient") JedisCommands dynoClientRead, ShardSupplier ss, DynomiteConfiguration config ) { this.dynoClient = dynoClient; - // FIXME: This was in the original code, but seems like a bug? 
- this.dynoClientRead = dynoClient; + this.dynoClientRead = dynoClientRead; this.shardSupplier = ss; this.configuration = config; } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java index 4af6ff42db..f8987db791 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/InMemoryJedisProvider.java @@ -1,12 +1,16 @@ package com.netflix.conductor.jedis; import javax.inject.Provider; +import javax.inject.Singleton; import redis.clients.jedis.JedisCommands; +@Singleton public class InMemoryJedisProvider implements Provider { + private final JedisCommands mock = new JedisMock(); + @Override public JedisCommands get() { - return new JedisMock(); + return mock; } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisClusterJedisProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisClusterJedisProvider.java new file mode 100644 index 0000000000..ef663469ac --- /dev/null +++ b/redis-persistence/src/main/java/com/netflix/conductor/jedis/RedisClusterJedisProvider.java @@ -0,0 +1,35 @@ +package com.netflix.conductor.jedis; + +import com.netflix.dyno.connectionpool.Host; +import com.netflix.dyno.connectionpool.HostSupplier; + +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; + +import java.util.ArrayList; + +import javax.inject.Inject; +import javax.inject.Provider; + +import redis.clients.jedis.HostAndPort; +import redis.clients.jedis.JedisCluster; +import redis.clients.jedis.JedisCommands; + +public class RedisClusterJedisProvider implements Provider { + + private final HostSupplier hostSupplier; + + @Inject + public RedisClusterJedisProvider(HostSupplier hostSupplier){ + this.hostSupplier = hostSupplier; + } + + @Override + public JedisCommands get() { + // FIXME This doesn't seem very safe, but is how it was in the code this was moved from. 
+ Host host = new ArrayList(hostSupplier.getHosts()).get(0); + GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig(); + poolConfig.setMinIdle(5); + poolConfig.setMaxTotal(1000); + return new JedisCluster(new HostAndPort(host.getHostName(), host.getPort()), poolConfig); + } +} diff --git a/redis-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java b/redis-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java index 36f3e5953d..08d73987eb 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java @@ -19,10 +19,10 @@ */ -import java.util.Map; - import com.netflix.conductor.core.config.Configuration; +import java.util.Map; + /** * @author Viren * @@ -68,8 +68,13 @@ public String getAppId() { public String getProperty(String string, String def) { return "dummy"; } - - @Override + + @Override + public boolean getBooleanProperty(String name, boolean defaultValue) { + return false; + } + + @Override public String getAvailabilityZone() { return "us-east-1a"; } diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java index a0a7904bcb..008f8f6bec 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java @@ -15,20 +15,6 @@ */ package com.netflix.conductor.dao.dynomite; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; @@ -38,8 +24,22 @@ import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.dyno.queues.ShardSupplier; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + import redis.clients.jedis.JedisCommands; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + /** * * @author Viren diff --git a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java index 941e879da8..c9bb174f7a 100644 --- a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java +++ b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java @@ -1,271 +1,200 @@ /** * Copyright 2017 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** - * + * */ package com.netflix.conductor.server; +import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.servlet.GuiceFilter; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; +import com.netflix.conductor.dao.RedisESWorkflowModule; import com.netflix.conductor.dao.es.EmbeddedElasticSearch; -import com.netflix.conductor.jedis.JedisMock; -import com.netflix.dyno.connectionpool.Host; -import com.netflix.dyno.connectionpool.Host.Status; -import com.netflix.dyno.connectionpool.HostSupplier; -import com.netflix.dyno.connectionpool.TokenMapSupplier; -import com.netflix.dyno.connectionpool.impl.ConnectionPoolConfigurationImpl; -import com.netflix.dyno.connectionpool.impl.lb.HostToken; -import com.netflix.dyno.jedis.DynoJedisClient; +import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; import com.sun.jersey.api.client.Client; -import org.apache.commons.pool2.impl.GenericObjectPoolConfig; import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.DefaultServlet; import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.InputStream; +import java.util.ArrayList; import java.util.Arrays; import java.util.EnumSet; import java.util.LinkedList; import java.util.List; -import java.util.Set; import javax.servlet.DispatcherType; import javax.ws.rs.core.MediaType; -import redis.clients.jedis.HostAndPort; -import redis.clients.jedis.JedisCluster; -import redis.clients.jedis.JedisCommands; - /** * @author Viren - * */ public class ConductorServer { - private static Logger logger = LoggerFactory.getLogger(ConductorServer.class); - - enum DB { - redis, dynomite, memory, redis_cluster, mysql - } - - private ServerModule serverModule; - - private Server server; - - private SystemPropertiesConfiguration systemPropertiesConfiguration; - - private DB database; - - public ConductorServer(SystemPropertiesConfiguration systemPropertiesConfiguration) { - this.systemPropertiesConfiguration = systemPropertiesConfiguration; - String dynoClusterName = systemPropertiesConfiguration.getProperty("workflow.dynomite.cluster.name", ""); - - List dynoHosts = new LinkedList<>(); - String dbstring = systemPropertiesConfiguration.getProperty("db", "memory"); - try { - database = DB.valueOf(dbstring); - }catch(IllegalArgumentException ie) { - logger.error("Invalid db name: " + dbstring + ", supported values are: " + Arrays.toString(DB.values())); - System.exit(1); - } - - 
if(!(database.equals(DB.memory) || database.equals(DB.mysql))) { - String hosts = systemPropertiesConfiguration.getProperty("workflow.dynomite.cluster.hosts", null); - if(hosts == null) { - System.err.println("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration."); - logger.error("Missing dynomite/redis hosts. Ensure 'workflow.dynomite.cluster.hosts' has been set in the supplied configuration."); - System.exit(1); - } - String[] hostConfigs = hosts.split(";"); - - for(String hostConfig : hostConfigs) { - String[] hostConfigValues = hostConfig.split(":"); - String host = hostConfigValues[0]; - int port = Integer.parseInt(hostConfigValues[1]); - String rack = hostConfigValues[2]; - Host dynoHost = new Host(host, port, rack, Status.Up); - dynoHosts.add(dynoHost); - } - - }else { - //Create a single shard host supplier - Host dynoHost = new Host("localhost", 0, systemPropertiesConfiguration.getAvailabilityZone(), Status.Up); - dynoHosts.add(dynoHost); - } - init(dynoClusterName, dynoHosts); - } - - private void init(String dynoClusterName, List dynoHosts) { - HostSupplier hostSupplier = () -> dynoHosts; - - JedisCommands jedis = null; - - switch(database) { - case redis: - case dynomite: - ConnectionPoolConfigurationImpl connectionPoolConfiguration = new ConnectionPoolConfigurationImpl(dynoClusterName) - .withTokenSupplier(getTokenMapSupplier(dynoHosts)) - .setLocalRack(systemPropertiesConfiguration.getAvailabilityZone()) - .setLocalDataCenter(systemPropertiesConfiguration.getRegion()) - .setSocketTimeout(0) - .setConnectTimeout(0) - .setMaxConnsPerHost(systemPropertiesConfiguration.getIntProperty("workflow.dynomite.connection.maxConnsPerHost", 10)); - - jedis = new DynoJedisClient.Builder() - .withHostSupplier(hostSupplier) - .withApplicationName(systemPropertiesConfiguration.getAppId()) - .withDynomiteClusterName(dynoClusterName) - .withCPConfig(connectionPoolConfiguration) - .build(); - - logger.info("Starting conductor server using dynomite/redis cluster " + dynoClusterName); - - break; - - case mysql: - logger.info("Starting conductor server using MySQL data store", database); - break; - case memory: - jedis = new JedisMock(); - try { - EmbeddedElasticSearch.start(); - if(System.getProperty("workflow.elasticsearch.url") == null) { - System.setProperty("workflow.elasticsearch.url", "localhost:9300"); - } - if(System.getProperty("workflow.elasticsearch.index.name") == null) { - System.setProperty("workflow.elasticsearch.index.name", "conductor"); - } - } catch (Exception e) { - logger.error("Error starting embedded elasticsearch. 
Search functionality will be impacted: " + e.getMessage(), e); - } - logger.info("Starting conductor server using in memory data store"); - break; - - case redis_cluster: - Host host = dynoHosts.get(0); - GenericObjectPoolConfig poolConfig = new GenericObjectPoolConfig(); - poolConfig.setMinIdle(5); - poolConfig.setMaxTotal(1000); - jedis = new JedisCluster(new HostAndPort(host.getHostName(), host.getPort()), poolConfig); - logger.info("Starting conductor server using redis_cluster " + dynoClusterName); - break; - } - - this.serverModule = new ServerModule(jedis, hostSupplier, systemPropertiesConfiguration, database); - } - - private TokenMapSupplier getTokenMapSupplier(List dynoHosts) { - return new TokenMapSupplier() { - - HostToken token = new HostToken(1L, dynoHosts.get(0)); - - @Override - public List getTokens(Set activeHosts) { - return Arrays.asList(token); + private static Logger logger = LoggerFactory.getLogger(ConductorServer.class); + + private Server server; + + private SystemPropertiesConfiguration systemPropertiesConfiguration; + + public ConductorServer(SystemPropertiesConfiguration systemPropertiesConfiguration) { + this.systemPropertiesConfiguration = systemPropertiesConfiguration; + } + + private List selectModulesToLoad() { + Configuration.DB database = null; + List modules = new ArrayList<>(); + + try { + database = systemPropertiesConfiguration.getDB(); + } catch (IllegalArgumentException ie) { + logger.error("Invalid db name: " + systemPropertiesConfiguration.getDBString() + + ", supported values are: " + Arrays.toString(Configuration.DB.values())); + System.exit(1); + } + + switch (database) { + case REDIS: + case DYNOMITE: + modules.add(new DynomiteClusterModule()); + modules.add(new RedisESWorkflowModule()); + logger.info("Starting conductor server using dynomite/redis cluster."); + break; + + case MYSQL: + modules.add(new MySQLWorkflowModule()); + logger.info("Starting conductor server using MySQL data store."); + break; + case MEMORY: + // TODO This ES logic should probably live elsewhere. + try { + EmbeddedElasticSearch.start(); + if (System.getProperty("workflow.elasticsearch.url") == null) { + System.setProperty("workflow.elasticsearch.url", "localhost:9300"); + } + if (System.getProperty("workflow.elasticsearch.index.name") == null) { + System.setProperty("workflow.elasticsearch.index.name", "conductor"); + } + } catch (Exception e) { + logger.error("Error starting embedded elasticsearch. 
Search functionality will be impacted: " + e.getMessage(), e); + } + + modules.add(new LocalRedisModule()); + modules.add(new RedisESWorkflowModule()); + logger.info("Starting conductor server using in memory data store"); + break; + + case REDIS_CLUSTER: + modules.add(new RedisClusterModule()); + modules.add(new RedisESWorkflowModule()); + logger.info("Starting conductor server using redis_cluster."); + break; + } + + if (systemPropertiesConfiguration.getJerseyEnabled()) { + modules.add(new JerseyModule()); + modules.add(new SwaggerModule()); + } + + modules.add(new ServerModule()); + + return modules; + } + + public synchronized void start(int port, boolean join) throws Exception { + + if (server != null) { + throw new IllegalStateException("Server is already running"); + } + + Guice.createInjector(getModulesToLoad()); + + this.server = new Server(port); + + ServletContextHandler context = new ServletContextHandler(); + context.addFilter(GuiceFilter.class, "/*", EnumSet.allOf(DispatcherType.class)); + context.setWelcomeFiles(new String[]{"index.html"}); + + server.setHandler(context); + + server.start(); + System.out.println("Started server on http://localhost:" + port + "/"); + try { + boolean create = Boolean.getBoolean("loadSample"); + if (create) { + System.out.println("Creating kitchensink workflow"); + createKitchenSink(port); } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } - @Override - public HostToken getTokenForHost(Host host, Set activeHosts) { - return token; - } - }; - } - - public ServerModule getGuiceModule() { - return serverModule; - } - - public synchronized void start(int port, boolean join) throws Exception { - - if(server != null) { - throw new IllegalStateException("Server is already running"); - } - - Guice.createInjector(serverModule); - - //Swagger - String resourceBasePath = Main.class.getResource("/swagger-ui").toExternalForm(); - this.server = new Server(port); - - ServletContextHandler context = new ServletContextHandler(); - context.addFilter(GuiceFilter.class, "/*", EnumSet.allOf(DispatcherType.class)); - context.setResourceBase(resourceBasePath); - context.setWelcomeFiles(new String[] { "index.html" }); - - server.setHandler(context); - - - DefaultServlet staticServlet = new DefaultServlet(); - context.addServlet(new ServletHolder(staticServlet), "/*"); - - server.start(); - System.out.println("Started server on http://localhost:" + port + "/"); - try { - boolean create = Boolean.getBoolean("loadSample"); - if(create) { - System.out.println("Creating kitchensink workflow"); - createKitchenSink(port); - } - }catch(Exception e) { - logger.error(e.getMessage(), e); - } - - if(join) { - server.join(); - } - - } - - public synchronized void stop() throws Exception { - if(server == null) { - throw new IllegalStateException("Server is not running. 
call #start() method to start the server"); - } - server.stop(); - server = null; - } - - private static void createKitchenSink(int port) throws Exception { - - List taskDefs = new LinkedList<>(); - for(int i = 0; i < 40; i++) { - taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0)); - } - taskDefs.add(new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0)); - - Client client = Client.create(); - ObjectMapper om = new ObjectMapper(); - client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs)); - - InputStream stream = Main.class.getResourceAsStream("/kitchensink.json"); - client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); - - stream = Main.class.getResourceAsStream("/sub_flow_1.json"); - client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); - - String input = "{\"task2Name\":\"task_5\"}"; - client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(input); - - logger.info("Kitchen sink workflows are created!"); - } + if (join) { + server.join(); + } + + } + + public synchronized void stop() throws Exception { + if (server == null) { + throw new IllegalStateException("Server is not running. call #start() method to start the server"); + } + server.stop(); + server = null; + } + + private List getAdditionalModules() { + return systemPropertiesConfiguration.getAdditionalModules(); + } + + public List getModulesToLoad() { + List modulesToLoad = new ArrayList<>(); + + modulesToLoad.addAll(selectModulesToLoad()); + modulesToLoad.addAll(getAdditionalModules()); + + return modulesToLoad; + } + + private static void createKitchenSink(int port) throws Exception { + + List taskDefs = new LinkedList<>(); + for (int i = 0; i < 40; i++) { + taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0)); + } + taskDefs.add(new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0)); + + Client client = Client.create(); + ObjectMapper om = new ObjectMapper(); + client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs)); + + InputStream stream = Main.class.getResourceAsStream("/kitchensink.json"); + client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); + + stream = Main.class.getResourceAsStream("/sub_flow_1.json"); + client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); + + String input = "{\"task2Name\":\"task_5\"}"; + client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(input); + + logger.info("Kitchen sink workflows are created!"); + } } diff --git a/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java b/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java new file mode 100644 index 0000000000..aa4d75e3f7 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java @@ -0,0 +1,32 @@ +package com.netflix.conductor.server; + +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; + +import com.netflix.conductor.dyno.DynoShardSupplierProvider; +import com.netflix.conductor.dyno.DynomiteConfiguration; +import 
com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; +import com.netflix.conductor.jedis.ConfigurationHostSupplierProvider; +import com.netflix.conductor.jedis.DynomiteJedisProvider; +import com.netflix.conductor.jedis.TokenMapSupplierProvider; +import com.netflix.dyno.connectionpool.HostSupplier; +import com.netflix.dyno.connectionpool.TokenMapSupplier; +import com.netflix.dyno.queues.ShardSupplier; + +import redis.clients.jedis.JedisCommands; + +public class DynomiteClusterModule extends AbstractModule { + @Override + protected void configure() { + + bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class); + bind(JedisCommands.class).toProvider(DynomiteJedisProvider.class).asEagerSingleton(); + bind(JedisCommands.class) + .annotatedWith(Names.named("DynoReadClient")) + .toProvider(DynomiteJedisProvider.class) + .asEagerSingleton(); + bind(HostSupplier.class).toProvider(ConfigurationHostSupplierProvider.class); + bind(TokenMapSupplier.class).toProvider(TokenMapSupplierProvider.class); + bind(ShardSupplier.class).toProvider(DynoShardSupplierProvider.class); + } +} diff --git a/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java b/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java new file mode 100644 index 0000000000..3513c14342 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java @@ -0,0 +1,32 @@ +package com.netflix.conductor.server; + +import com.netflix.conductor.core.config.Configuration; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +import javax.inject.Inject; +import javax.inject.Provider; + +public class ExecutorServiceProvider implements Provider { + private static final int MAX_THREADS = 50; + + private final Configuration configuration; + private final ExecutorService executorService; + + @Inject + public ExecutorServiceProvider(Configuration configuration){ + this.configuration = configuration; + + AtomicInteger count = new AtomicInteger(0); + this.executorService = java.util.concurrent.Executors.newFixedThreadPool(MAX_THREADS, runnable -> { + Thread conductorWorkerThread = new Thread(runnable); + conductorWorkerThread.setName("conductor-worker-" + count.getAndIncrement()); + return conductorWorkerThread; + }); + } + @Override + public ExecutorService get() { + return executorService; + } +} diff --git a/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java b/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java new file mode 100644 index 0000000000..9adfc46150 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java @@ -0,0 +1,28 @@ +package com.netflix.conductor.server; + +import com.google.inject.AbstractModule; +import com.google.inject.name.Names; + +import com.netflix.conductor.dyno.DynoShardSupplierProvider; +import com.netflix.conductor.dyno.DynomiteConfiguration; +import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; +import com.netflix.conductor.jedis.InMemoryJedisProvider; +import com.netflix.conductor.jedis.LocalHostSupplierProvider; +import com.netflix.dyno.connectionpool.HostSupplier; +import com.netflix.dyno.queues.ShardSupplier; + +import redis.clients.jedis.JedisCommands; + +public class LocalRedisModule extends AbstractModule { + @Override + protected void configure() { + + bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class); + 
bind(JedisCommands.class).toProvider(InMemoryJedisProvider.class); + bind(JedisCommands.class) + .annotatedWith(Names.named("DynoReadClient")) + .toProvider(InMemoryJedisProvider.class); + bind(HostSupplier.class).toProvider(LocalHostSupplierProvider.class); + bind(ShardSupplier.class).toProvider(DynoShardSupplierProvider.class); + } +} diff --git a/server/src/main/java/com/netflix/conductor/server/RedisClusterModule.java b/server/src/main/java/com/netflix/conductor/server/RedisClusterModule.java new file mode 100644 index 0000000000..fc57fa2bfe --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/server/RedisClusterModule.java @@ -0,0 +1,20 @@ +package com.netflix.conductor.server; + +import com.google.inject.AbstractModule; + +import com.netflix.conductor.dyno.DynomiteConfiguration; +import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; +import com.netflix.conductor.jedis.ConfigurationHostSupplierProvider; +import com.netflix.conductor.jedis.RedisClusterJedisProvider; +import com.netflix.dyno.connectionpool.HostSupplier; + +import redis.clients.jedis.JedisCommands; + +public class RedisClusterModule extends AbstractModule { + @Override + protected void configure(){ + bind(HostSupplier.class).toProvider(ConfigurationHostSupplierProvider.class); + bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class); + bind(JedisCommands.class).toProvider(RedisClusterJedisProvider.class); + } +} diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java index 3ab3f416cd..09b10f1bf7 100644 --- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java +++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java @@ -1,118 +1,42 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.server; import com.google.inject.AbstractModule; -import com.google.inject.Provides; +import com.google.inject.Scopes; -import com.netflix.conductor.contribs.http.HttpTask; -import com.netflix.conductor.contribs.http.RestClientManager; -import com.netflix.conductor.contribs.json.JsonJqTransform; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.RedisESWorkflowModule; -import com.netflix.conductor.dao.index.ElasticSearchDAO; import com.netflix.conductor.dao.index.ElasticsearchModule; -import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; -import com.netflix.dyno.connectionpool.HostSupplier; +import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; -import java.util.List; import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -import redis.clients.jedis.JedisCommands; /** * @author Viren * */ public class ServerModule extends AbstractModule { - - private int maxThreads = 50; - - private ExecutorService es; - - private JedisCommands dynoConn; - - private HostSupplier hostSupplier; - - private String region; - - private String localRack; - - private SystemPropertiesConfiguration systemPropertiesConfiguration; - - private ConductorServer.DB db; - - public ServerModule(JedisCommands jedis, HostSupplier hostSupplier, SystemPropertiesConfiguration systemPropertiesConfiguration, ConductorServer.DB db) { - this.dynoConn = jedis; - this.hostSupplier = hostSupplier; - this.systemPropertiesConfiguration = systemPropertiesConfiguration; - this.region = systemPropertiesConfiguration.getRegion(); - this.localRack = systemPropertiesConfiguration.getAvailabilityZone(); - this.db = db; - - } - - @Override - protected void configure() { - - configureExecutorService(); - - bind(Configuration.class).toInstance(systemPropertiesConfiguration); - if (db == ConductorServer.DB.mysql) { - install(new MySQLWorkflowModule()); - } else { - install(new RedisESWorkflowModule(systemPropertiesConfiguration, dynoConn, hostSupplier)); - } + @Override + protected void configure() { + install(new CoreModule()); + install(new ElasticsearchModule()); - install(new ElasticsearchModule()); - bind(IndexDAO.class).to(ElasticSearchDAO.class); - - install(new CoreModule()); - install(new JerseyModule()); - - new HttpTask(new RestClientManager(), systemPropertiesConfiguration); - new JsonJqTransform(); - - List additionalModules = systemPropertiesConfiguration.getAdditionalModules(); - if(additionalModules != null) { - for(AbstractModule additionalModule : additionalModules) { - install(additionalModule); - } - } - } - - @Provides - public ExecutorService getExecutorService(){ - return this.es; - } - - private void configureExecutorService(){ - AtomicInteger count = new AtomicInteger(0); - this.es = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { - Thread conductorWorkerThread = new Thread(runnable); - conductorWorkerThread.setName("conductor-worker-" + count.getAndIncrement()); - return conductorWorkerThread; - }); - } + bind(Configuration.class).to(SystemPropertiesDynomiteConfiguration.class); + bind(ExecutorService.class).toProvider(ExecutorServiceProvider.class).in(Scopes.SINGLETON); + } } diff --git a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java 
b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java index 9c772acaf3..2b9d4f1f78 100644 --- a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java +++ b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java @@ -1,98 +1,72 @@ /** * Copyright 2017 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** - * + * */ package com.netflix.conductor.server; -import java.io.FileInputStream; -import java.util.HashMap; -import java.util.Map; -import java.util.Optional; -import java.util.Properties; - -import org.apache.log4j.PropertyConfigurator; -import org.eclipse.jetty.servlet.DefaultServlet; - import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.servlet.GuiceServletContextListener; -import com.google.inject.servlet.ServletModule; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; +import org.apache.log4j.PropertyConfigurator; + +import java.io.FileInputStream; +import java.util.Optional; +import java.util.Properties; + /** * @author Viren * */ public class ServletContextListner extends GuiceServletContextListener { - @Override - protected Injector getInjector() { - - loadProperties(); - - SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); - ConductorServer server = new ConductorServer(config); - - return Guice.createInjector(server.getGuiceModule(), getSwagger()); - } - - private ServletModule getSwagger() { - - String resourceBasePath = ServletContextListner.class.getResource("/swagger-ui").toExternalForm(); - DefaultServlet ds = new DefaultServlet(); - - ServletModule sm = new ServletModule() { - @Override - protected void configureServlets() { - Map params = new HashMap<>(); - params.put("resourceBase", resourceBasePath); - params.put("redirectWelcome", "true"); - serve("/*").with(ds, params); - } - }; - - return sm; - - } - - private void loadProperties() { - try { - - String key = "conductor_properties"; - String propertyFile = Optional.ofNullable(System.getProperty(key)).orElse(System.getenv(key)); - if(propertyFile != null) { - System.out.println("Using " + propertyFile); - FileInputStream propFile = new FileInputStream(propertyFile); - Properties props = new Properties(System.getProperties()); - props.load(propFile); - System.setProperties(props); - } - - key = "log4j_properties"; - String log4jConfig = Optional.ofNullable(System.getProperty(key)).orElse(System.getenv(key)); - 
if(log4jConfig != null) { - PropertyConfigurator.configure(new FileInputStream(log4jConfig)); - } - - } catch (Exception e) { - System.err.println("Error loading properties " + e.getMessage()); - e.printStackTrace(); - } - } - + @Override + protected Injector getInjector() { + + loadProperties(); + + SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); + ConductorServer server = new ConductorServer(config); + + return Guice.createInjector(server.getModulesToLoad()); + } + + private void loadProperties() { + try { + + String key = "conductor_properties"; + String propertyFile = Optional.ofNullable(System.getProperty(key)).orElse(System.getenv(key)); + if (propertyFile != null) { + System.out.println("Using " + propertyFile); + FileInputStream propFile = new FileInputStream(propertyFile); + Properties props = new Properties(System.getProperties()); + props.load(propFile); + System.setProperties(props); + } + + key = "log4j_properties"; + String log4jConfig = Optional.ofNullable(System.getProperty(key)).orElse(System.getenv(key)); + if (log4jConfig != null) { + PropertyConfigurator.configure(new FileInputStream(log4jConfig)); + } + + } catch (Exception e) { + System.err.println("Error loading properties " + e.getMessage()); + e.printStackTrace(); + } + } + } diff --git a/server/src/main/java/com/netflix/conductor/server/SwaggerModule.java b/server/src/main/java/com/netflix/conductor/server/SwaggerModule.java new file mode 100644 index 0000000000..812d679b03 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/server/SwaggerModule.java @@ -0,0 +1,25 @@ +package com.netflix.conductor.server; + +import com.google.inject.Scopes; +import com.google.inject.servlet.ServletModule; + +import org.eclipse.jetty.servlet.DefaultServlet; + +import java.util.HashMap; +import java.util.Map; + +public class SwaggerModule extends ServletModule { + + @Override + protected void configureServlets() { + bind(DefaultServlet.class).in(Scopes.SINGLETON); + Map params = new HashMap<>(); + params.put("resourceBase", getResourceBasePath()); + params.put("redirectWelcome", "true"); + serve("/*").with(DefaultServlet.class, params); + } + + private String getResourceBasePath() { + return SwaggerModule.class.getResource("/swagger-ui").toExternalForm(); + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java index 3d166fb2e5..1aa1273c6c 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java @@ -1,20 +1,17 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** - * + * */ package com.netflix.conductor.tests.utils; @@ -31,7 +28,7 @@ import com.netflix.conductor.dao.dynomite.RedisExecutionDAO; import com.netflix.conductor.dao.dynomite.RedisMetadataDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; -import com.netflix.conductor.dyno.DynoProxy; +import com.netflix.conductor.jedis.InMemoryJedisProvider; import com.netflix.conductor.jedis.JedisMock; import com.netflix.dyno.queues.ShardSupplier; @@ -48,59 +45,58 @@ * */ public class TestModule extends AbstractModule { - - private int maxThreads = 50; - - private ExecutorService executorService; - - @Override - protected void configure() { - - System.setProperty("workflow.system.task.worker.callback.seconds", "0"); - System.setProperty("workflow.system.task.worker.queue.size", "10000"); - System.setProperty("workflow.system.task.worker.thread.count", "10"); - - configureExecutorService(); - - SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); - bind(Configuration.class).toInstance(config); - JedisCommands jedisMock = new JedisMock(); - - DynoQueueDAO queueDao = new DynoQueueDAO(jedisMock, jedisMock, new ShardSupplier() { - - @Override - public Set getQueueShards() { - return Arrays.asList("a").stream().collect(Collectors.toSet()); - } - - @Override - public String getCurrentShard() { - return "a"; - } - }, config); - - bind(MetadataDAO.class).to(RedisMetadataDAO.class); - bind(ExecutionDAO.class).to(RedisExecutionDAO.class); - bind(DynoQueueDAO.class).toInstance(queueDao); - bind(QueueDAO.class).to(DynoQueueDAO.class); - bind(IndexDAO.class).to(MockIndexDAO.class); - DynoProxy proxy = new DynoProxy(jedisMock); - bind(DynoProxy.class).toInstance(proxy); - install(new CoreModule()); - bind(UserTask.class).asEagerSingleton(); - } - - @Provides - public ExecutorService getExecutorService(){ - return this.executorService; - } - - private void configureExecutorService(){ - AtomicInteger count = new AtomicInteger(0); - this.executorService = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { + + private int maxThreads = 50; + + private ExecutorService executorService; + + @Override + protected void configure() { + + System.setProperty("workflow.system.task.worker.callback.seconds", "0"); + System.setProperty("workflow.system.task.worker.queue.size", "10000"); + System.setProperty("workflow.system.task.worker.thread.count", "10"); + + configureExecutorService(); + + SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); + bind(Configuration.class).toInstance(config); + JedisCommands jedisMock = new JedisMock(); + + DynoQueueDAO queueDao = new DynoQueueDAO(jedisMock, jedisMock, new ShardSupplier() { + + @Override + public Set getQueueShards() { + return Arrays.asList("a").stream().collect(Collectors.toSet()); + } + + @Override + public String getCurrentShard() { + return "a"; + } + }, config); + + bind(MetadataDAO.class).to(RedisMetadataDAO.class); + bind(ExecutionDAO.class).to(RedisExecutionDAO.class); + bind(DynoQueueDAO.class).toInstance(queueDao); + bind(QueueDAO.class).to(DynoQueueDAO.class); + bind(IndexDAO.class).to(MockIndexDAO.class); + 
bind(JedisCommands.class).toProvider(InMemoryJedisProvider.class); + install(new CoreModule()); + bind(UserTask.class).asEagerSingleton(); + } + + @Provides + public ExecutorService getExecutorService() { + return this.executorService; + } + + private void configureExecutorService() { + AtomicInteger count = new AtomicInteger(0); + this.executorService = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { Thread workflowWorkerThread = new Thread(runnable); workflowWorkerThread.setName(String.format("workflow-worker-%d", count.getAndIncrement())); return workflowWorkerThread; }); - } + } } From 8223b398991726336e410b0fcc0db5c5d360ea25 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 7 Jun 2018 11:32:45 +0200 Subject: [PATCH 006/163] Minor review feedback. --- .../conductor/dyno/DynoProxyDiscoveryProvider.java | 7 +++---- .../conductor/dyno/RedisQueuesDiscoveryProvider.java | 8 ++++---- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java index 2dbc08c168..fdb33d0ae5 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoProxyDiscoveryProvider.java @@ -1,6 +1,5 @@ package com.netflix.conductor.dyno; -import com.netflix.conductor.core.config.Configuration; import com.netflix.discovery.DiscoveryClient; import com.netflix.dyno.jedis.DynoJedisClient; @@ -11,17 +10,17 @@ public class DynoProxyDiscoveryProvider implements Provider { private final DiscoveryClient discoveryClient; - private final Configuration configuration; + private final DynomiteConfiguration configuration; @Inject - public DynoProxyDiscoveryProvider(DiscoveryClient discoveryClient, Configuration configuration) { + public DynoProxyDiscoveryProvider(DiscoveryClient discoveryClient, DynomiteConfiguration configuration) { this.discoveryClient = discoveryClient; this.configuration = configuration; } @Override public JedisCommands get() { - String cluster = configuration.getProperty("workflow.dynomite.cluster", null); + String cluster = configuration.getCluster(); String applicationName = configuration.getAppId(); return new DynoJedisClient .Builder() diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java index c5629a929c..0eed672d64 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesDiscoveryProvider.java @@ -20,16 +20,16 @@ public class RedisQueuesDiscoveryProvider implements Provider { private static final Logger logger = LoggerFactory.getLogger(RedisQueuesDiscoveryProvider.class); - + private final DiscoveryClient discoveryClient; private final DynomiteConfiguration configuration; @Inject - RedisQueuesDiscoveryProvider(DiscoveryClient discoveryClient, DynomiteConfiguration configuration){ + RedisQueuesDiscoveryProvider(DiscoveryClient discoveryClient, DynomiteConfiguration configuration) { this.discoveryClient = discoveryClient; this.configuration = configuration; } - + @Override public RedisQueues get() { @@ -77,7 +77,7 @@ public List getHosts() { String region = configuration.getRegion(); String localDC = 
configuration.getAvailabilityZone(); - if(localDC == null) { + if (localDC == null) { throw new Error("Availability zone is not defined. " + "Ensure Configuration.getAvailabilityZone() returns a non-null and non-empty value."); }

From f515b0b20f91afad505c21b7a4a379b0bea36add Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 28 May 2018 15:45:34 +0200
Subject: [PATCH 007/163] protogen: Implement ProtoBuf schema generator

In order to expose a gRPC interface for Conductor, we need ProtoBuf schemas for all the internal domain models that the service uses, currently implemented as POJOs.

Manually creating ProtoBuf schemas for all the objects exposed through the API _and_ creating the mapping code between the ProtoBuf objects and the POJOs is a very cumbersome and error-prone task, and hence we're tackling it with an automated code generator.

protogen is a simple code generator that parses the annotated fields in a POJO and generates both the ProtoBuf schema and the mapping code to convert to/from ProtoBuf objects and the native domain objects.

It is implemented as a standalone tool that uses reflection to generate the sources, as opposed to a Java Annotation Processor, because the annotation processing workflow doesn't fit well with our goals: since annotation processing runs at compile time to generate Java source code, there is no clean way to generate the accompanying `.proto` schemas, nor is there a way to compile the PB schemas into the resulting Java code simultaneously with the mapping code.

With the current approach, we simply run `protogen` to update all the `.proto` schemas in the `grpc` package and the mapping code in the `grpc-server` package. The autogenerated schemas and mapping code will be introduced in subsequent commits.

---
 .../common/annotations/ProtoEnum.java | 11 ++
 .../common/annotations/ProtoField.java | 13 ++
 .../common/annotations/ProtoMessage.java | 14 ++
 protogen/build.gradle | 6 +
 .../netflix/conductor/protogen/Element.java | 99 ++++++++++++
 .../com/netflix/conductor/protogen/Enum.java | 67 ++++++++
 .../com/netflix/conductor/protogen/File.java | 46 ++++++
 .../netflix/conductor/protogen/Message.java | 112 +++++++++++++
 .../netflix/conductor/protogen/ProtoGen.java | 100 ++++++++++++
 .../protogen/types/AbstractType.java | 152 ++++++++++++++++++
 .../conductor/protogen/types/AnyType.java | 21 +++
 .../conductor/protogen/types/GenericType.java | 53 ++++++
 .../conductor/protogen/types/ListType.java | 78 +++++++++
 .../conductor/protogen/types/MapType.java | 99 ++++++++++++
 .../conductor/protogen/types/MessageType.java | 52 ++++++
 .../conductor/protogen/types/ScalarType.java | 43 +++++
 protogen/templates/file.proto | 13 ++
 protogen/templates/message.proto | 8 +
 settings.gradle | 6 +-
 19 files changed, 992 insertions(+), 1 deletion(-)
 create mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java
 create mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java
 create mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java
 create mode 100644 protogen/build.gradle
 create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/Element.java
 create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/Enum.java
 create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/File.java
 create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/Message.java
 create mode 100644
protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java create mode 100644 protogen/templates/file.proto create mode 100644 protogen/templates/message.proto diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java new file mode 100644 index 0000000000..00bcc28e74 --- /dev/null +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java @@ -0,0 +1,11 @@ +package com.netflix.conductor.common.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface ProtoEnum { +} diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java new file mode 100644 index 0000000000..8bca1787f6 --- /dev/null +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java @@ -0,0 +1,13 @@ +package com.netflix.conductor.common.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.FIELD) +public @interface ProtoField { + int id(); + String wrap() default ""; +} diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java new file mode 100644 index 0000000000..3e22c73ae8 --- /dev/null +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java @@ -0,0 +1,14 @@ +package com.netflix.conductor.common.annotations; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE) +public @interface ProtoMessage { + boolean toProto() default true; + boolean fromProto() default true; + boolean wrapper() default false; +} diff --git a/protogen/build.gradle b/protogen/build.gradle new file mode 100644 index 0000000000..3d5ad1829d --- /dev/null +++ b/protogen/build.gradle @@ -0,0 +1,6 @@ +dependencies { + compile project(':conductor-common') + compile 'com.squareup:javapoet:1.11.1' + compile 'com.google.guava:guava:25.0-jre' + compile 'com.github.jknack:handlebars:4.0.6' +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Element.java b/protogen/src/main/java/com/netflix/conductor/protogen/Element.java new file mode 100644 index 0000000000..6942b76f1a --- /dev/null +++ 
b/protogen/src/main/java/com/netflix/conductor/protogen/Element.java @@ -0,0 +1,99 @@ +package com.netflix.conductor.protogen; + +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoMessage; +import com.netflix.conductor.protogen.types.AbstractType; +import com.netflix.conductor.protogen.types.MessageType; +import com.squareup.javapoet.TypeSpec; + +import java.util.*; + +public abstract class Element { + protected Class clazz; + protected MessageType type; + protected List fields = new ArrayList(); + protected List nested = new ArrayList<>(); + + public Element(Class cls, MessageType parentType) { + this.clazz = cls; + this.type = AbstractType.declare(cls, parentType); + + for (Class nested : clazz.getDeclaredClasses()) { + if (nested.isEnum()) + addNestedEnum(nested); + else + addNestedClass(nested); + } + } + + private void addNestedEnum(Class cls) { + ProtoEnum ann = (ProtoEnum)cls.getAnnotation(ProtoEnum.class); + if (ann != null) { + nested.add(new Enum(cls, this.type)); + } + } + + private void addNestedClass(Class cls) { + ProtoMessage ann = (ProtoMessage)cls.getAnnotation(ProtoMessage.class); + if (ann != null) { + nested.add(new Message(cls, this.type)); + } + } + + public abstract String getProtoClass(); + protected abstract void javaMapToProto(TypeSpec.Builder builder); + protected abstract void javaMapFromProto(TypeSpec.Builder builder); + + public void generateJavaMapper(TypeSpec.Builder builder) { + javaMapToProto(builder); + javaMapFromProto(builder); + + for (Element element : this.nested) { + element.generateJavaMapper(builder); + } + } + + public void findDependencies(Set dependencies) { + for (Field field : fields) { + field.getDependencies(dependencies); + } + + for (Element elem : nested) { + elem.findDependencies(dependencies); + } + } + + public List getNested() { + return nested; + } + + public List getFields() { + return fields; + } + + public String getName() { + return clazz.getSimpleName(); + } + + public static abstract class Field { + protected int protoIndex; + protected java.lang.reflect.Field field; + + protected Field(int index, java.lang.reflect.Field field) { + this.protoIndex = index; + this.field = field; + } + + public abstract String getProtoTypeDeclaration(); + + public int getProtoIndex() { + return protoIndex; + } + + public String getName() { + return field.getName(); + } + + public void getDependencies(Set deps) {} + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java new file mode 100644 index 0000000000..2fe9448ca6 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java @@ -0,0 +1,67 @@ +package com.netflix.conductor.protogen; + +import com.netflix.conductor.protogen.types.MessageType; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; +import com.squareup.javapoet.TypeSpec; + +import javax.lang.model.element.Modifier; + +public class Enum extends Element { + public Enum(Class cls, MessageType parent) { + super(cls, parent); + + int protoIndex = 0; + for (java.lang.reflect.Field field : cls.getDeclaredFields()) { + if (field.isEnumConstant()) + fields.add(new EnumField(protoIndex++, field)); + } + } + + @Override + public String getProtoClass() { + return "enum"; + } + + private MethodSpec javaMap(String methodName, TypeName from, TypeName to) { + MethodSpec.Builder method = MethodSpec.methodBuilder(methodName); + 
method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.returns(to); + method.addParameter(from, "from"); + + method.addStatement("$T to", to); + method.beginControlFlow("switch (from)"); + + for (Field field : fields) { + String name = field.getName(); + method.addStatement("case $L: to = $T.$L; break", name, to, name); + } + + method.addStatement("default: throw new $T(\"Unexpected enum constant: \" + from)", + IllegalArgumentException.class); + method.endControlFlow(); + method.addStatement("return to"); + return method.build(); + } + + @Override + protected void javaMapFromProto(TypeSpec.Builder type) { + type.addMethod(javaMap("fromProto", this.type.getJavaProtoType(), TypeName.get(this.clazz))); + } + + @Override + protected void javaMapToProto(TypeSpec.Builder type) { + type.addMethod(javaMap("toProto", TypeName.get(this.clazz), this.type.getJavaProtoType())); + } + + public class EnumField extends Field { + protected EnumField(int index, java.lang.reflect.Field field) { + super(index, field); + } + + @Override + public String getProtoTypeDeclaration() { + return String.format("%s = %d", getName(), getProtoIndex()); + } + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/File.java b/protogen/src/main/java/com/netflix/conductor/protogen/File.java new file mode 100644 index 0000000000..f7258c8d99 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/File.java @@ -0,0 +1,46 @@ +package com.netflix.conductor.protogen; + +import com.netflix.conductor.protogen.types.AbstractType; +import com.squareup.javapoet.ClassName; + +import java.util.*; + +public class File { + public static String PROTO_SUFFIX = "Pb"; + + private ClassName baseClass; + private Element message; + + public File(Class object) { + String className = object.getSimpleName() + PROTO_SUFFIX; + baseClass = ClassName.get(ProtoGen.GENERATED_PROTO_PACKAGE, className); + this.message = new Message(object, AbstractType.baseClass(baseClass, this)); + } + + public String getJavaClassName() { + return baseClass.simpleName(); + } + + public String getFilePath() { + return "model/" + message.getName().toLowerCase() + ".proto"; + } + + public String getPackageName() { + return ProtoGen.GENERATED_PROTO_PACKAGE; + } + + public String getGoPackage() { + return ProtoGen.GENERATED_GO_PACKAGE; + } + + public Element getMessage() { + return message; + } + + public Set getIncludes() { + Set includes = new HashSet<>(); + message.findDependencies(includes); + includes.remove(this.getFilePath()); + return includes; + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java new file mode 100644 index 0000000000..dc27d17fe4 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java @@ -0,0 +1,112 @@ +package com.netflix.conductor.protogen; + +import com.google.common.base.CaseFormat; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; +import com.netflix.conductor.protogen.types.AbstractType; +import com.netflix.conductor.protogen.types.MessageType; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeSpec; + +import javax.lang.model.element.Modifier; +import java.util.Set; + +public class Message extends Element { + public Message(Class cls, MessageType parent) { + super(cls, parent); + + for (java.lang.reflect.Field field: 
clazz.getDeclaredFields()) { + ProtoField ann = field.getAnnotation(ProtoField.class); + if (ann == null) + continue; + + fields.add(new MessageField(ann.id(), field)); + } + } + + protected ProtoMessage getAnnotation() { + return (ProtoMessage)this.clazz.getAnnotation(ProtoMessage.class); + } + + @Override + public String getProtoClass() { + return "message"; + } + + @Override + protected void javaMapToProto(TypeSpec.Builder type) { + if (!getAnnotation().toProto() || getAnnotation().wrapper()) + return; + + ClassName javaProtoType = (ClassName)this.type.getJavaProtoType(); + MethodSpec.Builder method = MethodSpec.methodBuilder("toProto"); + method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.returns(javaProtoType); + method.addParameter(this.clazz, "from"); + + method.addStatement("$T to = $T.newBuilder()", + javaProtoType.nestedClass("Builder"), javaProtoType); + + for (Field field : this.fields) { + if (field instanceof MessageField) { + AbstractType fieldType = ((MessageField) field).getAbstractType(); + fieldType.mapToProto(field.getName(), method); + } + } + + method.addStatement("return to.build()"); + type.addMethod(method.build()); + } + + @Override + protected void javaMapFromProto(TypeSpec.Builder type) { + if (!getAnnotation().fromProto() || getAnnotation().wrapper()) + return; + + MethodSpec.Builder method = MethodSpec.methodBuilder("fromProto"); + method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.returns(this.clazz); + method.addParameter(this.type.getJavaProtoType(), "from"); + + method.addStatement("$T to = new $T()", this.clazz, this.clazz); + + for (Field field : this.fields) { + if (field instanceof MessageField) { + AbstractType fieldType = ((MessageField) field).getAbstractType(); + fieldType.mapFromProto(field.getName(), method); + } + } + + method.addStatement("return to"); + type.addMethod(method.build()); + } + + public static class MessageField extends Field { + protected AbstractType type; + + protected MessageField(int index, java.lang.reflect.Field field) { + super(index, field); + } + + public AbstractType getAbstractType() { + if (type == null) { + type = AbstractType.get(field.getGenericType()); + } + return type; + } + + @Override + public String getProtoTypeDeclaration() { + return String.format("%s %s = %d", + getAbstractType().getProtoType(), + CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, getName()), + getProtoIndex()); + } + + @Override + public void getDependencies(Set deps) { + getAbstractType().getDependencies(deps); + } + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java new file mode 100644 index 0000000000..f6b50916ca --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java @@ -0,0 +1,100 @@ +package com.netflix.conductor.protogen; + +import com.github.jknack.handlebars.EscapingStrategy; +import com.github.jknack.handlebars.Handlebars; +import com.github.jknack.handlebars.Template; +import com.github.jknack.handlebars.io.FileTemplateLoader; +import com.github.jknack.handlebars.io.TemplateLoader; +import com.squareup.javapoet.AnnotationSpec; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.JavaFile; +import com.squareup.javapoet.TypeSpec; + +import javax.annotation.Generated; +import javax.lang.model.element.Modifier; +import java.io.FileWriter; +import java.io.Writer; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.*; + +public 
class ProtoGen { + public static String GENERATED_PROTO_PACKAGE = "com.netflix.conductor.proto"; + public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc.server"; + public static String GENERATOR_NAME = "com.netflix.conductor.protogen.ProtoGen"; + public static String GENERATED_GO_PACKAGE = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + public static ClassName CUSTOM_MAPPER_CLASS = ClassName.get(GENERATED_MAPPER_PACKAGE, "ProtoMapperBase"); + + private List files = new ArrayList<>(); + + public static void main(String[] args) throws Exception { + ProtoGen generator = new ProtoGen(); + + generator.process(com.netflix.conductor.common.metadata.events.EventExecution.class); + + generator.process(com.netflix.conductor.common.metadata.tasks.PollData.class); + generator.process(com.netflix.conductor.common.metadata.tasks.Task.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskDef.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskExecLog.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskResult.class); + + generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask.class); + generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList.class); + generator.process(com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.SkipTaskRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.SubWorkflowParams.class); + generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowDef.class); + generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowTask.class); + + generator.process(com.netflix.conductor.common.run.TaskSummary.class); + generator.process(com.netflix.conductor.common.run.Workflow.class); + generator.process(com.netflix.conductor.common.run.WorkflowSummary.class); + + generator.writeProtos("grpc/src/main/proto"); + generator.writeMapper("grpc-server/src/main/java/com/netflix/conductor/grpc/server"); + } + + public ProtoGen() { + } + + public void writeMapper(String root) throws Exception { + TypeSpec.Builder protoMapper = TypeSpec.classBuilder("ProtoMapper") + .addModifiers(Modifier.PUBLIC, Modifier.FINAL) + .addAnnotation(AnnotationSpec.builder(Generated.class) + .addMember("value", "$S", GENERATOR_NAME).build()) + .superclass(CUSTOM_MAPPER_CLASS); + + for (File file : files) { + file.getMessage().generateJavaMapper(protoMapper); + } + + JavaFile javaFile = JavaFile.builder(GENERATED_MAPPER_PACKAGE, protoMapper.build()) + .indent(" ").build(); + Path filename = Paths.get(root, "ProtoMapper.java"); + try (Writer writer = new FileWriter(filename.toString())) { + javaFile.writeTo(writer); + } + } + + public void writeProtos(String root) throws Exception { + TemplateLoader loader = new FileTemplateLoader("protogen/templates", ".proto"); + Handlebars handlebars = new Handlebars(loader) + .infiniteLoops(true) + .prettyPrint(true) + .with(EscapingStrategy.NOOP); + + Template protoFile = handlebars.compile("file"); + + for (File file : files) { + Path filename = Paths.get(root, file.getFilePath()); + try (Writer writer = new FileWriter(filename.toString())) { + protoFile.apply(file, writer); + } + } + } + + public void process(Class obj) throws Exception { + files.add(new File(obj)); + } +} diff --git 
a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java new file mode 100644 index 0000000000..3932ad5033 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java @@ -0,0 +1,152 @@ +package com.netflix.conductor.protogen.types; + +import com.google.common.base.CaseFormat; +import com.netflix.conductor.protogen.*; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.*; + +public abstract class AbstractType { + private static Map TYPES = new HashMap<>(); + private static void addScalar(Type t, String protoType) { + TYPES.put(t, new ScalarType(t, TypeName.get(t), protoType)); + } + static { + addScalar(int.class, "int32"); + addScalar(Integer.class, "int32"); + addScalar(long.class, "int64"); + addScalar(Long.class, "int64"); + addScalar(String.class, "string"); + addScalar(boolean.class, "bool"); + addScalar(Boolean.class, "bool"); + + TYPES.put(Object.class, new AnyType()); + } + + static Map PROTO_LIST_TYPES = new HashMap<>(); + static { + PROTO_LIST_TYPES.put(List.class, ArrayList.class); + PROTO_LIST_TYPES.put(Set.class, HashSet.class); + PROTO_LIST_TYPES.put(LinkedList.class, LinkedList.class); + } + + public static AbstractType get(Type t) { + if (!TYPES.containsKey(t)) { + if (t instanceof ParameterizedType) { + Type raw = ((ParameterizedType) t).getRawType(); + if (PROTO_LIST_TYPES.containsKey(raw)) { + TYPES.put(t, new ListType(t)); + } else if (raw.equals(Map.class)) { + TYPES.put(t, new MapType(t)); + } + } + } + if (!TYPES.containsKey(t)) { + throw new IllegalArgumentException("Cannot map type: " + t); + } + return TYPES.get(t); + } + + public static MessageType get(String className) { + for (Map.Entry pair : TYPES.entrySet()) { + AbstractType t = pair.getValue(); + if (t instanceof MessageType) { + if (((Class) t.getJavaType()).getSimpleName().equals(className)) + return (MessageType)t; + } + } + return null; + } + + public static MessageType declare(Class type, MessageType parent) { + return declare(type, (ClassName)parent.getJavaProtoType(), parent.getProtoFile()); + } + + public static MessageType declare(Class type, ClassName parentType, File protoFile) { + String simpleName = type.getSimpleName(); + MessageType t = new MessageType(type, parentType.nestedClass(simpleName), protoFile); + if (TYPES.containsKey(type)) { + throw new IllegalArgumentException("duplicate type declaration: "+type); + } + TYPES.put(type, t); + return t; + } + + public static MessageType baseClass(ClassName className, File protoFile) { + return new MessageType(Object.class, className, protoFile); + } + + Type javaType; + TypeName javaProtoType; + + AbstractType(Type javaType, TypeName javaProtoType) { + this.javaType = javaType; + this.javaProtoType = javaProtoType; + } + + public Type getJavaType() { + return javaType; + } + + public TypeName getJavaProtoType() { + return javaProtoType; + } + + public abstract String getProtoType(); + public abstract TypeName getRawJavaType(); + public abstract void mapToProto(String field, MethodSpec.Builder method); + public abstract void mapFromProto(String field, MethodSpec.Builder method); + + public void getDependencies(Set deps) {} + + protected String fieldMethod(String m, String field) { + return m + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, 
field); + } + + public static class WrappedType extends AbstractType { + private AbstractType realType; + private MessageType wrappedType; + + public static WrappedType wrap(GenericType realType) { + Type valueType = realType.getValueType().getJavaType(); + if (!(valueType instanceof Class)) + throw new IllegalArgumentException("cannot wrap primitive type: "+ valueType); + + String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix(); + MessageType wrappedType = AbstractType.get(className); + if (wrappedType == null) + throw new IllegalArgumentException("missing wrapper class: "+className); + return new WrappedType(realType, wrappedType); + } + + public WrappedType(AbstractType realType, MessageType wrappedType) { + super(realType.getJavaType(), wrappedType.getJavaProtoType()); + this.realType = realType; + this.wrappedType = wrappedType; + } + + @Override + public String getProtoType() { + return wrappedType.getProtoType(); + } + + @Override + public TypeName getRawJavaType() { + return realType.getRawJavaType(); + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + wrappedType.mapToProto(field, method); + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + wrappedType.mapFromProto(field, method); + } + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java new file mode 100644 index 0000000000..3e18600930 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java @@ -0,0 +1,21 @@ +package com.netflix.conductor.protogen.types; + +import com.squareup.javapoet.ClassName; + +import java.util.Set; + +public class AnyType extends MessageType { + public AnyType() { + super(Object.class, ClassName.get("com.google.protobuf", "Value"), null); + } + + @Override + public void getDependencies(Set deps) { + deps.add("google/protobuf/struct.proto"); + } + + @Override + public String getProtoType() { + return "google.protobuf.Value"; + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java new file mode 100644 index 0000000000..2250cce1b8 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java @@ -0,0 +1,53 @@ +package com.netflix.conductor.protogen.types; + +import com.netflix.conductor.protogen.types.AbstractType; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.Set; + +abstract class GenericType extends AbstractType { + public GenericType(Type type) { + super(type, null); + } + + protected Class getRawType() { + ParameterizedType tt = (ParameterizedType)this.getJavaType(); + return (Class)tt.getRawType(); + } + + protected AbstractType resolveGenericParam(int idx) { + ParameterizedType tt = (ParameterizedType)this.getJavaType(); + Type[] types = tt.getActualTypeArguments(); + + AbstractType abstractType = AbstractType.get(types[idx]); + if (abstractType instanceof GenericType) { + return WrappedType.wrap((GenericType) abstractType); + } + return abstractType; + } + + public abstract String getWrapperSuffix(); + public abstract AbstractType getValueType(); + public abstract TypeName resolveJavaProtoType(); + + @Override + public TypeName getRawJavaType() { + return 
ClassName.get(getRawType()); + } + + @Override + public void getDependencies(Set deps) { + getValueType().getDependencies(deps); + } + + @Override + public TypeName getJavaProtoType() { + if (javaProtoType == null) { + javaProtoType = resolveJavaProtoType(); + } + return javaProtoType; + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java new file mode 100644 index 0000000000..3713160c7b --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java @@ -0,0 +1,78 @@ +package com.netflix.conductor.protogen.types; + +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.ParameterizedTypeName; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.Type; +import java.util.stream.Collectors; + +public class ListType extends GenericType { + private AbstractType valueType; + + public ListType(Type type) { + super(type); + } + + @Override + public String getWrapperSuffix() { + return "List"; + } + + @Override + public AbstractType getValueType() { + if (valueType == null) { + valueType = resolveGenericParam(0); + } + return valueType; + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + AbstractType subtype = getValueType(); + if (subtype instanceof ScalarType) { + method.addStatement("to.$L( from.$L() )", + fieldMethod("addAll", field), fieldMethod("get", field)); + } else { + method.beginControlFlow("for ($T elem : from.$L())", + subtype.getJavaType(), fieldMethod("get", field)); + method.addStatement("to.$L( toProto(elem) )", + fieldMethod("add", field)); + method.endControlFlow(); + } + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + AbstractType subtype = getValueType(); + Type entryType = subtype.getJavaType(); + Class collector = PROTO_LIST_TYPES.get(getRawType()); + + if (subtype instanceof ScalarType) { + if (entryType.equals(String.class)) { + method.addStatement("to.$L( from.$L().stream().collect($T.toCollection($T::new)) )", + fieldMethod("set", field), fieldMethod("get", field)+"List", + Collectors.class, collector); + } else { + method.addStatement("to.$L( from.$L() )", + fieldMethod("set", field), fieldMethod("get", field) + "List"); + } + } else { + method.addStatement("to.$L( from.$L().stream().map(ProtoMapper::fromProto).collect($T.toCollection($T::new)) )", + fieldMethod("set", field), fieldMethod("get", field)+"List", + Collectors.class, collector); + } + } + + @Override + public TypeName resolveJavaProtoType() { + return ParameterizedTypeName.get((ClassName)getRawJavaType(), + getValueType().getJavaProtoType()); + } + + @Override + public String getProtoType() { + return "repeated " + getValueType().getProtoType(); + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java new file mode 100644 index 0000000000..133f13f10c --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java @@ -0,0 +1,99 @@ +package com.netflix.conductor.protogen.types; + +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.ParameterizedTypeName; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.Type; +import java.util.HashMap; +import java.util.Map; + +public class MapType extends GenericType { + 
private AbstractType keyType; + private AbstractType valueType; + + public MapType(Type type) { + super(type); + } + + @Override + public String getWrapperSuffix() { + return "Map"; + } + + @Override + public AbstractType getValueType() { + if (valueType == null) { + valueType = resolveGenericParam(1); + } + return valueType; + } + + public AbstractType getKeyType() { + if (keyType == null) { + keyType = resolveGenericParam(0); + } + return keyType; + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + AbstractType valueType = getValueType(); + if (valueType instanceof ScalarType) { + method.addStatement("to.$L( from.$L() )", + fieldMethod("putAll", field), fieldMethod("get", field)); + } else { + TypeName typeName = ParameterizedTypeName.get(Map.Entry.class, + getKeyType().getJavaType(), + getValueType().getJavaType()); + method.beginControlFlow("for ($T pair : from.$L().entrySet())", + typeName, fieldMethod("get", field)); + method.addStatement("to.$L( pair.getKey(), toProto( pair.getValue() ) )", + fieldMethod("put", field)); + method.endControlFlow(); + } + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + AbstractType valueType = getValueType(); + if (valueType instanceof ScalarType) { + method.addStatement("to.$L( from.$L() )", + fieldMethod("set", field), fieldMethod("get", field)+"Map"); + } else { + Type keyType = getKeyType().getJavaType(); + Type valueTypeJava = getValueType().getJavaType(); + TypeName valueTypePb = getValueType().getJavaProtoType(); + + ParameterizedTypeName entryType = ParameterizedTypeName.get(ClassName.get(Map.Entry.class), TypeName.get(keyType), valueTypePb); + ParameterizedTypeName mapType = ParameterizedTypeName.get(Map.class, keyType, valueTypeJava); + ParameterizedTypeName hashMapType = ParameterizedTypeName.get(HashMap.class, keyType, valueTypeJava); + String mapName = field+"Map"; + + method.addStatement("$T $L = new $T()", mapType, mapName, hashMapType); + method.beginControlFlow("for ($T pair : from.$L().entrySet())", + entryType, fieldMethod("get", field)+"Map"); + method.addStatement("$L.put( pair.getKey(), fromProto( pair.getValue() ) )", mapName); + method.endControlFlow(); + method.addStatement("to.$L($L)", fieldMethod("set", field), mapName); + } + } + + @Override + public TypeName resolveJavaProtoType() { + return ParameterizedTypeName.get((ClassName)getRawJavaType(), + getKeyType().getJavaProtoType(), + getValueType().getJavaProtoType()); + } + + @Override + public String getProtoType() { + AbstractType keyType = getKeyType(); + AbstractType valueType = getValueType(); + if (!(keyType instanceof ScalarType)) { + throw new IllegalArgumentException("cannot map non-scalar map key: "+this.getJavaType()); + } + return String.format("map<%s, %s>", keyType.getProtoType(), valueType.getProtoType()); + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java new file mode 100644 index 0000000000..bbba66c86e --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java @@ -0,0 +1,52 @@ +package com.netflix.conductor.protogen.types; + +import com.netflix.conductor.protogen.File; +import com.netflix.conductor.protogen.types.AbstractType; +import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.Type; +import java.util.List; +import 
java.util.Set; + +public class MessageType extends AbstractType { + private File protoFile; + + public MessageType(Type javaType, ClassName javaProtoType, File protoFile) { + super(javaType, javaProtoType); + this.protoFile = protoFile; + } + + @Override + public String getProtoType() { + List classes = ((ClassName)getJavaProtoType()).simpleNames(); + return String.join(".", classes.subList(1, classes.size())); + } + + @Override + public TypeName getRawJavaType() { + return getJavaProtoType(); + } + + public File getProtoFile() { + return protoFile; + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + method.addStatement("to.$L( toProto( from.$L() ) )", + fieldMethod("set", field), fieldMethod("get", field)); + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + method.addStatement("to.$L( fromProto( from.$L() ) )", + fieldMethod("set", field), fieldMethod("get", field)); + } + + @Override + public void getDependencies(Set deps) { + deps.add(getProtoFile().getFilePath()); + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java new file mode 100644 index 0000000000..1f16d9a17e --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java @@ -0,0 +1,43 @@ +package com.netflix.conductor.protogen.types; + +import com.netflix.conductor.protogen.types.AbstractType; +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; + +import java.lang.reflect.Type; + +public class ScalarType extends AbstractType { + private String protoType; + + public ScalarType(Type javaType, TypeName javaProtoType, String protoType) { + super(javaType, javaProtoType); + this.protoType = protoType; + } + + @Override + public String getProtoType() { + return protoType; + } + + @Override + public TypeName getRawJavaType() { + return getJavaProtoType(); + } + + private void mapCode(String field, MethodSpec.Builder method, String getter) { + method.addStatement("to.$L( from.$L() )", + fieldMethod("set", field), fieldMethod(getter, field)); + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + mapCode(field, method, "get"); + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + String getter = (getJavaType().equals(boolean.class) || + getJavaType().equals(Boolean.class)) ? 
"is" : "get"; + mapCode(field, method, getter); + } +} diff --git a/protogen/templates/file.proto b/protogen/templates/file.proto new file mode 100644 index 0000000000..23dc9401ae --- /dev/null +++ b/protogen/templates/file.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package {{packageName}}; + +{{#includes}} +import "{{this}}"; +{{/includes}} + +option java_outer_classname = "{{javaClassName}}"; +option go_package = "{{goPackage}}"; + +{{#message}} +{{>message}} +{{/message}} diff --git a/protogen/templates/message.proto b/protogen/templates/message.proto new file mode 100644 index 0000000000..7de110162b --- /dev/null +++ b/protogen/templates/message.proto @@ -0,0 +1,8 @@ +{{protoClass}} {{name}} { +{{#nested}} + {{>message}} +{{/nested}} +{{#fields}} + {{protoTypeDeclaration}}; +{{/fields}} +} diff --git a/settings.gradle b/settings.gradle index 42fc5d9bbb..efc2eb4a20 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,3 +1,7 @@ rootProject.name='conductor' -include 'common', 'core', 'redis-persistence','es2-persistence', 'es5-persistence','mysql-persistence','jersey', 'client', 'test-harness', 'ui', 'contribs', 'server' + +include 'client','common','contribs','core','es5-persistence','jersey','mysql-persistence' +include 'redis-persistence','server','test-harness','ui' +include 'protogen' + rootProject.children.each {it.name="conductor-${it.name}"} From f2cb45fe5dbd27017a7534ff2ed525e2f5fc98fe Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 28 May 2018 15:46:05 +0200 Subject: [PATCH 008/163] common: Annotate the internal domain objects for protogen In order for `protogen` to generate the ProtoBuf schemas and the mapping code, all the mappable objects need to be explicitly tagged with `ProtoMessage`, `ProtoEnum` and `ProtoField` annotations. We require explicit tagging as to not map/schema objects that are not part of the API, and most importantly, to ensure that every exposed field from the internal domain objects keeps a consistent ProtoBuf id in the schemas. This ensures that adding or removing fields or objects will always result in consistent and backwards-compatible ProtoBuf schemas. 
--- .../metadata/events/EventExecution.java | 29 +- .../common/metadata/tasks/PollData.java | 11 + .../conductor/common/metadata/tasks/Task.java | 1127 +++++++++-------- .../common/metadata/tasks/TaskDef.java | 40 +- .../common/metadata/tasks/TaskExecLog.java | 13 +- .../common/metadata/tasks/TaskResult.java | 398 +++--- .../workflow/DynamicForkJoinTask.java | 12 + .../workflow/DynamicForkJoinTaskList.java | 7 +- .../workflow/RerunWorkflowRequest.java | 11 +- .../metadata/workflow/SkipTaskRequest.java | 8 +- .../workflow/StartWorkflowRequest.java | 13 + .../metadata/workflow/SubWorkflowParams.java | 6 + .../common/metadata/workflow/WorkflowDef.java | 23 +- .../metadata/workflow/WorkflowTask.java | 72 +- .../conductor/common/run/TaskSummary.java | 51 +- .../conductor/common/run/Workflow.java | 48 +- .../conductor/common/run/WorkflowSummary.java | 43 +- 17 files changed, 1077 insertions(+), 835 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java index e034cd7146..390b44faa5 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java @@ -21,36 +21,47 @@ import java.util.HashMap; import java.util.Map; +import com.netflix.conductor.common.annotations.*; import com.netflix.conductor.common.metadata.events.EventHandler.Action; /** * @author Viren * */ +@ProtoMessage public class EventExecution { + @ProtoEnum public enum Status { IN_PROGRESS, COMPLETED, FAILED, SKIPPED } - + + @ProtoField(id = 1) private String id; - + + @ProtoField(id = 2) private String messageId; - + + @ProtoField(id = 3) private String name; - + + @ProtoField(id = 4) private String event; - + + @ProtoField(id = 5) private long created; - + + @ProtoField(id = 6) private Status status; - + + // TODO: Proto private Action.Type action; - + + @ProtoField(id = 8) private Map output = new HashMap<>(); public EventExecution() { - + } public EventExecution(String id, String messageId) { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java index 7b8e0bc06b..74ad6e9d68 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java @@ -1,5 +1,8 @@ package com.netflix.conductor.common.metadata.tasks; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + /** * Copyright 2016 Netflix, Inc. * @@ -15,10 +18,18 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ +@ProtoMessage public class PollData { + @ProtoField(id = 1) String queueName; + + @ProtoField(id = 2) String domain; + + @ProtoField(id = 3) String workerId; + + @ProtoField(id = 4) long lastPollTime; public PollData() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index ad2834cb73..bec80d6ce6 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -20,580 +20,617 @@ import java.util.HashMap; import java.util.Map; -public class Task { - - public enum Status { - - IN_PROGRESS(false, true, true), - CANCELED(true, false, false), - FAILED(true, false, true), - FAILED_WITH_TERMINAL_ERROR(true, false, false), //No Retires even if retries are configured, the task and the related workflow should be terminated - COMPLETED(true, true, true), - COMPLETED_WITH_ERRORS(true, true, true), - SCHEDULED(false, true, true), - TIMED_OUT(true, false, true), - READY_FOR_RERUN(false, true, true), - SKIPPED(true, true, false); - - private boolean terminal; - - private boolean successful; - - private boolean retriable; - - Status(boolean terminal, boolean successful, boolean retriable) { - this.terminal = terminal; - this.successful = successful; - this.retriable = retriable; - } - - public boolean isTerminal() { - return terminal; - } - - public boolean isSuccessful() { - return successful; - } - - public boolean isRetriable() { - return retriable; - } - } - - private String taskType; - - private Status status; - - private Map inputData = new HashMap<>(); - - private String referenceTaskName; - - private int retryCount; +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - private int seq; +@ProtoMessage +public class Task { - private String correlationId; + @ProtoEnum + public enum Status { - private int pollCount; + IN_PROGRESS(false, true, true), + CANCELED(true, false, false), + FAILED(true, false, true), + COMPLETED(true, true, true), + COMPLETED_WITH_ERRORS(true, true, true), + SCHEDULED(false, true, true), + TIMED_OUT(true, false, true), + READY_FOR_RERUN(false, true, true), + SKIPPED(true, true, false); - private String taskDefName; + private boolean terminal; - /** - * Time when the task was scheduled - */ - private long scheduledTime; + private boolean successful; - /** - * Time when the task was first polled - */ - private long startTime; + private boolean retriable; - /** - * Time when the task completed executing - */ - private long endTime; + Status(boolean terminal, boolean successful, boolean retriable){ + this.terminal = terminal; + this.successful = successful; + this.retriable = retriable; + } - /** - * Time when the task was last updated - */ - private long updateTime; + public boolean isTerminal(){ + return terminal; + } - private int startDelayInSeconds; + public boolean isSuccessful(){ + return successful; + } - private String retriedTaskId; + public boolean isRetriable(){ + return retriable; + } + }; - private boolean retried; + @ProtoField(id = 1) + private String taskType; - private boolean executed; + @ProtoField(id = 2) + private Status status; - private boolean callbackFromWorker = true; + @ProtoField(id = 3) + private Map inputData = new HashMap<>(); 
- private int responseTimeoutSeconds; + @ProtoField(id = 4) + private String referenceTaskName; - private String workflowInstanceId; + @ProtoField(id = 5) + private int retryCount; - private String workflowType; + @ProtoField(id = 6) + private int seq; - private String taskId; + @ProtoField(id = 7) + private String correlationId; - private String reasonForIncompletion; + @ProtoField(id = 8) + private int pollCount; - private long callbackAfterSeconds; + @ProtoField(id = 9) + private String taskDefName; - private String workerId; + /** + * Time when the task was scheduled + */ + @ProtoField(id = 10) + private long scheduledTime; - private Map outputData = new HashMap<>(); + /** + * Time when the task was first polled + */ + @ProtoField(id = 11) + private long startTime; - private WorkflowTask workflowTask; + /** + * Time when the task completed executing + */ + @ProtoField(id = 12) + private long endTime; - private String domain; + /** + * Time when the task was last updated + */ + @ProtoField(id = 13) + private long updateTime; - public Task() { + @ProtoField(id = 14) + private int startDelayInSeconds; - } + @ProtoField(id = 15) + private String retriedTaskId; - /** - * @return Type of the task - * @see WorkflowTask.Type - */ - public String getTaskType() { - return taskType; - } + @ProtoField(id = 16) + private boolean retried; - public void setTaskType(String taskType) { - this.taskType = taskType; - } + @ProtoField(id = 17) + private boolean callbackFromWorker = true; - /** - * @return Status of the task - */ - public Status getStatus() { - return status; - } + @ProtoField(id = 18) + private int responseTimeoutSeconds; + + @ProtoField(id = 19) + private String workflowInstanceId; - /** - * @param status Status of the task - */ - public void setStatus(Status status) { - this.status = status; - } + @ProtoField(id = 20) + private String workflowType; - @Deprecated - public Status getTaskStatus() { - return status; - } + @ProtoField(id = 21) + private String taskId; - @Deprecated - public void setTaskStatus(Status taskStatus) { - this.status = taskStatus; - } - - public Map getInputData() { - return inputData; - } - - public void setInputData(Map inputData) { - this.inputData = inputData; - } - - - /** - * @return the referenceTaskName - */ - public String getReferenceTaskName() { - return referenceTaskName; - } - - /** - * @param referenceTaskName the referenceTaskName to set - */ - public void setReferenceTaskName(String referenceTaskName) { - this.referenceTaskName = referenceTaskName; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlationId to set - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * @return the retryCount - */ - public int getRetryCount() { - return retryCount; - } - - /** - * @param retryCount the retryCount to set - */ - public void setRetryCount(int retryCount) { - this.retryCount = retryCount; - } - - /** - * @return the scheduledTime - */ - public long getScheduledTime() { - return scheduledTime; - } - - /** - * @param scheduledTime the scheduledTime to set - */ - public void setScheduledTime(long scheduledTime) { - this.scheduledTime = scheduledTime; - } - - /** - * @return the startTime - */ - public long getStartTime() { - return startTime; - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - /** - * @return 
the endTime - */ - public long getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - - /** - * @return the startDelayInSeconds - */ - public int getStartDelayInSeconds() { - return startDelayInSeconds; - } - - /** - * @param startDelayInSeconds the startDelayInSeconds to set - */ - public void setStartDelayInSeconds(int startDelayInSeconds) { - this.startDelayInSeconds = startDelayInSeconds; - } - - /** - * @return the retriedTaskId - */ - public String getRetriedTaskId() { - return retriedTaskId; - } - - /** - * @param retriedTaskId the retriedTaskId to set - */ - public void setRetriedTaskId(String retriedTaskId) { - this.retriedTaskId = retriedTaskId; - } - - /** - * @return the seq - */ - public int getSeq() { - return seq; - } - - /** - * @param seq the seq to set - */ - public void setSeq(int seq) { - this.seq = seq; - } - - /** - * @return the updateTime - */ - public long getUpdateTime() { - return updateTime; - } - - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(long updateTime) { - this.updateTime = updateTime; - } - - - /** - * @return the queueWaitTime - */ - public long getQueueWaitTime() { - if (this.startTime > 0 && this.scheduledTime > 0) { - return this.startTime - scheduledTime - (getCallbackAfterSeconds() * 1000); - } - return 0L; - } - - public void setQueueWaitTime(long t) { - - } - - /** - * @return True if the task has been retried after failure - */ - public boolean isRetried() { - return retried; - } - - /** - * @param retried the retried to set - */ - public void setRetried(boolean retried) { - this.retried = retried; - } - - /** - * @return True if the task has completed its lifecycle within conductor (from start to completion to being updated in the datastore) - */ - public boolean isExecuted() { - return executed; - } - - /** - * @param executed the executed value to set - */ - public void setExecuted(boolean executed) { - this.executed = executed; - } - - /** - * @return No. of times task has been polled - */ - public int getPollCount() { - return pollCount; - } - - public void setPollCount(int pollCount) { - this.pollCount = pollCount; - } - - - public boolean isCallbackFromWorker() { - return callbackFromWorker; - } - - public void setCallbackFromWorker(boolean callbackFromWorker) { - this.callbackFromWorker = callbackFromWorker; - } - - /** - * @return Name of the task definition - */ - public String getTaskDefName() { - if (taskDefName == null || "".equals(taskDefName)) { - taskDefName = taskType; - } - return taskDefName; - } - - /** - * @param taskDefName Name of the task definition - */ - public void setTaskDefName(String taskDefName) { - this.taskDefName = taskDefName; - } - - - /** - * @return the timeout for task to send response. After this timeout, the task will be re-queued - */ - public int getResponseTimeoutSeconds() { - return responseTimeoutSeconds; - } - - /** - * @param responseTimeoutSeconds - timeout for task to send response. 
After this timeout, the task will be re-queued - */ - public void setResponseTimeoutSeconds(int responseTimeoutSeconds) { - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - - /** - * @return the workflowInstanceId - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - /** - * @param workflowInstanceId the workflowInstanceId to set - */ - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getWorkflowType() { - return workflowType; - } - - - /** - * @param workflowType workflow type - */ - public Task setWorkflowType(String workflowType) { - this.workflowType = workflowType; - return this; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - /** - * @return the reasonForIncompletion - */ - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - /** - * @param reasonForIncompletion the reasonForIncompletion to set - */ - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - /** - * @return the callbackAfterSeconds - */ - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * @param callbackAfterSeconds the callbackAfterSeconds to set - */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - /** - * @return the workerId - */ - public String getWorkerId() { - return workerId; - } - - /** - * @param workerId the workerId to set - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the outputData - */ - public Map getOutputData() { - return outputData; - } - - /** - * @param outputData the outputData to set - */ - public void setOutputData(Map outputData) { - this.outputData = outputData; - } - - /** - * @return Workflow Task definition - */ - public WorkflowTask getWorkflowTask() { - return workflowTask; - } - - /** - * @param workflowTask Task definition - */ - public void setWorkflowTask(WorkflowTask workflowTask) { - this.workflowTask = workflowTask; - } - - /** - * @return the domain - */ - public String getDomain() { - return domain; - } - - /** - * @param domain the Domain - */ - public void setDomain(String domain) { - this.domain = domain; - } - - public Task copy() { - - Task copy = new Task(); - copy.setCallbackAfterSeconds(callbackAfterSeconds); - copy.setCallbackFromWorker(callbackFromWorker); - copy.setCorrelationId(correlationId); - copy.setInputData(inputData); - copy.setOutputData(outputData); - copy.setReferenceTaskName(referenceTaskName); - copy.setStartDelayInSeconds(startDelayInSeconds); - copy.setTaskDefName(taskDefName); - copy.setTaskType(taskType); - copy.setWorkflowInstanceId(workflowInstanceId); - copy.setResponseTimeoutSeconds(responseTimeoutSeconds); - copy.setStatus(status); - copy.setRetryCount(retryCount); - copy.setPollCount(pollCount); - copy.setTaskId(taskId); - copy.setReasonForIncompletion(reasonForIncompletion); - copy.setWorkerId(workerId); - copy.setWorkflowTask(workflowTask); - copy.setDomain(domain); - return copy; - } - - - @Override - public String toString() { - return "Task{" + - "taskType='" + taskType + '\'' + - ", status=" + status + - ", inputData=" + inputData + - ", referenceTaskName='" + referenceTaskName + '\'' 
+ - ", retryCount=" + retryCount + - ", seq=" + seq + - ", correlationId='" + correlationId + '\'' + - ", pollCount=" + pollCount + - ", taskDefName='" + taskDefName + '\'' + - ", scheduledTime=" + scheduledTime + - ", startTime=" + startTime + - ", endTime=" + endTime + - ", updateTime=" + updateTime + - ", startDelayInSeconds=" + startDelayInSeconds + - ", retriedTaskId='" + retriedTaskId + '\'' + - ", retried=" + retried + - ", callbackFromWorker=" + callbackFromWorker + - ", responseTimeoutSeconds=" + responseTimeoutSeconds + - ", workflowInstanceId='" + workflowInstanceId + '\'' + - ", taskId='" + taskId + '\'' + - ", reasonForIncompletion='" + reasonForIncompletion + '\'' + - ", callbackAfterSeconds=" + callbackAfterSeconds + - ", workerId='" + workerId + '\'' + - ", outputData=" + outputData + - ", workflowTask=" + workflowTask + - ", domain='" + domain + '\'' + - '}'; - } + @ProtoField(id = 22) + private String reasonForIncompletion; + + @ProtoField(id = 23) + private long callbackAfterSeconds; + + @ProtoField(id = 24) + private String workerId; + + @ProtoField(id = 25) + private Map outputData = new HashMap<>(); + + @ProtoField(id = 26) + private WorkflowTask workflowTask; + + @ProtoField(id = 27) + private String domain; + + public Task(){ + + } + + /** + * + * @return Type of the task + * @see WorkflowTask.Type + */ + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + /** + * + * @return Status of the task + */ + public Status getStatus() { + return status; + } + + /** + * + * @param status Status of the task + */ + public void setStatus(Status status) { + this.status = status; + } + + @Deprecated + public Status getTaskStatus() { + return status; + } + + @Deprecated + public void setTaskStatus(Status taskStatus) { + this.status = taskStatus; + } + + public Map getInputData() { + return inputData; + } + + public void setInputData(Map inputData) { + this.inputData = inputData; + } + + + + /** + * @return the referenceTaskName + */ + public String getReferenceTaskName() { + return referenceTaskName; + } + + /** + * @param referenceTaskName the referenceTaskName to set + */ + public void setReferenceTaskName(String referenceTaskName) { + this.referenceTaskName = referenceTaskName; + } + + /** + * @return the correlationId + */ + public String getCorrelationId() { + return correlationId; + } + + /** + * @param correlationId the correlationId to set + */ + public void setCorrelationId(String correlationId) { + this.correlationId = correlationId; + } + + /** + * @return the retryCount + */ + public int getRetryCount() { + return retryCount; + } + + /** + * @param retryCount the retryCount to set + */ + public void setRetryCount(int retryCount) { + this.retryCount = retryCount; + } + + /** + * @return the scheduledTime + */ + public long getScheduledTime() { + return scheduledTime; + } + + /** + * @param scheduledTime the scheduledTime to set + */ + public void setScheduledTime(long scheduledTime) { + this.scheduledTime = scheduledTime; + } + + /** + * @return the startTime + */ + public long getStartTime() { + return startTime; + } + + /** + * @param startTime the startTime to set + */ + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + /** + * @return the endTime + */ + public long getEndTime() { + return endTime; + } + + /** + * @param endTime the endTime to set + */ + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + + /** + * @return the 
startDelayInSeconds + */ + public int getStartDelayInSeconds() { + return startDelayInSeconds; + } + + /** + * @param startDelayInSeconds the startDelayInSeconds to set + */ + public void setStartDelayInSeconds(int startDelayInSeconds) { + this.startDelayInSeconds = startDelayInSeconds; + } + + /** + * @return the retriedTaskId + */ + public String getRetriedTaskId() { + return retriedTaskId; + } + + /** + * @param retriedTaskId the retriedTaskId to set + */ + public void setRetriedTaskId(String retriedTaskId) { + this.retriedTaskId = retriedTaskId; + } + + /** + * @return the seq + */ + public int getSeq() { + return seq; + } + + /** + * @param seq the seq to set + */ + public void setSeq(int seq) { + this.seq = seq; + } + + /** + * @return the updateTime + */ + public long getUpdateTime() { + return updateTime; + } + + /** + * @param updateTime the updateTime to set + */ + public void setUpdateTime(long updateTime) { + this.updateTime = updateTime; + } + + + /** + * @return the queueWaitTime + */ + public long getQueueWaitTime() { + if(this.startTime > 0 && this.scheduledTime > 0){ + return this.startTime - scheduledTime - (getCallbackAfterSeconds()*1000); + } + return 0L; + } + + public void setQueueWaitTime(long t) { + + } + + + /** + * + * @return True if the task has been retried after failure + */ + public boolean isRetried() { + return retried; + } + + /** + * @param retried the retried to set + */ + public void setRetried(boolean retried) { + this.retried = retried; + } + + /** + * + * @return No. of times task has been polled + */ + public int getPollCount() { + return pollCount; + } + + public void setPollCount(int pollCount) { + this.pollCount = pollCount; + } + + + public boolean isCallbackFromWorker() { + return callbackFromWorker; + } + + public void setCallbackFromWorker(boolean callbackFromWorker) { + this.callbackFromWorker = callbackFromWorker; + } + + /** + * + * @return Name of the task definition + */ + public String getTaskDefName() { + if(taskDefName == null || "".equals(taskDefName)){ + taskDefName = taskType; + } + return taskDefName; + } + + /** + * + * @param taskDefName Name of the task definition + */ + public void setTaskDefName(String taskDefName) { + this.taskDefName = taskDefName; + } + + + /** + * + * @return the timeout for task to send response. After this timeout, the task will be re-queued + */ + public int getResponseTimeoutSeconds() { + return responseTimeoutSeconds; + } + + /** + * + * @param responseTimeoutSeconds - timeout for task to send response. 
After this timeout, the task will be re-queued + */ + public void setResponseTimeoutSeconds(int responseTimeoutSeconds) { + this.responseTimeoutSeconds = responseTimeoutSeconds; + } + + + /** + * @return the workflowInstanceId + */ + public String getWorkflowInstanceId() { + return workflowInstanceId; + } + + /** + * @param workflowInstanceId the workflowInstanceId to set + * + */ + public void setWorkflowInstanceId(String workflowInstanceId) { + this.workflowInstanceId = workflowInstanceId; + } + + public String getWorkflowType() { + return workflowType; + } + + + /** + * @param workflowType workflow type + */ + public Task setWorkflowType(String workflowType) { + this.workflowType = workflowType; + return this; + } + + /** + * @return the taskId + */ + public String getTaskId() { + return taskId; + } + + /** + * @param taskId the taskId to set + * + */ + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + /** + * @return the reasonForIncompletion + */ + public String getReasonForIncompletion() { + return reasonForIncompletion; + } + + /** + * @param reasonForIncompletion the reasonForIncompletion to set + * + */ + public void setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + /** + * @return the callbackAfterSeconds + */ + public long getCallbackAfterSeconds() { + return callbackAfterSeconds; + } + + /** + * @param callbackAfterSeconds the callbackAfterSeconds to set + * + */ + public void setCallbackAfterSeconds(long callbackAfterSeconds) { + this.callbackAfterSeconds = callbackAfterSeconds; + } + + /** + * @return the workerId + */ + public String getWorkerId() { + return workerId; + } + + /** + * @param workerId the workerId to set + * + */ + public void setWorkerId(String workerId) { + this.workerId = workerId; + } + + /** + * @return the outputData + */ + public Map getOutputData() { + return outputData; + } + + /** + * @param outputData the outputData to set + * + */ + public void setOutputData(Map outputData) { + this.outputData = outputData; + } + + /** + * + * @return Workflow Task definition + */ + public WorkflowTask getWorkflowTask() { + return workflowTask; + } + + /** + * + * @param workflowTask Task definition + */ + public void setWorkflowTask(WorkflowTask workflowTask) { + this.workflowTask = workflowTask; + } + + /** + * @return the domain + */ + public String getDomain() { + return domain; + } + + /** + * @param domain the Domain + * + */ + public void setDomain(String domain) { + this.domain = domain; + } + + public Task copy() { + + Task copy = new Task(); + copy.setCallbackAfterSeconds(callbackAfterSeconds); + copy.setCallbackFromWorker(callbackFromWorker); + copy.setCorrelationId(correlationId); + copy.setInputData(inputData); + copy.setOutputData(outputData); + copy.setReferenceTaskName(referenceTaskName); + copy.setStartDelayInSeconds(startDelayInSeconds); + copy.setTaskDefName(taskDefName); + copy.setTaskType(taskType); + copy.setWorkflowInstanceId(workflowInstanceId); + copy.setResponseTimeoutSeconds(responseTimeoutSeconds); + copy.setStatus(status); + copy.setRetryCount(retryCount); + copy.setPollCount(pollCount); + copy.setTaskId(taskId); + copy.setReasonForIncompletion(reasonForIncompletion); + copy.setWorkerId(workerId); + copy.setWorkflowTask(workflowTask); + copy.setDomain(domain); + return copy; + } + + + @Override + public String toString() { + return "Task{" + + "taskType='" + taskType + '\'' + + ", status=" + status + + ", inputData=" + inputData + + ", 
referenceTaskName='" + referenceTaskName + '\'' + + ", retryCount=" + retryCount + + ", seq=" + seq + + ", correlationId='" + correlationId + '\'' + + ", pollCount=" + pollCount + + ", taskDefName='" + taskDefName + '\'' + + ", scheduledTime=" + scheduledTime + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", updateTime=" + updateTime + + ", startDelayInSeconds=" + startDelayInSeconds + + ", retriedTaskId='" + retriedTaskId + '\'' + + ", retried=" + retried + + ", callbackFromWorker=" + callbackFromWorker + + ", responseTimeoutSeconds=" + responseTimeoutSeconds + + ", workflowInstanceId='" + workflowInstanceId + '\'' + + ", taskId='" + taskId + '\'' + + ", reasonForIncompletion='" + reasonForIncompletion + '\'' + + ", callbackAfterSeconds=" + callbackAfterSeconds + + ", workerId='" + workerId + '\'' + + ", outputData=" + outputData + + ", workflowTask=" + workflowTask + + ", domain='" + domain + '\'' + + '}'; + } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java index bd7ee029d0..30901515f0 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java @@ -23,16 +23,22 @@ import java.util.List; import java.util.Map; +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.Auditable; /** * @author Viren * Defines a workflow task definition */ +@ProtoMessage public class TaskDef extends Auditable { - + + @ProtoEnum public static enum TimeoutPolicy {RETRY, TIME_OUT_WF, ALERT_ONLY} - + + @ProtoEnum public static enum RetryLogic {FIXED, EXPONENTIAL_BACKOFF} private static final int ONE_HOUR = 60 * 60; @@ -40,28 +46,40 @@ public static enum RetryLogic {FIXED, EXPONENTIAL_BACKOFF} /** * Unique name identifying the task. 
The name is unique across */ + @ProtoField(id = 1) private String name; - + + @ProtoField(id = 2) private String description; - + + @ProtoField(id = 3) private int retryCount = 3; // Default + @ProtoField(id = 4) private long timeoutSeconds; + @ProtoField(id = 5) private List inputKeys = new ArrayList(); - + + @ProtoField(id = 6) private List outputKeys = new ArrayList(); - + + @ProtoField(id = 7) private TimeoutPolicy timeoutPolicy = TimeoutPolicy.TIME_OUT_WF; - + + @ProtoField(id = 8) private RetryLogic retryLogic = RetryLogic.FIXED; - + + @ProtoField(id = 9) private int retryDelaySeconds = 60; - + + @ProtoField(id = 10) private int responseTimeoutSeconds = ONE_HOUR; - + + @ProtoField(id = 11) private Integer concurrentExecLimit; - + + @ProtoField(id = 12) private Map inputTemplate = new HashMap<>(); public TaskDef() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java index 092b45ac69..65a1ff9825 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java @@ -18,16 +18,23 @@ */ package com.netflix.conductor.common.metadata.tasks; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + /** * @author Viren * Model that represents the task's execution log. */ +@ProtoMessage public class TaskExecLog { - + + @ProtoField(id = 1) private String log; - + + @ProtoField(id = 2) private String taskId; - + + @ProtoField(id = 3) private long createdTime; public TaskExecLog() {} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java index 1dfb6ec1fb..56a95d8585 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java @@ -18,6 +18,8 @@ */ package com.netflix.conductor.common.metadata.tasks; +import com.netflix.conductor.common.annotations.*; + import java.util.HashMap; import java.util.List; import java.util.Map; @@ -28,204 +30,202 @@ * Result of the task execution. 
* */ +@ProtoMessage public class TaskResult { - - public enum Status { - IN_PROGRESS, FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED, SCHEDULED; //SCHEDULED is added for the backward compatibility and should NOT be used when updating the task result - } - - private String workflowInstanceId; - - private String taskId; - - private String reasonForIncompletion; - - private long callbackAfterSeconds; - - private String workerId; - - private Status status; - - private Map outputData = new HashMap<>(); - - private List logs = new CopyOnWriteArrayList<>(); - - public TaskResult(Task task) { - this.workflowInstanceId = task.getWorkflowInstanceId(); - this.taskId = task.getTaskId(); - this.reasonForIncompletion = task.getReasonForIncompletion(); - this.callbackAfterSeconds = task.getCallbackAfterSeconds(); - this.status = Status.valueOf(task.getStatus().name()); - this.workerId = task.getWorkerId(); - this.outputData = task.getOutputData(); - } - - public TaskResult() { - - } - - /** - * - * @return Workflow instance id for which the task result is produced - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getTaskId() { - return taskId; - } - - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * When set to non-zero values, the task remains in the queue for the specified seconds before sent back to the worker when polled. - * Useful for the long running task, where the task is updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of time. (delayed queue implementation) - * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue before giving it to a polling worker. - */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - public String getWorkerId() { - return workerId; - } - - /** - * - * @param workerId a free form string identifying the worker host. - * Could be hostname, IP Address or any other meaningful identifier that can help identify the host/process which executed the task, in case of troubleshooting. - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the status - */ - public Status getStatus() { - return status; - } - - /** - * - * @param status Status of the task - *
- * IN_PROGRESS: Use this for long running tasks, indicating the task is still in progress and should be checked again at a later time. - * e.g. the worker checks the status of the job in the DB, while the job is being executed by another process. - *
- * FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED: Terminal statuses for the task. - *
- * - * @see #setCallbackAfterSeconds(long) - */ - public void setStatus(Status status) { - this.status = status; - } - - public Map getOutputData() { - return outputData; - } - - /** - * - * @param outputData output data to be set for the task execution result - */ - public void setOutputData(Map outputData) { - this.outputData = outputData; - } - - /** - * Adds output - * @param key output field - * @param value value - * @return current instance - */ - public TaskResult addOutputData(String key, Object value) { - this.outputData.put(key, value); - return this; - } - - /** - * - * @return Task execution logs - */ - public List getLogs() { - return logs; - } - - /** - * - * @param logs Task execution logs - */ - public void setLogs(List logs) { - this.logs = logs; - } - - - /** - * - * @param log Log line to be added - * @return Instance of TaskResult - */ - public TaskResult log(String log) { - this.logs.add(new TaskExecLog(log)); - return this; - } - - @Override - public String toString() { - return "TaskResult{" + - "workflowInstanceId='" + workflowInstanceId + '\'' + - ", taskId='" + taskId + '\'' + - ", reasonForIncompletion='" + reasonForIncompletion + '\'' + - ", callbackAfterSeconds=" + callbackAfterSeconds + - ", workerId='" + workerId + '\'' + - ", status=" + status + - ", outputData=" + outputData + - ", logs=" + logs + - '}'; - } - - public static TaskResult complete() { - return newTaskResult(Status.COMPLETED); - } - - public static TaskResult failed() { - return newTaskResult(Status.FAILED); - } - - public static TaskResult failed(String failureReason) { - TaskResult result = newTaskResult(Status.FAILED); - result.setReasonForIncompletion(failureReason); - return result; - } - - public static TaskResult inProgress() { - return newTaskResult(Status.IN_PROGRESS); - } - - public static TaskResult newTaskResult(Status status) { - TaskResult result = new TaskResult(); - result.setStatus(status); - return result; - } + @ProtoEnum + public enum Status { + IN_PROGRESS, FAILED, COMPLETED, SCHEDULED; //SCHEDULED is added for the backward compatibility and should NOT be used when updating the task result + } + + @ProtoField(id = 1) + private String workflowInstanceId; + + @ProtoField(id = 2) + private String taskId; + + @ProtoField(id = 3) + private String reasonForIncompletion; + + @ProtoField(id = 4) + private long callbackAfterSeconds; + + @ProtoField(id = 5) + private String workerId; + + @ProtoField(id = 6) + private Status status; + + @ProtoField(id = 7) + private Map outputData = new HashMap<>(); + + private List logs = new CopyOnWriteArrayList<>(); + + public TaskResult(Task task) { + this.workflowInstanceId = task.getWorkflowInstanceId(); + this.taskId = task.getTaskId(); + this.reasonForIncompletion = task.getReasonForIncompletion(); + this.callbackAfterSeconds = task.getCallbackAfterSeconds(); + this.status = Status.valueOf(task.getStatus().name()); + this.workerId = task.getWorkerId(); + this.outputData = task.getOutputData(); + } + + public TaskResult() { + + } + + /** + * + * @return Workflow instance id for which the task result is produced + */ + public String getWorkflowInstanceId() { + return workflowInstanceId; + } + + public void setWorkflowInstanceId(String workflowInstanceId) { + this.workflowInstanceId = workflowInstanceId; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + public String getReasonForIncompletion() { + return reasonForIncompletion; + } + + public void 
setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + public long getCallbackAfterSeconds() { + return callbackAfterSeconds; + } + + /** + * When set to non-zero values, the task remains in the queue for the specified seconds before sent back to the worker when polled. + * Useful for the long running task, where the task is updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of time. (delayed queue implementation) + * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue before giving it to a polling worker. + */ + public void setCallbackAfterSeconds(long callbackAfterSeconds) { + this.callbackAfterSeconds = callbackAfterSeconds; + } + + public String getWorkerId() { + return workerId; + } + + /** + * + * @param workerId a free form string identifying the worker host. + * Could be hostname, IP Address or any other meaningful identifier that can help identify the host/process which executed the task, in case of troubleshooting. + */ + public void setWorkerId(String workerId) { + this.workerId = workerId; + } + + /** + * @return the status + */ + public Status getStatus() { + return status; + } + + /** + * + * @param status Status of the task + *
+ * IN_PROGRESS: Use this for long running tasks, indicating the task is still in progress and should be checked again at a later time. e.g. the worker checks the status of the job in the DB, while the job is being executed by another process. + *
+ * FAILED, COMPLETED: Terminal statuses for the task. + *
+ * + * @see #setCallbackAfterSeconds(long) + */ + public void setStatus(Status status) { + this.status = status; + } + + public Map getOutputData() { + return outputData; + } + + /** + * + * @param outputData output data to be set for the task execution result + */ + public void setOutputData(Map outputData) { + this.outputData = outputData; + } + + /** + * Adds output + * @param key output field + * @param value value + * @return current instance + */ + public TaskResult addOutputData(String key, Object value) { + this.outputData.put(key, value); + return this; + } + + /** + * + * @return Task execution logs + */ + public List getLogs() { + return logs; + } + + /** + * + * @param logs Task execution logs + */ + public void setLogs(List logs) { + this.logs = logs; + } + + + /** + * + * @param log Log line to be added + * @return Instance of TaskResult + */ + public TaskResult log(String log) { + this.logs.add(new TaskExecLog(log)); + return this; + } + + @Override + public String toString() { + return "TaskResult [workflowInstanceId=" + workflowInstanceId + ", taskId=" + taskId + ", status=" + status + "]"; + } + + public static TaskResult complete() { + return newTaskResult(Status.COMPLETED); + } + + public static TaskResult failed() { + return newTaskResult(Status.FAILED); + } + + public static TaskResult failed(String failureReason) { + TaskResult result = newTaskResult(Status.FAILED); + result.setReasonForIncompletion(failureReason); + return result; + } + + public static TaskResult inProgress() { + return newTaskResult(Status.IN_PROGRESS); + } + + public static TaskResult newTaskResult(Status status) { + TaskResult result = new TaskResult(); + result.setStatus(status); + return result; + } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java index dbbf1dfe6f..7728a91ab0 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java @@ -18,14 +18,26 @@ import java.util.HashMap; import java.util.Map; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +@ProtoMessage public class DynamicForkJoinTask { + @ProtoField(id = 1) private String taskName; + + @ProtoField(id = 2) private String workflowName; + + @ProtoField(id = 3) private String referenceName; + + @ProtoField(id = 4) private Map input = new HashMap<>(); + + @ProtoField(id = 5) private String type = Type.SIMPLE.name(); public DynamicForkJoinTask() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java index 88aa6355c3..6b40a6ea10 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java @@ -15,12 +15,17 @@ */ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.ArrayList; import java.util.List; import java.util.Map; +@ProtoMessage public class 
DynamicForkJoinTaskList { - + + @ProtoField(id = 1) private List dynamicTasks = new ArrayList(); public void add(String taskName, String workflowName, String referenceName, Map input){ diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java index d6e4a9112c..2636c3a7e5 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java @@ -15,18 +15,27 @@ */ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.Map; +@ProtoMessage(toProto = false) public class RerunWorkflowRequest { + @ProtoField(id = 1) private String reRunFromWorkflowId; + @ProtoField(id = 2) private Map workflowInput; - + + @ProtoField(id = 3) private String reRunFromTaskId; + @ProtoField(id = 4) private Map taskInput; + @ProtoField(id = 5) private String correlationId; public String getReRunFromWorkflowId() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java index 41cb3afe83..cfd76ef34c 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java @@ -15,11 +15,17 @@ */ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.Map; +@ProtoMessage(toProto = false) public class SkipTaskRequest { + @ProtoField(id = 1) private Map taskInput; - + + @ProtoField(id = 2) private Map taskOutput; public Map getTaskInput() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java index 595094b23d..3e2e77cc6b 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java @@ -1,13 +1,26 @@ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.HashMap; import java.util.Map; +@ProtoMessage(toProto = false) public class StartWorkflowRequest { + @ProtoField(id = 1) private String name; + + @ProtoField(id = 2) private Integer version; + + @ProtoField(id = 3) private String correlationId; + + @ProtoField(id = 4) private Map input = new HashMap<>(); + + @ProtoField(id = 5) private Map taskToDomain = new HashMap<>(); public String getName() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java index 2953ab4bea..d274aca8be 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java @@ -18,15 +18,21 
@@ */ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + /** * @author Viren * */ +@ProtoMessage public class SubWorkflowParams { + @ProtoField(id = 1) private String name; //QQ why is this an object ?? + @ProtoField(id = 2) private Object version; /** diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 89a504c7ff..849e606ed7 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -25,28 +25,39 @@ import java.util.Map; import java.util.Optional; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.Auditable; /** * @author Viren * */ +@ProtoMessage public class WorkflowDef extends Auditable { + @ProtoField(id = 1) private String name; - + + @ProtoField(id = 2) private String description; - + + @ProtoField(id = 3) private int version = 1; - + + @ProtoField(id = 4) private LinkedList tasks = new LinkedList(); - + + @ProtoField(id = 5) private List inputParameters = new LinkedList(); - + + @ProtoField(id = 6) private Map outputParameters = new HashMap<>(); + @ProtoField(id = 7) private String failureWorkflow; - + + @ProtoField(id = 8) private int schemaVersion = 1; /** diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 59f3ba8e7c..99219d2874 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -18,6 +18,10 @@ */ package com.netflix.conductor.common.metadata.workflow; +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.Collection; import java.util.HashMap; import java.util.HashSet; @@ -32,8 +36,10 @@ * @author Viren * */ +@ProtoMessage public class WorkflowTask { + @ProtoEnum public enum Type { SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, JOIN, SUB_WORKFLOW, EVENT, WAIT, USER_DEFINED; @@ -55,47 +61,79 @@ public static boolean isSystemTask(String name) { return systemTasks.contains(name); } } - + + @ProtoField(id = 1) private String name; - + + @ProtoField(id = 2) private String taskReferenceName; + @ProtoField(id = 3) private String description; //Key: Name of the input parameter. MUST be one of the keys defined in TaskDef (e.g. fileName) //Value: mapping of the parameter from another task (e.g. 
task1.someOutputParameterAsFileName) + @ProtoField(id = 4) private Map inputParameters = new HashMap(); + @ProtoField(id = 5) private String type = Type.SIMPLE.name(); + @ProtoField(id = 6) private String dynamicTaskNameParam; - + + @ProtoField(id = 7) private String caseValueParam; - + + @ProtoField(id = 8) private String caseExpression; - + + @ProtoMessage(wrapper = true) + public static class WorkflowTaskList { + public List getTasks() { + return tasks; + } + + public void setTasks(List tasks) { + this.tasks = tasks; + } + + @ProtoField(id = 1) + private List tasks; + } + //Populates for the tasks of the decision type + @ProtoField(id = 9) private Map> decisionCases = new LinkedHashMap<>(); - + @Deprecated private String dynamicForkJoinTasksParam; - + + @ProtoField(id = 10) private String dynamicForkTasksParam; - + + @ProtoField(id = 11) private String dynamicForkTasksInputParamName; - + + @ProtoField(id = 12) private List defaultCase = new LinkedList<>(); - + + @ProtoField(id = 13) private List> forkTasks = new LinkedList<>(); - + + @ProtoField(id = 14) private int startDelay; //No. of seconds (at-least) to wait before starting a task. - private SubWorkflowParams subWorkflow; - + @ProtoField(id = 15) + private SubWorkflowParams subWorkflowParam; + + @ProtoField(id = 16) private List joinOn = new LinkedList<>(); - + + @ProtoField(id = 17) private String sink; - + + @ProtoField(id = 18) private Boolean optional; /** @@ -309,14 +347,14 @@ public void setCaseExpression(String caseExpression) { * @return the subWorkflow */ public SubWorkflowParams getSubWorkflowParam() { - return subWorkflow; + return subWorkflowParam; } /** * @param subWorkflow the subWorkflowParam to set */ public void setSubWorkflowParam(SubWorkflowParams subWorkflow) { - this.subWorkflow = subWorkflow; + this.subWorkflowParam = subWorkflow; } /** diff --git a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java b/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java index 383da3a8a5..3c6c40e7c6 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java @@ -22,6 +22,8 @@ import java.util.Date; import java.util.TimeZone; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; @@ -29,45 +31,62 @@ * @author Viren * */ +@ProtoMessage(fromProto = false) public class TaskSummary { /** * The time should be stored as GMT */ private static final TimeZone gmt = TimeZone.getTimeZone("GMT"); - + + @ProtoField(id = 1) private String workflowId; + @ProtoField(id = 2) private String workflowType; - + + @ProtoField(id = 3) private String correlationId; - + + @ProtoField(id = 4) private String scheduledTime; - + + @ProtoField(id = 5) private String startTime; - + + @ProtoField(id = 6) private String updateTime; - + + @ProtoField(id = 7) private String endTime; - + + @ProtoField(id = 8) private Status status; - + + @ProtoField(id = 9) private String reasonForIncompletion; - + + @ProtoField(id = 10) private long executionTime; - + + @ProtoField(id = 11) private long queueWaitTime; - + + @ProtoField(id = 12) private String taskDefName; - + + @ProtoField(id = 13) private String taskType; - + + @ProtoField(id = 14) private String input; - + + @ProtoField(id = 15) private String output; - + + @ProtoField(id = 16) private 
String taskId; - + public TaskSummary(Task task) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 783954452b..1c3fcaed03 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -22,12 +22,17 @@ import java.util.Map; import java.util.Set; +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.Auditable; import com.netflix.conductor.common.metadata.tasks.Task; +@ProtoMessage public class Workflow extends Auditable{ - + + @ProtoEnum public enum WorkflowStatus { RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), PAUSED(false, true); @@ -48,39 +53,56 @@ public boolean isSuccessful(){ return successful; } } - + + @ProtoField(id = 1) private WorkflowStatus status = WorkflowStatus.RUNNING; - + + @ProtoField(id = 2) private long endTime; + @ProtoField(id = 3) private String workflowId; - + + @ProtoField(id = 4) private String parentWorkflowId; + @ProtoField(id = 5) private String parentWorkflowTaskId; + @ProtoField(id = 6) private List tasks = new LinkedList<>(); - + + @ProtoField(id = 8) private Map input = new HashMap<>(); - + + @ProtoField(id = 9) private Map output = new HashMap<>();; - + + @ProtoField(id = 10) private String workflowType; - + + @ProtoField(id = 11) private int version; - + + @ProtoField(id = 12) private String correlationId; - + + @ProtoField(id = 13) private String reRunFromWorkflowId; - + + @ProtoField(id = 14) private String reasonForIncompletion; - + + @ProtoField(id = 15) private int schemaVersion; - + + @ProtoField(id = 16) private String event; + @ProtoField(id = 17) private Map taskToDomain = new HashMap<>(); + @ProtoField(id = 18) private Set failedReferenceTaskNames = new HashSet<>(); public Workflow(){ diff --git a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java index 43afb30174..9048ff19c2 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java @@ -23,6 +23,8 @@ import java.util.TimeZone; import java.util.stream.Collectors; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; /** @@ -30,39 +32,54 @@ * * @author Viren */ +@ProtoMessage(fromProto = false) public class WorkflowSummary { /** * The time should be stored as GMT */ private static final TimeZone gmt = TimeZone.getTimeZone("GMT"); - + + @ProtoField(id = 1) private String workflowType; - + + @ProtoField(id = 2) private int version; - + + @ProtoField(id = 3) private String workflowId; - + + @ProtoField(id = 4) private String correlationId; - + + @ProtoField(id = 5) private String startTime; - + + @ProtoField(id = 6) private String updateTime; - + + @ProtoField(id = 7) private String endTime; - + + @ProtoField(id = 8) private WorkflowStatus status; - + + @ProtoField(id = 9) private String input; - + + @ProtoField(id = 10) private String output; - + + 
@ProtoField(id = 11) private String reasonForIncompletion; - + + @ProtoField(id = 12) private long executionTime; - + + @ProtoField(id = 13) private String event; + @ProtoField(id = 14) private String failedReferenceTaskNames = ""; public WorkflowSummary() { From d8a3b806ac1c8ecc281b23587b3177a2c8302ead Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 28 May 2018 15:46:51 +0200 Subject: [PATCH 009/163] grpc: Add autogenerated .proto schemas Once all the internal domain objects have been annotated, we can run `protogen` to generate all the `.proto` schemas for these objects. This commit checks in the autogenerated code into the repository as we want to keep a consistent record of it to ensure it can also be used by non-Java clients of the service. Furthermore, it also introduces hand-written ProtoBuf schema for service definitions, which will be exposed by the eventual GRPC server. All these .proto schemas have been added on a new `grpc` module, with a corresponding Gradle setup that ensures they get translated into the Java protobuf generated code at compile time. This `grpc` package will be depdended on by our GRPC Server. --- grpc/build.gradle | 40 +++++++ .../main/proto/grpc/metadata_service.proto | 55 +++++++++ grpc/src/main/proto/grpc/search.proto | 20 ++++ grpc/src/main/proto/grpc/task_service.proto | 70 +++++++++++ .../main/proto/grpc/workflow_service.proto | 110 ++++++++++++++++++ .../proto/model/dynamicforkjointask.proto | 15 +++ .../proto/model/dynamicforkjointasklist.proto | 11 ++ .../src/main/proto/model/eventexecution.proto | 23 ++++ grpc/src/main/proto/model/polldata.proto | 13 +++ .../proto/model/rerunworkflowrequest.proto | 15 +++ .../main/proto/model/skiptaskrequest.proto | 12 ++ .../proto/model/startworkflowrequest.proto | 15 +++ .../main/proto/model/subworkflowparams.proto | 12 ++ grpc/src/main/proto/model/task.proto | 49 ++++++++ grpc/src/main/proto/model/taskdef.proto | 31 +++++ grpc/src/main/proto/model/taskexeclog.proto | 12 ++ grpc/src/main/proto/model/taskresult.proto | 23 ++++ grpc/src/main/proto/model/tasksummary.proto | 26 +++++ grpc/src/main/proto/model/workflow.proto | 36 ++++++ grpc/src/main/proto/model/workflowdef.proto | 19 +++ .../main/proto/model/workflowsummary.proto | 24 ++++ grpc/src/main/proto/model/workflowtask.proto | 44 +++++++ settings.gradle | 1 + versionsOfDependencies.gradle | 1 + 24 files changed, 677 insertions(+) create mode 100644 grpc/build.gradle create mode 100644 grpc/src/main/proto/grpc/metadata_service.proto create mode 100644 grpc/src/main/proto/grpc/search.proto create mode 100644 grpc/src/main/proto/grpc/task_service.proto create mode 100644 grpc/src/main/proto/grpc/workflow_service.proto create mode 100644 grpc/src/main/proto/model/dynamicforkjointask.proto create mode 100644 grpc/src/main/proto/model/dynamicforkjointasklist.proto create mode 100644 grpc/src/main/proto/model/eventexecution.proto create mode 100644 grpc/src/main/proto/model/polldata.proto create mode 100644 grpc/src/main/proto/model/rerunworkflowrequest.proto create mode 100644 grpc/src/main/proto/model/skiptaskrequest.proto create mode 100644 grpc/src/main/proto/model/startworkflowrequest.proto create mode 100644 grpc/src/main/proto/model/subworkflowparams.proto create mode 100644 grpc/src/main/proto/model/task.proto create mode 100644 grpc/src/main/proto/model/taskdef.proto create mode 100644 grpc/src/main/proto/model/taskexeclog.proto create mode 100644 grpc/src/main/proto/model/taskresult.proto create mode 100644 
grpc/src/main/proto/model/tasksummary.proto create mode 100644 grpc/src/main/proto/model/workflow.proto create mode 100644 grpc/src/main/proto/model/workflowdef.proto create mode 100644 grpc/src/main/proto/model/workflowsummary.proto create mode 100644 grpc/src/main/proto/model/workflowtask.proto diff --git a/grpc/build.gradle b/grpc/build.gradle new file mode 100644 index 0000000000..ac0eb4cd48 --- /dev/null +++ b/grpc/build.gradle @@ -0,0 +1,40 @@ +buildscript { + dependencies { + classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.5' + } +} + +plugins { + id 'java' + id 'idea' + id "com.google.protobuf" version "0.8.5" +} + +dependencies { + compile "com.google.api.grpc:proto-google-common-protos:1.0.0" + compile "io.grpc:grpc-protobuf:${revGrpc}" + compile "io.grpc:grpc-stub:${revGrpc}" +} + +protobuf { + protoc { + artifact = 'com.google.protobuf:protoc:3.5.1-1' + } + plugins { + grpc { + artifact = "io.grpc:protoc-gen-grpc-java:${revGrpc}" + } + } + generateProtoTasks { + all()*.plugins { + grpc {} + } + } +} + +idea { + module { + sourceDirs += file("${projectDir}/build/generated/source/proto/main/java"); + sourceDirs += file("${projectDir}/build/generated/source/proto/main/grpc"); + } +} diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto new file mode 100644 index 0000000000..92402a9f36 --- /dev/null +++ b/grpc/src/main/proto/grpc/metadata_service.proto @@ -0,0 +1,55 @@ +syntax = "proto3"; +package com.netflix.conductor.grpc; + +import "google/protobuf/empty.proto"; +import "model/taskdef.proto"; +import "model/workflowdef.proto"; + +option java_outer_classname = "MetadataServicePb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; + +service MetadataService { + // POST /workflow + rpc CreateWorkflow(com.netflix.conductor.proto.WorkflowDef) returns (google.protobuf.Empty); + + // PUT /workflow + rpc UpdateWorkflows(UpdateWorkflowsRequest) returns (google.protobuf.Empty); + + // GET /workflow/{name} + rpc GetWorkflow(GetWorkflowRequest) returns (com.netflix.conductor.proto.WorkflowDef); + + // GET /workflow + rpc GetAllWorkflows(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.WorkflowDef); + + // POST /taskdefs + rpc CreateTasks(CreateTasksRequest) returns (google.protobuf.Empty); + + // PUT /taskdefs + rpc UpdateTask(com.netflix.conductor.proto.TaskDef) returns (google.protobuf.Empty); + + // GET /taskdefs/{tasktype} + rpc GetTask(GetTaskRequest) returns (com.netflix.conductor.proto.TaskDef); + + // GET /taskdefs + rpc GetAllTasks(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.TaskDef); + + // DELETE /taskdefs/{tasktype} + rpc DeleteTask(GetTaskRequest) returns (google.protobuf.Empty); +} + +message UpdateWorkflowsRequest { + repeated com.netflix.conductor.proto.WorkflowDef defs = 1; +} + +message CreateTasksRequest { + repeated com.netflix.conductor.proto.TaskDef defs = 1; +} + +message GetWorkflowRequest { + string name = 1; + int32 version = 2; +} + +message GetTaskRequest { + string task_type = 1; +} diff --git a/grpc/src/main/proto/grpc/search.proto b/grpc/src/main/proto/grpc/search.proto new file mode 100644 index 0000000000..48faf2c746 --- /dev/null +++ b/grpc/src/main/proto/grpc/search.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; +package com.netflix.conductor.grpc; + +import "model/workflowsummary.proto"; + +option java_outer_classname = "SearchPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; + 
+message SearchRequest { + int32 start = 1; + int32 size = 2; + string sort = 3; + string free_text = 4; + string query = 5; +} + +message WorkflowSummarySearchResult { + int64 total_hits = 1; + repeated com.netflix.conductor.proto.WorkflowSummary results = 2; +} \ No newline at end of file diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto new file mode 100644 index 0000000000..362b4c652d --- /dev/null +++ b/grpc/src/main/proto/grpc/task_service.proto @@ -0,0 +1,70 @@ +syntax = "proto3"; +package com.netflix.conductor.grpc; + +import "google/protobuf/empty.proto"; +import "model/taskexeclog.proto"; +import "model/taskresult.proto"; +import "model/task.proto"; + +option java_outer_classname = "TaskServicePb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; + +service TaskService { + rpc Poll(PollRequest) returns (com.netflix.conductor.proto.Task); + rpc PollStream(stream PollRequest) returns (stream com.netflix.conductor.proto.Task); + rpc GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse); + rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (com.netflix.conductor.proto.Task); + rpc UpdateTask(com.netflix.conductor.proto.TaskResult) returns (TaskUpdateResponse); + rpc AckTask(AckTaskRequest) returns (AckTaskResponse); + + rpc AddLog(AddLogRequest) returns (google.protobuf.Empty); + rpc GetLogs(TaskId) returns (GetLogsResponse); +} + +message PollRequest { + string task_type = 1; + string worker_id = 2; + string domain = 3; + int32 task_count = 4; +} + +message TasksInProgressRequest { + string task_type = 1; + string start_key = 2; + int32 count = 3; +} + +message TasksInProgressResponse { + repeated com.netflix.conductor.proto.Task tasks = 1; +} + +message PendingTaskRequest { + string workflow_id = 1; + string task_ref_name = 2; +} + +message TaskUpdateResponse { + string task_id = 1; +} + +message AckTaskRequest { + string task_id = 1; + string worker_id = 2; +} + +message AckTaskResponse { + bool ack = 1; +} + +message AddLogRequest { + string task_id = 1; + string log = 2; +} + +message TaskId { + string task_id = 1; +} + +message GetLogsResponse { + repeated com.netflix.conductor.proto.TaskExecLog logs = 1; +} diff --git a/grpc/src/main/proto/grpc/workflow_service.proto b/grpc/src/main/proto/grpc/workflow_service.proto new file mode 100644 index 0000000000..4f01c85cd2 --- /dev/null +++ b/grpc/src/main/proto/grpc/workflow_service.proto @@ -0,0 +1,110 @@ +syntax = "proto3"; +package com.netflix.conductor.grpc; + +import "google/protobuf/empty.proto"; +import "grpc/search.proto"; +import "model/workflow.proto"; +import "model/skiptaskrequest.proto"; +import "model/startworkflowrequest.proto"; +import "model/rerunworkflowrequest.proto"; + +option java_outer_classname = "WorkflowServicePb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; + +service WorkflowService { + // POST / + rpc StartWorkflow(com.netflix.conductor.proto.StartWorkflowRequest) returns (WorkflowId); + + // GET /{name}/correlated/{correlationId} + rpc GetWorkflows(GetWorkflowsRequest) returns (GetWorkflowsResponse); + + // GET /{workflowId} + rpc GetWorkflowStatus(GetWorkflowStatusRequest) returns (com.netflix.conductor.proto.Workflow); + + // DELETE /{workflodId}/remove + rpc RemoveWorkflow(RemoveWorkflowRequest) returns (google.protobuf.Empty); + + // GET /running/{name} + rpc GetRunningWorkflows(GetRunningWorkflowsRequest) returns (GetRunningWorkflowsResponse); + + 
// PUT /decide/{workflowId} + rpc DecideWorkflow(WorkflowId) returns (google.protobuf.Empty); + + // PUT /{workflowId}/pause + rpc PauseWorkflow(WorkflowId) returns (google.protobuf.Empty); + + // PUT /{workflowId}/pause + rpc ResumeWorkflow(WorkflowId) returns (google.protobuf.Empty); + + // PUT /{workflowId}/skiptask/{taskReferenceName} + rpc SkipTaskFromWorkflow(SkipTaskRequest) returns (google.protobuf.Empty); + + // POST /{workflowId}/rerun + rpc RerunWorkflow(com.netflix.conductor.proto.RerunWorkflowRequest) returns (WorkflowId); + + // POST /{workflowId}/restart + rpc RestartWorkflow(WorkflowId) returns (google.protobuf.Empty); + + // POST /{workflowId}retry + rpc RetryWorkflow(WorkflowId) returns (google.protobuf.Empty); + + // POST /{workflowId}/resetcallbacks + rpc ResetWorkflowCallbacks(WorkflowId) returns (google.protobuf.Empty); + + // DELETE /{workflowId} + rpc TerminateWorkflow(TerminateWorkflowRequest) returns (google.protobuf.Empty); + + // GET /search + rpc Search(SearchRequest) returns (WorkflowSummarySearchResult); + rpc SearchByTasks(SearchRequest) returns (WorkflowSummarySearchResult); +} + +message GetWorkflowsRequest { + string name = 1; + repeated string correlation_id = 2; + bool include_closed = 3; + bool include_tasks = 4; +} + +message GetWorkflowsResponse { + message Workflows { + repeated com.netflix.conductor.proto.Workflow workflows = 1; + } + map workflows_by_id = 1; +} + +message GetWorkflowStatusRequest { + string workflow_id = 1; + bool include_tasks = 2; +} + +message RemoveWorkflowRequest { + string workflod_id = 1; + bool archive_workflow = 2; +} + +message GetRunningWorkflowsRequest { + string name = 1; + int32 version = 2; + int64 start_time = 3; + int64 end_time = 4; +} + +message GetRunningWorkflowsResponse { + repeated string workflow_ids = 1; +} + +message WorkflowId { + string workflow_id = 1; +} + +message SkipTaskRequest { + string workflow_id = 1; + string task_reference_name = 2; + com.netflix.conductor.proto.SkipTaskRequest request = 3; +} + +message TerminateWorkflowRequest { + string workflow_id = 1; + string reason = 2; +} \ No newline at end of file diff --git a/grpc/src/main/proto/model/dynamicforkjointask.proto b/grpc/src/main/proto/model/dynamicforkjointask.proto new file mode 100644 index 0000000000..e7f33426ac --- /dev/null +++ b/grpc/src/main/proto/model/dynamicforkjointask.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "DynamicForkJoinTaskPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message DynamicForkJoinTask { + string task_name = 1; + string workflow_name = 2; + string reference_name = 3; + map input = 4; + string type = 5; +} diff --git a/grpc/src/main/proto/model/dynamicforkjointasklist.proto b/grpc/src/main/proto/model/dynamicforkjointasklist.proto new file mode 100644 index 0000000000..0fc1ccb4e6 --- /dev/null +++ b/grpc/src/main/proto/model/dynamicforkjointasklist.proto @@ -0,0 +1,11 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/dynamicforkjointask.proto"; + +option java_outer_classname = "DynamicForkJoinTaskListPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message DynamicForkJoinTaskList { + repeated DynamicForkJoinTask dynamic_tasks = 1; +} diff --git a/grpc/src/main/proto/model/eventexecution.proto b/grpc/src/main/proto/model/eventexecution.proto new file mode 100644 index 
0000000000..6d6ac7d621 --- /dev/null +++ b/grpc/src/main/proto/model/eventexecution.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "EventExecutionPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message EventExecution { + enum Status { + IN_PROGRESS = 0; + COMPLETED = 1; + FAILED = 2; + SKIPPED = 3; + } + string id = 1; + string message_id = 2; + string name = 3; + string event = 4; + int64 created = 5; + EventExecution.Status status = 6; + map output = 8; +} diff --git a/grpc/src/main/proto/model/polldata.proto b/grpc/src/main/proto/model/polldata.proto new file mode 100644 index 0000000000..b19b579e2c --- /dev/null +++ b/grpc/src/main/proto/model/polldata.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + + +option java_outer_classname = "PollDataPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message PollData { + string queue_name = 1; + string domain = 2; + string worker_id = 3; + int64 last_poll_time = 4; +} diff --git a/grpc/src/main/proto/model/rerunworkflowrequest.proto b/grpc/src/main/proto/model/rerunworkflowrequest.proto new file mode 100644 index 0000000000..a23b59efda --- /dev/null +++ b/grpc/src/main/proto/model/rerunworkflowrequest.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "RerunWorkflowRequestPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message RerunWorkflowRequest { + string re_run_from_workflow_id = 1; + map workflow_input = 2; + string re_run_from_task_id = 3; + map task_input = 4; + string correlation_id = 5; +} diff --git a/grpc/src/main/proto/model/skiptaskrequest.proto b/grpc/src/main/proto/model/skiptaskrequest.proto new file mode 100644 index 0000000000..9b8f77f6cf --- /dev/null +++ b/grpc/src/main/proto/model/skiptaskrequest.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "SkipTaskRequestPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message SkipTaskRequest { + map task_input = 1; + map task_output = 2; +} diff --git a/grpc/src/main/proto/model/startworkflowrequest.proto b/grpc/src/main/proto/model/startworkflowrequest.proto new file mode 100644 index 0000000000..61fe3db0ef --- /dev/null +++ b/grpc/src/main/proto/model/startworkflowrequest.proto @@ -0,0 +1,15 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "StartWorkflowRequestPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message StartWorkflowRequest { + string name = 1; + int32 version = 2; + string correlation_id = 3; + map input = 4; + map task_to_domain = 5; +} diff --git a/grpc/src/main/proto/model/subworkflowparams.proto b/grpc/src/main/proto/model/subworkflowparams.proto new file mode 100644 index 0000000000..50fa03846b --- /dev/null +++ b/grpc/src/main/proto/model/subworkflowparams.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "SubWorkflowParamsPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + 
+message SubWorkflowParams { + string name = 1; + google.protobuf.Value version = 2; +} diff --git a/grpc/src/main/proto/model/task.proto b/grpc/src/main/proto/model/task.proto new file mode 100644 index 0000000000..1176cc729e --- /dev/null +++ b/grpc/src/main/proto/model/task.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/workflowtask.proto"; +import "google/protobuf/struct.proto"; + +option java_outer_classname = "TaskPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message Task { + enum Status { + IN_PROGRESS = 0; + CANCELED = 1; + FAILED = 2; + COMPLETED = 3; + COMPLETED_WITH_ERRORS = 4; + SCHEDULED = 5; + TIMED_OUT = 6; + READY_FOR_RERUN = 7; + SKIPPED = 8; + } + string task_type = 1; + Task.Status status = 2; + map input_data = 3; + string reference_task_name = 4; + int32 retry_count = 5; + int32 seq = 6; + string correlation_id = 7; + int32 poll_count = 8; + string task_def_name = 9; + int64 scheduled_time = 10; + int64 start_time = 11; + int64 end_time = 12; + int64 update_time = 13; + int32 start_delay_in_seconds = 14; + string retried_task_id = 15; + bool retried = 16; + bool callback_from_worker = 17; + int32 response_timeout_seconds = 18; + string workflow_instance_id = 19; + string workflow_type = 20; + string task_id = 21; + string reason_for_incompletion = 22; + int64 callback_after_seconds = 23; + string worker_id = 24; + map output_data = 25; + WorkflowTask workflow_task = 26; + string domain = 27; +} diff --git a/grpc/src/main/proto/model/taskdef.proto b/grpc/src/main/proto/model/taskdef.proto new file mode 100644 index 0000000000..ae5f833bd8 --- /dev/null +++ b/grpc/src/main/proto/model/taskdef.proto @@ -0,0 +1,31 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "TaskDefPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message TaskDef { + enum RetryLogic { + FIXED = 0; + EXPONENTIAL_BACKOFF = 1; + } + enum TimeoutPolicy { + RETRY = 0; + TIME_OUT_WF = 1; + ALERT_ONLY = 2; + } + string name = 1; + string description = 2; + int32 retry_count = 3; + int64 timeout_seconds = 4; + repeated string input_keys = 5; + repeated string output_keys = 6; + TaskDef.TimeoutPolicy timeout_policy = 7; + TaskDef.RetryLogic retry_logic = 8; + int32 retry_delay_seconds = 9; + int32 response_timeout_seconds = 10; + int32 concurrent_exec_limit = 11; + map input_template = 12; +} diff --git a/grpc/src/main/proto/model/taskexeclog.proto b/grpc/src/main/proto/model/taskexeclog.proto new file mode 100644 index 0000000000..3b3faf8f4a --- /dev/null +++ b/grpc/src/main/proto/model/taskexeclog.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + + +option java_outer_classname = "TaskExecLogPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message TaskExecLog { + string log = 1; + string task_id = 2; + int64 created_time = 3; +} diff --git a/grpc/src/main/proto/model/taskresult.proto b/grpc/src/main/proto/model/taskresult.proto new file mode 100644 index 0000000000..07d51e0f10 --- /dev/null +++ b/grpc/src/main/proto/model/taskresult.proto @@ -0,0 +1,23 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "TaskResultPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message TaskResult { + enum 
Status { + IN_PROGRESS = 0; + FAILED = 1; + COMPLETED = 2; + SCHEDULED = 3; + } + string workflow_instance_id = 1; + string task_id = 2; + string reason_for_incompletion = 3; + int64 callback_after_seconds = 4; + string worker_id = 5; + TaskResult.Status status = 6; + map output_data = 7; +} diff --git a/grpc/src/main/proto/model/tasksummary.proto b/grpc/src/main/proto/model/tasksummary.proto new file mode 100644 index 0000000000..2ff72d2afa --- /dev/null +++ b/grpc/src/main/proto/model/tasksummary.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/task.proto"; + +option java_outer_classname = "TaskSummaryPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message TaskSummary { + string workflow_id = 1; + string workflow_type = 2; + string correlation_id = 3; + string scheduled_time = 4; + string start_time = 5; + string update_time = 6; + string end_time = 7; + Task.Status status = 8; + string reason_for_incompletion = 9; + int64 execution_time = 10; + int64 queue_wait_time = 11; + string task_def_name = 12; + string task_type = 13; + string input = 14; + string output = 15; + string task_id = 16; +} diff --git a/grpc/src/main/proto/model/workflow.proto b/grpc/src/main/proto/model/workflow.proto new file mode 100644 index 0000000000..59575d57e0 --- /dev/null +++ b/grpc/src/main/proto/model/workflow.proto @@ -0,0 +1,36 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/task.proto"; +import "google/protobuf/struct.proto"; + +option java_outer_classname = "WorkflowPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message Workflow { + enum WorkflowStatus { + RUNNING = 0; + COMPLETED = 1; + FAILED = 2; + TIMED_OUT = 3; + TERMINATED = 4; + PAUSED = 5; + } + Workflow.WorkflowStatus status = 1; + int64 end_time = 2; + string workflow_id = 3; + string parent_workflow_id = 4; + string parent_workflow_task_id = 5; + repeated Task tasks = 6; + map input = 8; + map output = 9; + string workflow_type = 10; + int32 version = 11; + string correlation_id = 12; + string re_run_from_workflow_id = 13; + string reason_for_incompletion = 14; + int32 schema_version = 15; + string event = 16; + map task_to_domain = 17; + repeated string failed_reference_task_names = 18; +} diff --git a/grpc/src/main/proto/model/workflowdef.proto b/grpc/src/main/proto/model/workflowdef.proto new file mode 100644 index 0000000000..5a22086a98 --- /dev/null +++ b/grpc/src/main/proto/model/workflowdef.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/workflowtask.proto"; +import "google/protobuf/struct.proto"; + +option java_outer_classname = "WorkflowDefPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message WorkflowDef { + string name = 1; + string description = 2; + int32 version = 3; + repeated WorkflowTask tasks = 4; + repeated string input_parameters = 5; + map output_parameters = 6; + string failure_workflow = 7; + int32 schema_version = 8; +} diff --git a/grpc/src/main/proto/model/workflowsummary.proto b/grpc/src/main/proto/model/workflowsummary.proto new file mode 100644 index 0000000000..463bf01678 --- /dev/null +++ b/grpc/src/main/proto/model/workflowsummary.proto @@ -0,0 +1,24 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/workflow.proto"; + +option java_outer_classname = "WorkflowSummaryPb"; +option go_package = 
"github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message WorkflowSummary { + string workflow_type = 1; + int32 version = 2; + string workflow_id = 3; + string correlation_id = 4; + string start_time = 5; + string update_time = 6; + string end_time = 7; + Workflow.WorkflowStatus status = 8; + string input = 9; + string output = 10; + string reason_for_incompletion = 11; + int64 execution_time = 12; + string event = 13; + string failed_reference_task_names = 14; +} diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto new file mode 100644 index 0000000000..9bad330d3d --- /dev/null +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "model/subworkflowparams.proto"; +import "google/protobuf/struct.proto"; + +option java_outer_classname = "WorkflowTaskPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message WorkflowTask { + message WorkflowTaskList { + repeated WorkflowTask tasks = 1; + } + enum Type { + SIMPLE = 0; + DYNAMIC = 1; + FORK_JOIN = 2; + FORK_JOIN_DYNAMIC = 3; + DECISION = 4; + JOIN = 5; + SUB_WORKFLOW = 6; + EVENT = 7; + WAIT = 8; + USER_DEFINED = 9; + } + string name = 1; + string task_reference_name = 2; + string description = 3; + map input_parameters = 4; + string type = 5; + string dynamic_task_name_param = 6; + string case_value_param = 7; + string case_expression = 8; + map decision_cases = 9; + string dynamic_fork_tasks_param = 10; + string dynamic_fork_tasks_input_param_name = 11; + repeated WorkflowTask default_case = 12; + repeated WorkflowTask.WorkflowTaskList fork_tasks = 13; + int32 start_delay = 14; + SubWorkflowParams sub_workflow_param = 15; + repeated string join_on = 16; + string sink = 17; + bool optional = 18; +} diff --git a/settings.gradle b/settings.gradle index efc2eb4a20..8823e1c8e9 100644 --- a/settings.gradle +++ b/settings.gradle @@ -3,5 +3,6 @@ rootProject.name='conductor' include 'client','common','contribs','core','es5-persistence','jersey','mysql-persistence' include 'redis-persistence','server','test-harness','ui' include 'protogen' +include 'grpc' rootProject.children.each {it.name="conductor-${it.name}"} diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index 6effde720d..b3dbcca896 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -14,6 +14,7 @@ ext { revElasticSearch5Client = '5.6.8' revEurekaClient = '1.8.7' revFlywayCore ='4.0.3' + revGrpc = '1.11.0' revGuavaRetrying = '2.0.0' revGuice = '4.1.0' revGuiceMultiBindings = '4.1.0' From a44ebeeb8a9a149f3da34c90da54011737c3334d Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 28 May 2018 15:47:52 +0200 Subject: [PATCH 010/163] grpc-server: Implement a GRPC interface for Conductor With our generated protobuf schemas in place, it is now possible to implement the GRPC interface that Conductor will expose. The implementation is straightforward as all the exposed APIs follow as closely as possible the existing REST HTTP API. We have only ported over the most important service interfaces so far (metadata, tasks and workflows). The .proto schemas in the `grpc` package contain comments mapping every single GRPC call to its corresponding REST API, and there should be a 1:1 match except in rare cases where several HTTP endpoints could be merged into a single RPC call. 
The only notable addition to the GRPC interface is a `PollStream` call that has no RESTful equivalent. `PollStream` uses GRPC's streaming capabilities to provide a long-running RPC connection that can keep providing a Conductor client with a constant stream of tasks to execute. The current implementation for this API in the backend is not ideal, as it has to use the existing Executor DAO, which only provides `poll` methods. Because of this, we ask the client to submit a periodic "ping" packet with information on the number of tasks that the client is able to process at that moment, and as many tasks as possible are returned to the client. --- grpc-server/build.gradle | 15 + .../conductor/grpc/server/GRPCModule.java | 48 ++ .../conductor/grpc/server/GRPCServer.java | 46 ++ .../netflix/conductor/grpc/server/Main.java | 45 ++ .../grpc/server/MetadataServiceImpl.java | 118 +++ .../conductor/grpc/server/ProtoMapper.java | 746 ++++++++++++++++++ .../grpc/server/ProtoMapperBase.java | 81 ++ .../grpc/server/TaskServiceImpl.java | 171 ++++ .../grpc/server/WorkflowServiceImpl.java | 282 +++++++ settings.gradle | 2 +- 10 files changed, 1553 insertions(+), 1 deletion(-) create mode 100644 grpc-server/build.gradle create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle new file mode 100644 index 0000000000..86f8e1c04d --- /dev/null +++ b/grpc-server/build.gradle @@ -0,0 +1,15 @@ +plugins { + id 'net.ltgt.apt' version '0.8' +} + +dependencies { + compile project(':conductor-common') + compile project(':conductor-core') + compile project(':conductor-grpc') + + //FIXME Right now this brings a lot of stuff along for the ride. :-( + compile project(':conductor-server') + + compile "io.grpc:grpc-netty:${revGrpc}" + compile "log4j:log4j:1.2.17" +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java new file mode 100644 index 0000000000..7df8aa49cd --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java @@ -0,0 +1,48 @@ +package com.netflix.conductor.grpc.server; + +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.dao.index.ElasticsearchModule; +import com.netflix.conductor.grpc.TaskServiceGrpc; +import com.netflix.conductor.grpc.WorkflowServiceGrpc; + +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; + +public class GRPCModule extends AbstractModule { + + // FIXME Eventually this should be shared with the Jersey code and provided by the server module.
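For illustration, a worker's side of the PollStream exchange described in the commit message could look roughly like the sketch below, written against the generated async stub. The PollRequest fields match the ones the server implementation reads later in this patch (task_type, worker_id, task_count); the host, task type, worker id, and capacity values are made-up examples:

    import com.netflix.conductor.grpc.TaskServiceGrpc;
    import com.netflix.conductor.grpc.TaskServicePb;
    import com.netflix.conductor.proto.TaskPb;
    import io.grpc.ManagedChannel;
    import io.grpc.ManagedChannelBuilder;
    import io.grpc.stub.StreamObserver;

    public class PollStreamExample {
        public static void main(String[] args) throws InterruptedException {
            ManagedChannel channel = ManagedChannelBuilder
                    .forAddress("localhost", 8080).usePlaintext(true).build();

            // Open the bidirectional stream; the returned observer carries our pings.
            StreamObserver<TaskServicePb.PollRequest> pings = TaskServiceGrpc.newStub(channel)
                    .pollStream(new StreamObserver<TaskPb.Task>() {
                        @Override public void onNext(TaskPb.Task task) {
                            // Hand the task off to a local worker thread here.
                            System.out.println("got task " + task.getTaskId());
                        }
                        @Override public void onError(Throwable t) { t.printStackTrace(); }
                        @Override public void onCompleted() { }
                    });

            // The periodic "ping": advertise capacity for 10 more tasks of this type.
            pings.onNext(TaskServicePb.PollRequest.newBuilder()
                    .setTaskType("encode_task")   // hypothetical task type
                    .setWorkerId("worker-1")
                    .setTaskCount(10)
                    .build());

            Thread.sleep(5_000);   // a real worker keeps the stream open indefinitely
            pings.onCompleted();
            channel.shutdown();
        }
    }

A real worker would send a fresh PollRequest whenever it has spare capacity again; that recurring message is the "ping" packet described above.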
+ private final int maxThreads = 50; + private final Configuration configuration; + private ExecutorService es; + + public GRPCModule(Configuration configuration){ + this.configuration = configuration; + } + + @Override + protected void configure() { + configureExecutorService(); + + install(new ElasticsearchModule()); + bind(Configuration.class).toInstance(configuration); + bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); + bind(WorkflowServiceGrpc.WorkflowServiceImplBase.class).to(WorkflowServiceImpl.class); + bind(GRPCServer.class); + } + + @Provides + public ExecutorService getExecutorService(){ + return this.es; + } + + private void configureExecutorService(){ + AtomicInteger count = new AtomicInteger(0); + this.es = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { + Thread conductorWorkerThread = new Thread(runnable); + conductorWorkerThread.setName("conductor-worker-" + count.getAndIncrement()); + return conductorWorkerThread; + }); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java new file mode 100644 index 0000000000..37adb7e023 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -0,0 +1,46 @@ +package com.netflix.conductor.grpc.server; + +import com.google.inject.Inject; +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.grpc.TaskServiceGrpc; +import com.netflix.conductor.grpc.WorkflowServiceGrpc; +import io.grpc.Server; +import io.grpc.ServerBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Singleton; +import java.io.IOException; + +@Singleton +public class GRPCServer { + private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class); + + private final Server server; + + public final static String CONFIG_PORT = "grpc.port"; + public final static int CONFIG_PORT_DEFAULT = 8080; + + @Inject + public GRPCServer(TaskServiceGrpc.TaskServiceImplBase taskImpl, + WorkflowServiceGrpc.WorkflowServiceImplBase workflowImpl, + Configuration conf) { + final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); + server = ServerBuilder.forPort(port) + .addService(taskImpl) + .addService(workflowImpl) + .build(); + } + + public void start() throws IOException { + server.start(); + logger.info("grpc: Server started, listening on " + server.getPort()); + } + + public void stop() { + if (server != null) { + logger.info("grpc: server shutting down"); + server.shutdown(); + } + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java new file mode 100644 index 0000000000..46bcd3472f --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java @@ -0,0 +1,45 @@ +package com.netflix.conductor.grpc.server; + +import com.google.inject.Guice; +import com.google.inject.Injector; + +import com.netflix.conductor.server.ConductorConfig; + +import org.apache.log4j.PropertyConfigurator; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +public class Main { + + public static void main(String args[]) throws Exception { + + //FIXME This was copy-pasted and seems like a bad way to load a config, given that it has side effects. + loadConfigFile(args.length > 0 ? args[0] : System.getenv("CONDUCTOR_CONFIG_FILE")); + + if (args.length == 2) { + System.out.println("Using log4j config " + args[1]); + PropertyConfigurator.configure(new FileInputStream(new File(args[1]))); + } + + Injector injector = Guice.createInjector(new GRPCModule(new ConductorConfig())); + GRPCServer server = injector.getInstance(GRPCServer.class); + + Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { + @Override + public void run() { + server.stop(); + } + })); + + server.start(); + } + + private static void loadConfigFile(String propertyFile) throws IOException { + if (propertyFile == null) return; + System.out.println("Using config file " + propertyFile); + Properties props = new Properties(System.getProperties()); + props.load(new FileInputStream(propertyFile)); + System.setProperties(props); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java new file mode 100644 index 0000000000..e9d35aac14 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -0,0 +1,118 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.Empty; +import com.netflix.conductor.common.annotations.ProtoMessage; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.grpc.MetadataServiceGrpc; +import com.netflix.conductor.grpc.MetadataServicePb; +import com.netflix.conductor.proto.TaskDefPb; +import com.netflix.conductor.proto.WorkflowDefPb; +import com.netflix.conductor.service.MetadataService; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; + +import javax.inject.Inject; +import java.util.ArrayList; +import java.util.List; + +public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { + private MetadataService service; + + @Inject + public MetadataServiceImpl(MetadataService service) { + this.service = service; + } + + @Override + public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver response) { + try { + service.registerWorkflowDef(ProtoMapper.fromProto(req)); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) { + List workflows = new ArrayList<>(); + for (WorkflowDefPb.WorkflowDef def : req.getDefsList()) { + workflows.add(ProtoMapper.fromProto(def)); + } + + try { + service.updateWorkflowDef(workflows); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver response) { + // TODO: req.getVersion optional + WorkflowDef def = service.getWorkflowDef(req.getName(), req.getVersion()); + if (def != null) { + response.onNext(ProtoMapper.toProto(def)); + response.onCompleted(); + } else { + response.onError(Status.NOT_FOUND + .withDescription("No such workflow found by name="+req.getName()) + .asRuntimeException() + ); + } + } + + @Override + public void getAllWorkflows(Empty _request, StreamObserver response) { + for (WorkflowDef def : service.getWorkflowDefs()) { + response.onNext(ProtoMapper.toProto(def)); + } + response.onCompleted(); + } + + @Override + public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { + List allTasks = new
ArrayList<>(); + for (TaskDefPb.TaskDef task : req.getDefsList()) { + allTasks.add(ProtoMapper.fromProto(task)); + } + service.registerTaskDef(allTasks); + response.onCompleted(); + } + + @Override + public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { + service.updateTaskDef(ProtoMapper.fromProto(req)); + response.onCompleted(); + } + + @Override + public void getAllTasks(Empty _request, StreamObserver response) { + for (TaskDef def : service.getTaskDefs()) { + response.onNext(ProtoMapper.toProto(def)); + } + response.onCompleted(); + } + + @Override + public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { + TaskDef def = service.getTaskDef(req.getTaskType()); + if (def != null) { + response.onNext(ProtoMapper.toProto(def)); + response.onCompleted(); + } else { + response.onError(Status.NOT_FOUND + .withDescription("No such TaskDef found by taskType="+req.getTaskType()) + .asRuntimeException() + ); + } + } + + @Override + public void deleteTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { + service.unregisterTaskDef(req.getTaskType()); + response.onCompleted(); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java new file mode 100644 index 0000000000..2462838a5d --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java @@ -0,0 +1,746 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.Value; +import com.netflix.conductor.common.metadata.events.EventExecution; +import com.netflix.conductor.common.metadata.tasks.PollData; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskExecLog; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask; +import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; +import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.TaskSummary; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; +import com.netflix.conductor.proto.DynamicForkJoinTaskPb; +import com.netflix.conductor.proto.EventExecutionPb; +import com.netflix.conductor.proto.PollDataPb; +import com.netflix.conductor.proto.RerunWorkflowRequestPb; +import com.netflix.conductor.proto.SkipTaskRequestPb; +import com.netflix.conductor.proto.StartWorkflowRequestPb; +import com.netflix.conductor.proto.SubWorkflowParamsPb; +import com.netflix.conductor.proto.TaskDefPb; +import com.netflix.conductor.proto.TaskExecLogPb; +import com.netflix.conductor.proto.TaskPb; +import com.netflix.conductor.proto.TaskResultPb; +import com.netflix.conductor.proto.TaskSummaryPb; +import com.netflix.conductor.proto.WorkflowDefPb; +import com.netflix.conductor.proto.WorkflowPb; +import 
com.netflix.conductor.proto.WorkflowSummaryPb; +import com.netflix.conductor.proto.WorkflowTaskPb; +import java.lang.IllegalArgumentException; +import java.lang.Object; +import java.lang.String; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Generated; + +@Generated("com.netflix.conductor.protogen.ProtoGen") +public final class ProtoMapper extends ProtoMapperBase { + public static EventExecutionPb.EventExecution toProto(EventExecution from) { + EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); + to.setId( from.getId() ); + to.setMessageId( from.getMessageId() ); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCreated( from.getCreated() ); + to.setStatus( toProto( from.getStatus() ) ); + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public static EventExecution fromProto(EventExecutionPb.EventExecution from) { + EventExecution to = new EventExecution(); + to.setId( from.getId() ); + to.setMessageId( from.getMessageId() ); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCreated( from.getCreated() ); + to.setStatus( fromProto( from.getStatus() ) ); + Map outputMap = new HashMap(); + for (Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + return to; + } + + public static EventExecutionPb.EventExecution.Status toProto(EventExecution.Status from) { + EventExecutionPb.EventExecution.Status to; + switch (from) { + case IN_PROGRESS: to = EventExecutionPb.EventExecution.Status.IN_PROGRESS; break; + case COMPLETED: to = EventExecutionPb.EventExecution.Status.COMPLETED; break; + case FAILED: to = EventExecutionPb.EventExecution.Status.FAILED; break; + case SKIPPED: to = EventExecutionPb.EventExecution.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status from) { + EventExecution.Status to; + switch (from) { + case IN_PROGRESS: to = EventExecution.Status.IN_PROGRESS; break; + case COMPLETED: to = EventExecution.Status.COMPLETED; break; + case FAILED: to = EventExecution.Status.FAILED; break; + case SKIPPED: to = EventExecution.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static PollDataPb.PollData toProto(PollData from) { + PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); + to.setQueueName( from.getQueueName() ); + to.setDomain( from.getDomain() ); + to.setWorkerId( from.getWorkerId() ); + to.setLastPollTime( from.getLastPollTime() ); + return to.build(); + } + + public static PollData fromProto(PollDataPb.PollData from) { + PollData to = new PollData(); + to.setQueueName( from.getQueueName() ); + to.setDomain( from.getDomain() ); + to.setWorkerId( from.getWorkerId() ); + to.setLastPollTime( from.getLastPollTime() ); + return to; + } + + public static TaskPb.Task toProto(Task from) { + TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); + to.setTaskType( from.getTaskType() ); + to.setStatus( toProto( from.getStatus() ) ); + for (Map.Entry pair : from.getInputData().entrySet()) 
{ + to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setReferenceTaskName( from.getReferenceTaskName() ); + to.setRetryCount( from.getRetryCount() ); + to.setSeq( from.getSeq() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setPollCount( from.getPollCount() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setEndTime( from.getEndTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); + to.setRetriedTaskId( from.getRetriedTaskId() ); + to.setRetried( from.isRetried() ); + to.setCallbackFromWorker( from.isCallbackFromWorker() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setWorkflowType( from.getWorkflowType() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + for (Map.Entry pair : from.getOutputData().entrySet()) { + to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); + to.setDomain( from.getDomain() ); + return to.build(); + } + + public static Task fromProto(TaskPb.Task from) { + Task to = new Task(); + to.setTaskType( from.getTaskType() ); + to.setStatus( fromProto( from.getStatus() ) ); + Map inputDataMap = new HashMap(); + for (Map.Entry pair : from.getInputDataMap().entrySet()) { + inputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputData(inputDataMap); + to.setReferenceTaskName( from.getReferenceTaskName() ); + to.setRetryCount( from.getRetryCount() ); + to.setSeq( from.getSeq() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setPollCount( from.getPollCount() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setEndTime( from.getEndTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); + to.setRetriedTaskId( from.getRetriedTaskId() ); + to.setRetried( from.getRetried() ); + to.setCallbackFromWorker( from.getCallbackFromWorker() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setWorkflowType( from.getWorkflowType() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + Map outputDataMap = new HashMap(); + for (Map.Entry pair : from.getOutputDataMap().entrySet()) { + outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputData(outputDataMap); + to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); + to.setDomain( from.getDomain() ); + return to; + } + + public static TaskPb.Task.Status toProto(Task.Status from) { + TaskPb.Task.Status to; + switch (from) { + case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; + case CANCELED: to = TaskPb.Task.Status.CANCELED; break; + case FAILED: to = TaskPb.Task.Status.FAILED; break; + case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; + case COMPLETED_WITH_ERRORS: to = TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; + case SCHEDULED: to = 
TaskPb.Task.Status.SCHEDULED; break; + case TIMED_OUT: to = TaskPb.Task.Status.TIMED_OUT; break; + case READY_FOR_RERUN: to = TaskPb.Task.Status.READY_FOR_RERUN; break; + case SKIPPED: to = TaskPb.Task.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static Task.Status fromProto(TaskPb.Task.Status from) { + Task.Status to; + switch (from) { + case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; + case CANCELED: to = Task.Status.CANCELED; break; + case FAILED: to = Task.Status.FAILED; break; + case COMPLETED: to = Task.Status.COMPLETED; break; + case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; + case SCHEDULED: to = Task.Status.SCHEDULED; break; + case TIMED_OUT: to = Task.Status.TIMED_OUT; break; + case READY_FOR_RERUN: to = Task.Status.READY_FOR_RERUN; break; + case SKIPPED: to = Task.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskDefPb.TaskDef toProto(TaskDef from) { + TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setRetryCount( from.getRetryCount() ); + to.setTimeoutSeconds( from.getTimeoutSeconds() ); + to.addAllInputKeys( from.getInputKeys() ); + to.addAllOutputKeys( from.getOutputKeys() ); + to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); + to.setRetryLogic( toProto( from.getRetryLogic() ) ); + to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); + for (Map.Entry pair : from.getInputTemplate().entrySet()) { + to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public static TaskDef fromProto(TaskDefPb.TaskDef from) { + TaskDef to = new TaskDef(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setRetryCount( from.getRetryCount() ); + to.setTimeoutSeconds( from.getTimeoutSeconds() ); + to.setInputKeys( from.getInputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setOutputKeys( from.getOutputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); + to.setRetryLogic( fromProto( from.getRetryLogic() ) ); + to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); + Map inputTemplateMap = new HashMap(); + for (Map.Entry pair : from.getInputTemplateMap().entrySet()) { + inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputTemplate(inputTemplateMap); + return to; + } + + public static TaskDefPb.TaskDef.RetryLogic toProto(TaskDef.RetryLogic from) { + TaskDefPb.TaskDef.RetryLogic to; + switch (from) { + case FIXED: to = TaskDefPb.TaskDef.RetryLogic.FIXED; break; + case EXPONENTIAL_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskDef.RetryLogic fromProto(TaskDefPb.TaskDef.RetryLogic from) { + TaskDef.RetryLogic to; + switch (from) { + case FIXED: to = TaskDef.RetryLogic.FIXED; break; + case EXPONENTIAL_BACKOFF: to = 
TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskDefPb.TaskDef.TimeoutPolicy toProto(TaskDef.TimeoutPolicy from) { + TaskDefPb.TaskDef.TimeoutPolicy to; + switch (from) { + case RETRY: to = TaskDefPb.TaskDef.TimeoutPolicy.RETRY; break; + case TIME_OUT_WF: to = TaskDefPb.TaskDef.TimeoutPolicy.TIME_OUT_WF; break; + case ALERT_ONLY: to = TaskDefPb.TaskDef.TimeoutPolicy.ALERT_ONLY; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { + TaskDef.TimeoutPolicy to; + switch (from) { + case RETRY: to = TaskDef.TimeoutPolicy.RETRY; break; + case TIME_OUT_WF: to = TaskDef.TimeoutPolicy.TIME_OUT_WF; break; + case ALERT_ONLY: to = TaskDef.TimeoutPolicy.ALERT_ONLY; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { + TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); + to.setLog( from.getLog() ); + to.setTaskId( from.getTaskId() ); + to.setCreatedTime( from.getCreatedTime() ); + return to.build(); + } + + public static TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { + TaskExecLog to = new TaskExecLog(); + to.setLog( from.getLog() ); + to.setTaskId( from.getTaskId() ); + to.setCreatedTime( from.getCreatedTime() ); + return to; + } + + public static TaskResultPb.TaskResult toProto(TaskResult from) { + TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + to.setStatus( toProto( from.getStatus() ) ); + for (Map.Entry pair : from.getOutputData().entrySet()) { + to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public static TaskResult fromProto(TaskResultPb.TaskResult from) { + TaskResult to = new TaskResult(); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + to.setStatus( fromProto( from.getStatus() ) ); + Map outputDataMap = new HashMap(); + for (Map.Entry pair : from.getOutputDataMap().entrySet()) { + outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputData(outputDataMap); + return to; + } + + public static TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { + TaskResultPb.TaskResult.Status to; + switch (from) { + case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; + case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; + case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; + case SCHEDULED: to = TaskResultPb.TaskResult.Status.SCHEDULED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { + TaskResult.Status to; + switch (from) { + case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; + 
case FAILED: to = TaskResult.Status.FAILED; break; + case COMPLETED: to = TaskResult.Status.COMPLETED; break; + case SCHEDULED: to = TaskResult.Status.SCHEDULED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { + DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); + to.setTaskName( from.getTaskName() ); + to.setWorkflowName( from.getWorkflowName() ); + to.setReferenceName( from.getReferenceName() ); + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setType( from.getType() ); + return to.build(); + } + + public static DynamicForkJoinTask fromProto(DynamicForkJoinTaskPb.DynamicForkJoinTask from) { + DynamicForkJoinTask to = new DynamicForkJoinTask(); + to.setTaskName( from.getTaskName() ); + to.setWorkflowName( from.getWorkflowName() ); + to.setReferenceName( from.getReferenceName() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + to.setType( from.getType() ); + return to; + } + + public static DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto( + DynamicForkJoinTaskList from) { + DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to = DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder(); + for (DynamicForkJoinTask elem : from.getDynamicTasks()) { + to.addDynamicTasks( toProto(elem) ); + } + return to.build(); + } + + public static DynamicForkJoinTaskList fromProto( + DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) { + DynamicForkJoinTaskList to = new DynamicForkJoinTaskList(); + to.setDynamicTasks( from.getDynamicTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + return to; + } + + public static RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { + RerunWorkflowRequest to = new RerunWorkflowRequest(); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + Map workflowInputMap = new HashMap(); + for (Map.Entry pair : from.getWorkflowInputMap().entrySet()) { + workflowInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setWorkflowInput(workflowInputMap); + to.setReRunFromTaskId( from.getReRunFromTaskId() ); + Map taskInputMap = new HashMap(); + for (Map.Entry pair : from.getTaskInputMap().entrySet()) { + taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setTaskInput(taskInputMap); + to.setCorrelationId( from.getCorrelationId() ); + return to; + } + + public static SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { + SkipTaskRequest to = new SkipTaskRequest(); + Map taskInputMap = new HashMap(); + for (Map.Entry pair : from.getTaskInputMap().entrySet()) { + taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setTaskInput(taskInputMap); + Map taskOutputMap = new HashMap(); + for (Map.Entry pair : from.getTaskOutputMap().entrySet()) { + taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setTaskOutput(taskOutputMap); + return to; + } + + public static StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { + StartWorkflowRequest to = new StartWorkflowRequest(); + to.setName( from.getName() ); + to.setVersion( 
from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + to.setTaskToDomain( from.getTaskToDomainMap() ); + return to; + } + + public static SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { + SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); + to.setName( from.getName() ); + to.setVersion( toProto( from.getVersion() ) ); + return to.build(); + } + + public static SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { + SubWorkflowParams to = new SubWorkflowParams(); + to.setName( from.getName() ); + to.setVersion( fromProto( from.getVersion() ) ); + return to; + } + + public static WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { + WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setVersion( from.getVersion() ); + for (WorkflowTask elem : from.getTasks()) { + to.addTasks( toProto(elem) ); + } + to.addAllInputParameters( from.getInputParameters() ); + for (Map.Entry pair : from.getOutputParameters().entrySet()) { + to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setFailureWorkflow( from.getFailureWorkflow() ); + to.setSchemaVersion( from.getSchemaVersion() ); + return to.build(); + } + + public static WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { + WorkflowDef to = new WorkflowDef(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setVersion( from.getVersion() ); + to.setTasks( from.getTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(LinkedList::new)) ); + to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + Map outputParametersMap = new HashMap(); + for (Map.Entry pair : from.getOutputParametersMap().entrySet()) { + outputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputParameters(outputParametersMap); + to.setFailureWorkflow( from.getFailureWorkflow() ); + to.setSchemaVersion( from.getSchemaVersion() ); + return to; + } + + public static WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { + WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); + to.setName( from.getName() ); + to.setTaskReferenceName( from.getTaskReferenceName() ); + to.setDescription( from.getDescription() ); + for (Map.Entry pair : from.getInputParameters().entrySet()) { + to.putInputParameters( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setType( from.getType() ); + to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); + to.setCaseValueParam( from.getCaseValueParam() ); + to.setCaseExpression( from.getCaseExpression() ); + for (Map.Entry> pair : from.getDecisionCases().entrySet()) { + to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); + to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + for (WorkflowTask elem : from.getDefaultCase()) { + to.addDefaultCase( toProto(elem) ); + } + for (List elem : from.getForkTasks()) { + to.addForkTasks( toProto(elem) ); + } + to.setStartDelay( from.getStartDelay() ); + to.setSubWorkflowParam( toProto( 
from.getSubWorkflowParam() ) ); + to.addAllJoinOn( from.getJoinOn() ); + to.setSink( from.getSink() ); + to.setOptional( from.isOptional() ); + return to.build(); + } + + public static WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { + WorkflowTask to = new WorkflowTask(); + to.setName( from.getName() ); + to.setTaskReferenceName( from.getTaskReferenceName() ); + to.setDescription( from.getDescription() ); + Map inputParametersMap = new HashMap(); + for (Map.Entry pair : from.getInputParametersMap().entrySet()) { + inputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputParameters(inputParametersMap); + to.setType( from.getType() ); + to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); + to.setCaseValueParam( from.getCaseValueParam() ); + to.setCaseExpression( from.getCaseExpression() ); + Map> decisionCasesMap = new HashMap>(); + for (Map.Entry pair : from.getDecisionCasesMap().entrySet()) { + decisionCasesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setDecisionCases(decisionCasesMap); + to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); + to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + to.setDefaultCase( from.getDefaultCaseList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setForkTasks( from.getForkTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setStartDelay( from.getStartDelay() ); + to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); + to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setSink( from.getSink() ); + to.setOptional( from.getOptional() ); + return to; + } + + public static WorkflowTaskPb.WorkflowTask.Type toProto(WorkflowTask.Type from) { + WorkflowTaskPb.WorkflowTask.Type to; + switch (from) { + case SIMPLE: to = WorkflowTaskPb.WorkflowTask.Type.SIMPLE; break; + case DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.DYNAMIC; break; + case FORK_JOIN: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN; break; + case FORK_JOIN_DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; + case DECISION: to = WorkflowTaskPb.WorkflowTask.Type.DECISION; break; + case JOIN: to = WorkflowTaskPb.WorkflowTask.Type.JOIN; break; + case SUB_WORKFLOW: to = WorkflowTaskPb.WorkflowTask.Type.SUB_WORKFLOW; break; + case EVENT: to = WorkflowTaskPb.WorkflowTask.Type.EVENT; break; + case WAIT: to = WorkflowTaskPb.WorkflowTask.Type.WAIT; break; + case USER_DEFINED: to = WorkflowTaskPb.WorkflowTask.Type.USER_DEFINED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static WorkflowTask.Type fromProto(WorkflowTaskPb.WorkflowTask.Type from) { + WorkflowTask.Type to; + switch (from) { + case SIMPLE: to = WorkflowTask.Type.SIMPLE; break; + case DYNAMIC: to = WorkflowTask.Type.DYNAMIC; break; + case FORK_JOIN: to = WorkflowTask.Type.FORK_JOIN; break; + case FORK_JOIN_DYNAMIC: to = WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; + case DECISION: to = WorkflowTask.Type.DECISION; break; + case JOIN: to = WorkflowTask.Type.JOIN; break; + case SUB_WORKFLOW: to = WorkflowTask.Type.SUB_WORKFLOW; break; + case EVENT: to = WorkflowTask.Type.EVENT; break; + case WAIT: to = WorkflowTask.Type.WAIT; break; + case USER_DEFINED: to = WorkflowTask.Type.USER_DEFINED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + 
from); + } + return to; + } + + public static TaskSummaryPb.TaskSummary toProto(TaskSummary from) { + TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); + to.setWorkflowId( from.getWorkflowId() ); + to.setWorkflowType( from.getWorkflowType() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setEndTime( from.getEndTime() ); + to.setStatus( toProto( from.getStatus() ) ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setExecutionTime( from.getExecutionTime() ); + to.setQueueWaitTime( from.getQueueWaitTime() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setTaskType( from.getTaskType() ); + to.setInput( from.getInput() ); + to.setOutput( from.getOutput() ); + to.setTaskId( from.getTaskId() ); + return to.build(); + } + + public static WorkflowPb.Workflow toProto(Workflow from) { + WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); + to.setStatus( toProto( from.getStatus() ) ); + to.setEndTime( from.getEndTime() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setParentWorkflowId( from.getParentWorkflowId() ); + to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + for (Task elem : from.getTasks()) { + to.addTasks( toProto(elem) ); + } + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setSchemaVersion( from.getSchemaVersion() ); + to.setEvent( from.getEvent() ); + to.putAllTaskToDomain( from.getTaskToDomain() ); + to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + return to.build(); + } + + public static Workflow fromProto(WorkflowPb.Workflow from) { + Workflow to = new Workflow(); + to.setStatus( fromProto( from.getStatus() ) ); + to.setEndTime( from.getEndTime() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setParentWorkflowId( from.getParentWorkflowId() ); + to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + to.setTasks( from.getTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + Map outputMap = new HashMap(); + for (Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setSchemaVersion( from.getSchemaVersion() ); + to.setEvent( from.getEvent() ); + to.setTaskToDomain( from.getTaskToDomainMap() ); + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); + return to; + } + + public static 
WorkflowPb.Workflow.WorkflowStatus toProto(Workflow.WorkflowStatus from) { + WorkflowPb.Workflow.WorkflowStatus to; + switch (from) { + case RUNNING: to = WorkflowPb.Workflow.WorkflowStatus.RUNNING; break; + case COMPLETED: to = WorkflowPb.Workflow.WorkflowStatus.COMPLETED; break; + case FAILED: to = WorkflowPb.Workflow.WorkflowStatus.FAILED; break; + case TIMED_OUT: to = WorkflowPb.Workflow.WorkflowStatus.TIMED_OUT; break; + case TERMINATED: to = WorkflowPb.Workflow.WorkflowStatus.TERMINATED; break; + case PAUSED: to = WorkflowPb.Workflow.WorkflowStatus.PAUSED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from) { + Workflow.WorkflowStatus to; + switch (from) { + case RUNNING: to = Workflow.WorkflowStatus.RUNNING; break; + case COMPLETED: to = Workflow.WorkflowStatus.COMPLETED; break; + case FAILED: to = Workflow.WorkflowStatus.FAILED; break; + case TIMED_OUT: to = Workflow.WorkflowStatus.TIMED_OUT; break; + case TERMINATED: to = Workflow.WorkflowStatus.TERMINATED; break; + case PAUSED: to = Workflow.WorkflowStatus.PAUSED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { + WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder(); + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setStartTime( from.getStartTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setEndTime( from.getEndTime() ); + to.setStatus( toProto( from.getStatus() ) ); + to.setInput( from.getInput() ); + to.setOutput( from.getOutput() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setExecutionTime( from.getExecutionTime() ); + to.setEvent( from.getEvent() ); + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + return to.build(); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java new file mode 100644 index 0000000000..897692efed --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java @@ -0,0 +1,81 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.ListValue; +import com.google.protobuf.NullValue; +import com.google.protobuf.Struct; +import com.google.protobuf.Value; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.proto.WorkflowTaskPb; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ProtoMapperBase { + public static Value toProto(Object val) { + Value.Builder builder = Value.newBuilder(); + + if (val == null) { + builder.setNullValue(NullValue.NULL_VALUE); + } else if (val instanceof Boolean) { + builder.setBoolValue((Boolean) val); + } else if (val instanceof Double) { + builder.setNumberValue((Double) val); + } else if (val instanceof String) { + builder.setStringValue((String) val); + } else if (val instanceof Map) { + Map map = (Map) val; + Struct.Builder struct = Struct.newBuilder(); + for (Map.Entry pair : map.entrySet()) { + struct.putFields(pair.getKey(), 
toProto(pair.getValue())); + } + builder.setStructValue(struct.build()); + } else if (val instanceof List) { + ListValue.Builder list = ListValue.newBuilder(); + for (Object obj : (List)val) { + list.addValues(toProto(obj)); + } + builder.setListValue(list.build()); + } else { + throw new ClassCastException("cannot map to Value type: "+val); + } + return builder.build(); + } + + public static Object fromProto(Value any) { + switch (any.getKindCase()) { + case NULL_VALUE: + return null; + case BOOL_VALUE: + return any.getBoolValue(); + case NUMBER_VALUE: + return any.getNumberValue(); + case STRING_VALUE: + return any.getStringValue(); + case STRUCT_VALUE: + Struct struct = any.getStructValue(); + Map map = new HashMap<>(); + for (Map.Entry pair : struct.getFieldsMap().entrySet()) { + map.put(pair.getKey(), fromProto(pair.getValue())); + } + return map; + case LIST_VALUE: + List list = new ArrayList<>(); + for (Value val : any.getListValue().getValuesList()) { + list.add(fromProto(val)); + } + return list; + default: + throw new ClassCastException("unset Value element: "+any); + } + } + + public static List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) { + return null; + } + + public static WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List list) { + return null; + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java new file mode 100644 index 0000000000..04f693bdad --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -0,0 +1,171 @@ +package com.netflix.conductor.grpc.server; + +import java.util.List; + +import com.google.protobuf.Empty; +import com.netflix.conductor.common.metadata.tasks.TaskExecLog; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.proto.TaskPb; +import com.netflix.conductor.grpc.TaskServiceGrpc; +import com.netflix.conductor.grpc.TaskServicePb; +import com.netflix.conductor.proto.TaskResultPb; +import io.grpc.stub.ServerCallStreamObserver; +import io.grpc.stub.StreamObserver; + +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.service.ExecutionService; +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.dao.QueueDAO; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; + +public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { + private static final Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class); + + private static final int MAX_TASK_COUNT = 100; + private static final int POLL_TIMEOUT_MS = 100; + + private final ExecutionService taskService; + private final QueueDAO queues; + + @Inject + public TaskServiceImpl(ExecutionService taskService, QueueDAO queues, Configuration config) { + this.taskService = taskService; + this.queues = queues; + } + + @Override + public void poll(TaskServicePb.PollRequest req, StreamObserver response) { + try { + List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), req.getDomain(), 1, POLL_TIMEOUT_MS); + if (!tasks.isEmpty()) { + TaskPb.Task t = ProtoMapper.toProto(tasks.get(0)); + response.onNext(t); + } + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public StreamObserver pollStream(StreamObserver observer) { + final ServerCallStreamObserver responseObserver = + (ServerCallStreamObserver) 
observer; + + return new StreamObserver() { + int pending = 0; + + @Override + public void onNext(TaskServicePb.PollRequest req) { + pending += req.getTaskCount(); + + try { + List tasks = taskService.poll( + req.getTaskType(), req.getWorkerId(), req.getDomain(), + pending, POLL_TIMEOUT_MS); + + for (Task task : tasks) { + responseObserver.onNext(ProtoMapper.toProto(task)); + pending--; + } + } catch (Exception e) { + responseObserver.onError(e); + } + } + + @Override + public void onError(Throwable t) { + responseObserver.onError(t); + } + + @Override + public void onCompleted() { + responseObserver.onCompleted(); + } + }; + } + + @Override + public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamObserver response) { + final int count = (req.getCount() != 0) ? req.getCount() : MAX_TASK_COUNT; + + try { + List tasks = taskService.getTasks(req.getTaskType(), req.getStartKey(), count); + TaskServicePb.TasksInProgressResponse.Builder builder = + TaskServicePb.TasksInProgressResponse.newBuilder(); + + for (Task t : tasks) { + builder.addTasks(ProtoMapper.toProto(t)); + } + + response.onNext(builder.build()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, StreamObserver response) { + try { + Task t = taskService.getPendingTaskForWorkflow(req.getTaskRefName(), req.getWorkflowId()); + response.onNext(ProtoMapper.toProto(t)); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void updateTask(TaskResultPb.TaskResult req, StreamObserver response) { + try { + TaskResult task = ProtoMapper.fromProto(req); + taskService.updateTask(task); + + TaskServicePb.TaskUpdateResponse resp = TaskServicePb.TaskUpdateResponse + .newBuilder() + .setTaskId(task.getTaskId()) + .build(); + response.onNext(resp); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void ackTask(TaskServicePb.AckTaskRequest req, StreamObserver response) { + try { + boolean ack = taskService.ackTaskReceived(req.getTaskId()); + TaskServicePb.AckTaskResponse resp = TaskServicePb.AckTaskResponse + .newBuilder().setAck(ack).build(); + response.onNext(resp); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void addLog(TaskServicePb.AddLogRequest req, StreamObserver response) { + taskService.log(req.getTaskId(), req.getLog()); + response.onCompleted(); + } + + @Override + public void getLogs(TaskServicePb.TaskId req, StreamObserver response) { + List logs = taskService.getTaskLogs(req.getTaskId()); + TaskServicePb.GetLogsResponse.Builder builder = TaskServicePb.GetLogsResponse.newBuilder(); + + for (TaskExecLog l : logs) { + builder.addLogs(ProtoMapper.toProto(l)); + } + + response.onNext(builder.build()); + response.onCompleted(); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java new file mode 100644 index 0000000000..f1d1f1e42c --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java @@ -0,0 +1,282 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.Empty; +import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import 
com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.run.SearchResult; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.core.execution.WorkflowExecutor; +import com.netflix.conductor.grpc.SearchPb; +import com.netflix.conductor.proto.RerunWorkflowRequestPb; +import com.netflix.conductor.proto.StartWorkflowRequestPb; +import com.netflix.conductor.proto.WorkflowPb; +import com.netflix.conductor.grpc.WorkflowServiceGrpc; +import com.netflix.conductor.grpc.WorkflowServicePb; +import com.netflix.conductor.service.ExecutionService; +import com.netflix.conductor.service.MetadataService; +import io.grpc.Status; +import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +public class WorkflowServiceImpl extends WorkflowServiceGrpc.WorkflowServiceImplBase { + private static final Logger logger = LoggerFactory.getLogger(WorkflowServiceImpl.class); + + private WorkflowExecutor executor; + + private ExecutionService service; + + private MetadataService metadata; + + private int maxSearchSize; + + @Inject + public WorkflowServiceImpl(WorkflowExecutor executor, ExecutionService service, MetadataService metadata, Configuration config) { + this.executor = executor; + this.service = service; + this.metadata = metadata; + this.maxSearchSize = config.getIntProperty("workflow.max.search.size", 5_000); + } + + private WorkflowServicePb.WorkflowId newWorkflowId(String id) { + return WorkflowServicePb.WorkflowId + .newBuilder() + .setWorkflowId(id) + .build(); + } + + @Override + public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, StreamObserver response) { + StartWorkflowRequest request = ProtoMapper.fromProto(pbRequest); + WorkflowDef def = metadata.getWorkflowDef(request.getName(), request.getVersion()); + if(def == null){ + response.onError(Status.NOT_FOUND + .withDescription("No such workflow found by name="+request.getName()) + .asRuntimeException() + ); + return; + } + + try { + String id = executor.startWorkflow( + def.getName(), def.getVersion(), request.getCorrelationId(), + request.getInput(), null, request.getTaskToDomain()); + response.onNext(newWorkflowId(id)); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserver response) { + final String name = req.getName(); + final boolean includeClosed = req.getIncludeClosed(); + final boolean includeTasks = req.getIncludeTasks(); + + WorkflowServicePb.GetWorkflowsResponse.Builder builder = WorkflowServicePb.GetWorkflowsResponse.newBuilder(); + + for (String correlationId : req.getCorrelationIdList()) { + WorkflowServicePb.GetWorkflowsResponse.Workflows.Builder pbWorkflows = + WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder(); + for (Workflow wf : service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks)) { + pbWorkflows.addWorkflows(ProtoMapper.toProto(wf)); + } + builder.putWorkflowsById(correlationId, pbWorkflows.build()); + } + + response.onNext(builder.build()); + response.onCompleted(); + } + + @Override + public void getWorkflowStatus(WorkflowServicePb.GetWorkflowStatusRequest 
req, StreamObserver response) { + try { + Workflow workflow = service.getExecutionStatus(req.getWorkflowId(), req.getIncludeTasks()); + response.onNext(ProtoMapper.toProto(workflow)); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void removeWorkflow(WorkflowServicePb.RemoveWorkflowRequest req, StreamObserver response) { + try { + service.removeWorkflow(req.getWorkflodId(), req.getArchiveWorkflow()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void getRunningWorkflows(WorkflowServicePb.GetRunningWorkflowsRequest req, StreamObserver response) { + try { + List workflowIds; + + if (req.getStartTime() != 0 && req.getEndTime() != 0) { + workflowIds = executor.getWorkflows(req.getName(), req.getVersion(), req.getStartTime(), req.getEndTime()); + } else { + workflowIds = executor.getRunningWorkflowIds(req.getName()); + } + + response.onNext( + WorkflowServicePb.GetRunningWorkflowsResponse.newBuilder() + .addAllWorkflowIds(workflowIds) + .build() + ); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void decideWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.decide(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void pauseWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.pauseWorkflow(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void resumeWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.resumeWorkflow(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamObserver response) { + try { + SkipTaskRequest skipTask = ProtoMapper.fromProto(req.getRequest()); + executor.skipTaskFromWorkflow(req.getWorkflowId(), req.getTaskReferenceName(), skipTask); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, StreamObserver response) { + try { + String id = executor.rerun(ProtoMapper.fromProto(req)); + response.onNext(newWorkflowId(id)); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void restartWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.rewind(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void retryWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.retry(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void resetWorkflowCallbacks(WorkflowServicePb.WorkflowId req, StreamObserver response) { + try { + executor.resetCallbacksForInProgressTasks(req.getWorkflowId()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + response.onError(e); + } + } + + @Override + public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, StreamObserver response) { + try { + executor.terminateWorkflow(req.getWorkflowId(), req.getReason()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } catch (Exception e) { + 
response.onError(e); + } + } + + private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamObserver response) { + final int start = req.getStart(); + final int size = (req.getSize() != 0) ? req.getSize() : maxSearchSize; + final List sort = convertSort(req.getSort()); + final String freeText = req.getFreeText().isEmpty() ? "*" : req.getFreeText(); + final String query = req.getQuery(); + + if (size > maxSearchSize) { + response.onError( + Status.INVALID_ARGUMENT + .withDescription("Cannot return more than "+maxSearchSize+" results") + .asRuntimeException() + ); + return; + } + + SearchResult searchResult; + if (searchByTask) { + searchResult = service.searchWorkflowByTasks(query, freeText, start, size, sort); + } else { + searchResult = service.search(query, freeText, start, size, sort); + } + + // TODO + // response.onNext(ProtoMapper.toProto(searchResult)); + response.onCompleted(); + } + + private List convertSort(String sortStr) { + List list = new ArrayList(); + if(sortStr != null && sortStr.length() != 0){ + list = Arrays.asList(sortStr.split("\\|")); + } + return list; + } + + @Override + public void search(SearchPb.SearchRequest request, StreamObserver responseObserver) { + doSearch(false, request, responseObserver); + } + + @Override + public void searchByTasks(SearchPb.SearchRequest request, StreamObserver responseObserver) { + doSearch(true, request, responseObserver); + } +} diff --git a/settings.gradle b/settings.gradle index 8823e1c8e9..c73f6287f9 100644 --- a/settings.gradle +++ b/settings.gradle @@ -3,6 +3,6 @@ rootProject.name='conductor' include 'client','common','contribs','core','es5-persistence','jersey','mysql-persistence' include 'redis-persistence','server','test-harness','ui' include 'protogen' -include 'grpc' +include 'grpc', 'grpc-server' rootProject.children.each {it.name="conductor-${it.name}"} From 5ef5ce269ec68e12a1fbd773cd99506786e01ab2 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 28 May 2018 15:48:09 +0200 Subject: [PATCH 011/163] client/gogrpc: Implement a GRPC-based Go client (WIP) This is the first iteration of the GRPC wrapper for Go. It has been implemented separately from the old HTTP/REST-based client in order to fix some shortcomings of that client: most notably, this version plays well with the Go ecosystem (it is properly placed in the GOPATH and requires no installation), can be vendored as an external dependency, and uses a streaming GRPC API to provide a low-latency, high-throughput interface for job resolution. 
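As an illustration of the intended usage, a minimal polling round-trip could look like the sketch below. This is a sketch only: it assumes a Conductor gRPC server reachable on localhost:8090, the "encode_video" task type and "worker-1" worker id are made up, and the unary Poll RPC is used as generated into task_service.pb.go; error handling is kept to the bare minimum.

	package main

	import (
		"context"
		"fmt"
		"log"

		"github.com/netflix/conductor/client/gogrpc/conductor"
		pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc"
		"google.golang.org/grpc"
	)

	func main() {
		// Dial the Conductor server; the address is illustrative only.
		client, err := conductor.NewClient("localhost:8090", grpc.WithInsecure())
		if err != nil {
			log.Fatalf("failed to dial: %v", err)
		}
		defer client.Shutdown()

		// Ask for a single pending task of the given type via the unary Poll RPC.
		task, err := client.Tasks().Poll(context.Background(), &pb.PollRequest{
			TaskType: "encode_video",
			WorkerId: "worker-1",
		})
		if err != nil {
			log.Fatalf("poll failed: %v", err)
		}
		fmt.Printf("polled task %s\n", task.GetTaskId())
	}

The streaming poll (pollStream on the server side) follows the same shape but keeps a stream open, which is what provides the low-latency job resolution mentioned above.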
--- client/gogrpc/.gitignore | 1 + client/gogrpc/Gopkg.lock | 95 ++ client/gogrpc/Gopkg.toml | 43 + client/gogrpc/Makefile | 5 + client/gogrpc/conductor/client.go | 61 + .../conductor/grpc/metadata_service.pb.go | 640 +++++++++ client/gogrpc/conductor/grpc/search.pb.go | 166 +++ .../gogrpc/conductor/grpc/task_service.pb.go | 869 ++++++++++++ .../conductor/grpc/workflow_service.pb.go | 1171 +++++++++++++++++ .../conductor/model/dynamicforkjointask.pb.go | 124 ++ .../model/dynamicforkjointasklist.pb.go | 82 ++ .../conductor/model/eventexecution.pb.go | 174 +++ client/gogrpc/conductor/model/polldata.pb.go | 105 ++ .../model/rerunworkflowrequest.pb.go | 128 ++ .../conductor/model/skiptaskrequest.pb.go | 99 ++ .../model/startworkflowrequest.pb.go | 127 ++ .../conductor/model/subworkflowparams.pb.go | 92 ++ client/gogrpc/conductor/model/task.pb.go | 380 ++++++ client/gogrpc/conductor/model/taskdef.pb.go | 244 ++++ .../gogrpc/conductor/model/taskexeclog.pb.go | 98 ++ .../gogrpc/conductor/model/taskresult.pb.go | 176 +++ .../gogrpc/conductor/model/tasksummary.pb.go | 217 +++ client/gogrpc/conductor/model/workflow.pb.go | 278 ++++ .../gogrpc/conductor/model/workflowdef.pb.go | 152 +++ .../conductor/model/workflowsummary.pb.go | 200 +++ .../gogrpc/conductor/model/workflowtask.pb.go | 344 +++++ client/gogrpc/conductor/worker.go | 151 +++ client/gogrpc/conductor/worker_test.go | 88 ++ 28 files changed, 6310 insertions(+) create mode 100644 client/gogrpc/.gitignore create mode 100644 client/gogrpc/Gopkg.lock create mode 100644 client/gogrpc/Gopkg.toml create mode 100644 client/gogrpc/Makefile create mode 100644 client/gogrpc/conductor/client.go create mode 100644 client/gogrpc/conductor/grpc/metadata_service.pb.go create mode 100644 client/gogrpc/conductor/grpc/search.pb.go create mode 100644 client/gogrpc/conductor/grpc/task_service.pb.go create mode 100644 client/gogrpc/conductor/grpc/workflow_service.pb.go create mode 100644 client/gogrpc/conductor/model/dynamicforkjointask.pb.go create mode 100644 client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go create mode 100644 client/gogrpc/conductor/model/eventexecution.pb.go create mode 100644 client/gogrpc/conductor/model/polldata.pb.go create mode 100644 client/gogrpc/conductor/model/rerunworkflowrequest.pb.go create mode 100644 client/gogrpc/conductor/model/skiptaskrequest.pb.go create mode 100644 client/gogrpc/conductor/model/startworkflowrequest.pb.go create mode 100644 client/gogrpc/conductor/model/subworkflowparams.pb.go create mode 100644 client/gogrpc/conductor/model/task.pb.go create mode 100644 client/gogrpc/conductor/model/taskdef.pb.go create mode 100644 client/gogrpc/conductor/model/taskexeclog.pb.go create mode 100644 client/gogrpc/conductor/model/taskresult.pb.go create mode 100644 client/gogrpc/conductor/model/tasksummary.pb.go create mode 100644 client/gogrpc/conductor/model/workflow.pb.go create mode 100644 client/gogrpc/conductor/model/workflowdef.pb.go create mode 100644 client/gogrpc/conductor/model/workflowsummary.pb.go create mode 100644 client/gogrpc/conductor/model/workflowtask.pb.go create mode 100644 client/gogrpc/conductor/worker.go create mode 100644 client/gogrpc/conductor/worker_test.go diff --git a/client/gogrpc/.gitignore b/client/gogrpc/.gitignore new file mode 100644 index 0000000000..49ce3c193f --- /dev/null +++ b/client/gogrpc/.gitignore @@ -0,0 +1 @@ +/vendor \ No newline at end of file diff --git a/client/gogrpc/Gopkg.lock b/client/gogrpc/Gopkg.lock new file mode 100644 index 0000000000..bd5e41573d --- 
/dev/null +++ b/client/gogrpc/Gopkg.lock @@ -0,0 +1,95 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. + + +[[projects]] + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/empty", + "ptypes/struct", + "ptypes/timestamp" + ] + revision = "b4deda0973fb4c70b50d226b1af49f3da59f5265" + version = "v1.1.0" + +[[projects]] + branch = "master" + name = "golang.org/x/net" + packages = [ + "context", + "http/httpguts", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "trace" + ] + revision = "dfa909b99c79129e1100513e5cd36307665e5723" + +[[projects]] + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", + "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable" + ] + revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0" + version = "v0.3.0" + +[[projects]] + branch = "master" + name = "google.golang.org/genproto" + packages = ["googleapis/rpc/status"] + revision = "694d95ba50e67b2e363f3483057db5d4910c18f9" + +[[projects]] + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "channelz", + "codes", + "connectivity", + "credentials", + "encoding", + "encoding/proto", + "grpclb/grpc_lb_v1/messages", + "grpclog", + "internal", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap", + "transport" + ] + revision = "41344da2231b913fa3d983840a57a6b1b7b631a1" + version = "v1.12.0" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "e200db17a1db72f216cd0112ddedf0980f58c213db03301286996bc4cb694904" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/client/gogrpc/Gopkg.toml b/client/gogrpc/Gopkg.toml new file mode 100644 index 0000000000..a0c8a0fad4 --- /dev/null +++ b/client/gogrpc/Gopkg.toml @@ -0,0 +1,43 @@ +# Gopkg.toml example +# +# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" +# +# [prune] +# non-go = false +# go-tests = true +# unused-packages = true + + +[[constraint]] + name = "github.com/golang/protobuf" + version = "1.1.0" + +[[constraint]] + branch = "master" + name = "golang.org/x/net" + +[[constraint]] + name = "google.golang.org/grpc" + version = "1.12.0" + +[prune] + go-tests = true + unused-packages = true + non-go = true diff --git a/client/gogrpc/Makefile b/client/gogrpc/Makefile new file mode 100644 index 0000000000..33c9ed0f88 --- /dev/null +++ b/client/gogrpc/Makefile @@ -0,0 +1,5 @@ +PROTO_SRC = ../../grpc/src/main/proto + +proto: + protoc -I $(PROTO_SRC) $(PROTO_SRC)/model/*.proto --go_out=$(GOPATH)/src + protoc -I $(PROTO_SRC) $(PROTO_SRC)/grpc/*.proto --go_out=plugins=grpc:$(GOPATH)/src \ No newline at end of file diff --git a/client/gogrpc/conductor/client.go b/client/gogrpc/conductor/client.go new file mode 100644 index 0000000000..6c4880c989 --- /dev/null +++ b/client/gogrpc/conductor/client.go @@ -0,0 +1,61 @@ +package conductor + +import ( + pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + grpc "google.golang.org/grpc" +) + +type TasksClient interface { + Tasks() pb.TaskServiceClient + Shutdown() +} + +type MetadataClient interface { + Metadata() pb.MetadataServiceClient + Shutdown() +} + +type WorkflowsClient interface { + Workflows() pb.WorkflowServiceClient + Shutdown() +} + +type Client struct { + conn *grpc.ClientConn + tasks pb.TaskServiceClient + metadata pb.MetadataServiceClient + workflows pb.WorkflowServiceClient +} + +func NewClient(address string, options ...grpc.DialOption) (*Client, error) { + conn, err := grpc.Dial(address, options...) + if err != nil { + return nil, err + } + return &Client{conn: conn}, nil +} + +func (client *Client) Shutdown() { + client.conn.Close() +} + +func (client *Client) Tasks() pb.TaskServiceClient { + if client.tasks == nil { + client.tasks = pb.NewTaskServiceClient(client.conn) + } + return client.tasks +} + +func (client *Client) Metadata() pb.MetadataServiceClient { + if client.metadata == nil { + client.metadata = pb.NewMetadataServiceClient(client.conn) + } + return client.metadata +} + +func (client *Client) Workflows() pb.WorkflowServiceClient { + if client.workflows == nil { + client.workflows = pb.NewWorkflowServiceClient(client.conn) + } + return client.workflows +} diff --git a/client/gogrpc/conductor/grpc/metadata_service.pb.go b/client/gogrpc/conductor/grpc/metadata_service.pb.go new file mode 100644 index 0000000000..5782159370 --- /dev/null +++ b/client/gogrpc/conductor/grpc/metadata_service.pb.go @@ -0,0 +1,640 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/metadata_service.proto + +package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
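+// (The blank-identifier assignments below reference the proto, fmt and math packages so the imports above remain legal even when the generated code does not otherwise use them.) 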
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type UpdateWorkflowsRequest struct { + Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} } +func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowsRequest) ProtoMessage() {} +func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{0} +} +func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) +} +func (m *UpdateWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowsRequest.Merge(dst, src) +} +func (m *UpdateWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowsRequest.Size(m) +} +func (m *UpdateWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowsRequest proto.InternalMessageInfo + +func (m *UpdateWorkflowsRequest) GetDefs() []*model.WorkflowDef { + if m != nil { + return m.Defs + } + return nil +} + +type CreateTasksRequest struct { + Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } +func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTasksRequest) ProtoMessage() {} +func (*CreateTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{1} +} +func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) +} +func (m *CreateTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTasksRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTasksRequest.Merge(dst, src) +} +func (m *CreateTasksRequest) XXX_Size() int { + return xxx_messageInfo_CreateTasksRequest.Size(m) +} +func (m *CreateTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTasksRequest proto.InternalMessageInfo + +func (m *CreateTasksRequest) GetDefs() []*model.TaskDef { + if m != nil { + return m.Defs + } + return nil +} + +type GetWorkflowRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowRequest) Reset() { *m = 
GetWorkflowRequest{} } +func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowRequest) ProtoMessage() {} +func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{2} +} +func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) +} +func (m *GetWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowRequest.Merge(dst, src) +} +func (m *GetWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowRequest.Size(m) +} +func (m *GetWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowRequest proto.InternalMessageInfo + +func (m *GetWorkflowRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +type GetTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{3} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func init() { + proto.RegisterType((*UpdateWorkflowsRequest)(nil), "com.netflix.conductor.grpc.UpdateWorkflowsRequest") + proto.RegisterType((*CreateTasksRequest)(nil), "com.netflix.conductor.grpc.CreateTasksRequest") + proto.RegisterType((*GetWorkflowRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowRequest") + proto.RegisterType((*GetTaskRequest)(nil), "com.netflix.conductor.grpc.GetTaskRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetadataServiceClient is the client API for MetadataService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
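+// +// Illustrative usage sketch (not part of the generated API; ctx stands for a caller-supplied context.Context and cc for an established *grpc.ClientConn): +// +// mc := NewMetadataServiceClient(cc) +// def, err := mc.GetWorkflow(ctx, &GetWorkflowRequest{Name: "my_flow", Version: 1}) +// // On success def is a *model.WorkflowDef; otherwise err carries the gRPC status. 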
+type MetadataServiceClient interface { + // POST /workflow + CreateWorkflow(ctx context.Context, in *model.WorkflowDef, opts ...grpc.CallOption) (*empty.Empty, error) + // PUT /workflow + UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // GET /workflow/{name} + GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*model.WorkflowDef, error) + // GET /workflow + GetAllWorkflows(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllWorkflowsClient, error) + // POST /taskdefs + CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // PUT /taskdefs + UpdateTask(ctx context.Context, in *model.TaskDef, opts ...grpc.CallOption) (*empty.Empty, error) + // GET /taskdefs/{tasktype} + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*model.TaskDef, error) + // GET /taskdefs + GetAllTasks(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllTasksClient, error) + // DELETE /taskdefs/{tasktype} + DeleteTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) +} + +type metadataServiceClient struct { + cc *grpc.ClientConn +} + +func NewMetadataServiceClient(cc *grpc.ClientConn) MetadataServiceClient { + return &metadataServiceClient{cc} +} + +func (c *metadataServiceClient) CreateWorkflow(ctx context.Context, in *model.WorkflowDef, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/CreateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/UpdateWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*model.WorkflowDef, error) { + out := new(model.WorkflowDef) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/GetWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetAllWorkflows(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllWorkflowsClient, error) { + stream, err := c.cc.NewStream(ctx, &_MetadataService_serviceDesc.Streams[0], "/com.netflix.conductor.grpc.MetadataService/GetAllWorkflows", opts...) 
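+ // The wrapper below sends the single Empty request, closes the send side, and then surfaces the server-side stream of WorkflowDef messages through Recv. 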
+ if err != nil { + return nil, err + } + x := &metadataServiceGetAllWorkflowsClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type MetadataService_GetAllWorkflowsClient interface { + Recv() (*model.WorkflowDef, error) + grpc.ClientStream +} + +type metadataServiceGetAllWorkflowsClient struct { + grpc.ClientStream +} + +func (x *metadataServiceGetAllWorkflowsClient) Recv() (*model.WorkflowDef, error) { + m := new(model.WorkflowDef) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *metadataServiceClient) CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/CreateTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateTask(ctx context.Context, in *model.TaskDef, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/UpdateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*model.TaskDef, error) { + out := new(model.TaskDef) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetAllTasks(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllTasksClient, error) { + stream, err := c.cc.NewStream(ctx, &_MetadataService_serviceDesc.Streams[1], "/com.netflix.conductor.grpc.MetadataService/GetAllTasks", opts...) + if err != nil { + return nil, err + } + x := &metadataServiceGetAllTasksClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type MetadataService_GetAllTasksClient interface { + Recv() (*model.TaskDef, error) + grpc.ClientStream +} + +type metadataServiceGetAllTasksClient struct { + grpc.ClientStream +} + +func (x *metadataServiceGetAllTasksClient) Recv() (*model.TaskDef, error) { + m := new(model.TaskDef) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *metadataServiceClient) DeleteTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/DeleteTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// MetadataServiceServer is the server API for MetadataService service. 
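+// Implementations are attached to a grpc.Server via RegisterMetadataServiceServer below; for example (myMetadataService being a hypothetical implementation, lis a net.Listener): +// +// srv := grpc.NewServer() +// RegisterMetadataServiceServer(srv, &myMetadataService{}) +// srv.Serve(lis) 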
+type MetadataServiceServer interface { + // POST /workflow + CreateWorkflow(context.Context, *model.WorkflowDef) (*empty.Empty, error) + // PUT /workflow + UpdateWorkflows(context.Context, *UpdateWorkflowsRequest) (*empty.Empty, error) + // GET /workflow/{name} + GetWorkflow(context.Context, *GetWorkflowRequest) (*model.WorkflowDef, error) + // GET /workflow + GetAllWorkflows(*empty.Empty, MetadataService_GetAllWorkflowsServer) error + // POST /taskdefs + CreateTasks(context.Context, *CreateTasksRequest) (*empty.Empty, error) + // PUT /taskdefs + UpdateTask(context.Context, *model.TaskDef) (*empty.Empty, error) + // GET /taskdefs/{tasktype} + GetTask(context.Context, *GetTaskRequest) (*model.TaskDef, error) + // GET /taskdefs + GetAllTasks(*empty.Empty, MetadataService_GetAllTasksServer) error + // DELETE /taskdefs/{tasktype} + DeleteTask(context.Context, *GetTaskRequest) (*empty.Empty, error) +} + +func RegisterMetadataServiceServer(s *grpc.Server, srv MetadataServiceServer) { + s.RegisterService(&_MetadataService_serviceDesc, srv) +} + +func _MetadataService_CreateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.WorkflowDef) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/CreateWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).CreateWorkflow(ctx, req.(*model.WorkflowDef)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/UpdateWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, req.(*UpdateWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/GetWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetWorkflow(ctx, req.(*GetWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetAllWorkflows_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(empty.Empty) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(MetadataServiceServer).GetAllWorkflows(m, &metadataServiceGetAllWorkflowsServer{stream}) +} + +type MetadataService_GetAllWorkflowsServer interface { + Send(*model.WorkflowDef) error + 
grpc.ServerStream +} + +type metadataServiceGetAllWorkflowsServer struct { + grpc.ServerStream +} + +func (x *metadataServiceGetAllWorkflowsServer) Send(m *model.WorkflowDef) error { + return x.ServerStream.SendMsg(m) +} + +func _MetadataService_CreateTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/CreateTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).CreateTasks(ctx, req.(*CreateTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.TaskDef) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateTask(ctx, req.(*model.TaskDef)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetAllTasks_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(empty.Empty) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(MetadataServiceServer).GetAllTasks(m, &metadataServiceGetAllTasksServer{stream}) +} + +type MetadataService_GetAllTasksServer interface { + Send(*model.TaskDef) error + grpc.ServerStream +} + +type metadataServiceGetAllTasksServer struct { + grpc.ServerStream +} + +func (x *metadataServiceGetAllTasksServer) Send(m *model.TaskDef) error { + return x.ServerStream.SendMsg(m) +} + +func _MetadataService_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.MetadataService/DeleteTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).DeleteTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetadataService_serviceDesc = 
grpc.ServiceDesc{ + ServiceName: "com.netflix.conductor.grpc.MetadataService", + HandlerType: (*MetadataServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateWorkflow", + Handler: _MetadataService_CreateWorkflow_Handler, + }, + { + MethodName: "UpdateWorkflows", + Handler: _MetadataService_UpdateWorkflows_Handler, + }, + { + MethodName: "GetWorkflow", + Handler: _MetadataService_GetWorkflow_Handler, + }, + { + MethodName: "CreateTasks", + Handler: _MetadataService_CreateTasks_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _MetadataService_UpdateTask_Handler, + }, + { + MethodName: "GetTask", + Handler: _MetadataService_GetTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _MetadataService_DeleteTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetAllWorkflows", + Handler: _MetadataService_GetAllWorkflows_Handler, + ServerStreams: true, + }, + { + StreamName: "GetAllTasks", + Handler: _MetadataService_GetAllTasks_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/metadata_service.proto", +} + +func init() { + proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_0ab7c38dd4dbd338) +} + +var fileDescriptor_metadata_service_0ab7c38dd4dbd338 = []byte{ + // 468 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdf, 0x6b, 0xd3, 0x50, + 0x14, 0xc7, 0x89, 0xce, 0xcd, 0x9d, 0xc2, 0x8a, 0x47, 0x98, 0x25, 0x7d, 0x29, 0xc5, 0x87, 0x22, + 0x78, 0x33, 0xea, 0x8b, 0x82, 0x2f, 0xab, 0x93, 0x82, 0xb0, 0x31, 0xe2, 0xaa, 0xa0, 0xc8, 0x48, + 0x93, 0x93, 0x18, 0x7a, 0x93, 0x1b, 0x93, 0x93, 0xcd, 0xfe, 0x41, 0xfe, 0x9f, 0x72, 0xf3, 0xa3, + 0xad, 0x5b, 0x53, 0xb3, 0xb7, 0xde, 0x73, 0xef, 0xf9, 0x9c, 0x73, 0xbe, 0xdf, 0xd3, 0x40, 0x3f, + 0x48, 0x13, 0xd7, 0x8a, 0x88, 0x1d, 0xcf, 0x61, 0xe7, 0x3a, 0xa3, 0xf4, 0x26, 0x74, 0x49, 0x24, + 0xa9, 0x62, 0x85, 0xa6, 0xab, 0x22, 0x11, 0x13, 0xfb, 0x32, 0xfc, 0x2d, 0x5c, 0x15, 0x7b, 0xb9, + 0xcb, 0x2a, 0x15, 0x3a, 0xc5, 0xec, 0x07, 0x4a, 0x05, 0x92, 0xac, 0xe2, 0xe5, 0x3c, 0xf7, 0x2d, + 0x8a, 0x12, 0x5e, 0x96, 0x89, 0xe6, 0xf3, 0x48, 0x79, 0x24, 0x2d, 0x76, 0xb2, 0x85, 0x47, 0x7e, + 0x15, 0x7c, 0x51, 0x06, 0x6f, 0x55, 0xba, 0xf0, 0xa5, 0xba, 0x5d, 0x5d, 0x0c, 0xbf, 0xc0, 0xf1, + 0x2c, 0xf1, 0x1c, 0xa6, 0xaf, 0xd5, 0x55, 0x66, 0xd3, 0xaf, 0x9c, 0x32, 0xc6, 0xf7, 0xb0, 0xe7, + 0x91, 0x9f, 0xf5, 0x8c, 0xc1, 0xe3, 0x51, 0x67, 0x3c, 0x12, 0xdb, 0xfb, 0x29, 0x28, 0xa2, 0x4e, + 0x3e, 0x23, 0xdf, 0x2e, 0xb2, 0x86, 0x17, 0x80, 0x1f, 0x52, 0x72, 0x98, 0xae, 0x9c, 0x6c, 0xb1, + 0x62, 0xbe, 0xfd, 0x87, 0xf9, 0x72, 0x27, 0x53, 0x27, 0xae, 0x79, 0x13, 0xc0, 0x29, 0x71, 0x5d, + 0xa7, 0xe6, 0x21, 0xec, 0xc5, 0x4e, 0x44, 0x3d, 0x63, 0x60, 0x8c, 0x0e, 0xed, 0xe2, 0x37, 0xf6, + 0xe0, 0xe0, 0x86, 0xd2, 0x2c, 0x54, 0x71, 0xef, 0xd1, 0xc0, 0x18, 0x3d, 0xb1, 0xeb, 0xe3, 0xf0, + 0x35, 0x1c, 0x4d, 0x89, 0x35, 0xb7, 0xce, 0xef, 0xc3, 0xa1, 0xd6, 0xe9, 0x9a, 0x97, 0x49, 0x0d, + 0x79, 0xaa, 0x03, 0x57, 0xcb, 0x84, 0xc6, 0x7f, 0xf6, 0xa1, 0x7b, 0x5e, 0x99, 0xf3, 0xb9, 0xf4, + 0x06, 0x6d, 0x38, 0x2a, 0xc7, 0xaa, 0x3b, 0xc1, 0xd6, 0xc2, 0x98, 0xc7, 0xa2, 0xb4, 0x4d, 0xd4, + 0xb6, 0x89, 0x8f, 0xda, 0x36, 0xfc, 0x01, 0xdd, 0x3b, 0x16, 0xe0, 0x58, 0x34, 0xbb, 0x2f, 0xb6, + 0xfb, 0xd5, 0x88, 0x0f, 0xa0, 0xb3, 0xa1, 0x1c, 0x8a, 0x5d, 0xe8, 0xfb, 0x12, 0x9b, 0xad, 0xe7, + 0xc3, 0x19, 0x74, 0xa7, 0xc4, 0xa7, 0x52, 0xae, 0xe7, 0x68, 0xe8, 0xa9, 0x3d, 0xf4, 0xc4, 0xc0, + 0x19, 0x74, 0x36, 0x36, 0x69, 0x77, 0xff, 0xf7, 0x57, 0xae, 0x51, 0x96, 0x4f, 
0x00, 0xa5, 0x90, + 0xfa, 0x35, 0xb6, 0x5a, 0xc5, 0x46, 0xd6, 0x77, 0x38, 0xa8, 0x16, 0x0b, 0x5f, 0xfd, 0x47, 0xde, + 0x8d, 0xed, 0x33, 0x5b, 0x15, 0xc5, 0xf3, 0xc2, 0xbf, 0x53, 0x29, 0xcb, 0xf9, 0x9b, 0x24, 0x6d, + 0x05, 0x3b, 0x31, 0xf0, 0x12, 0xe0, 0x8c, 0x24, 0x55, 0x73, 0x3f, 0xa4, 0xdd, 0x86, 0xca, 0x93, + 0x8b, 0xc9, 0xb3, 0x3b, 0x7f, 0x93, 0xcb, 0xf9, 0xb7, 0x77, 0x41, 0xc8, 0x3f, 0xf3, 0xb9, 0xc6, + 0x5b, 0x15, 0xde, 0x5a, 0xe1, 0x2d, 0x57, 0x86, 0x14, 0xb3, 0x15, 0xa8, 0xe2, 0x0b, 0xb8, 0x8e, + 0xeb, 0xe3, 0x7c, 0xbf, 0xe0, 0xbf, 0xf9, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xa7, 0x96, 0x24, 0x65, + 0x1f, 0x05, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/search.pb.go b/client/gogrpc/conductor/grpc/search.pb.go new file mode 100644 index 0000000000..cebe7a9a12 --- /dev/null +++ b/client/gogrpc/conductor/grpc/search.pb.go @@ -0,0 +1,166 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/search.proto + +package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SearchRequest struct { + Start int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + Size int32 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"` + Sort string `protobuf:"bytes,3,opt,name=sort" json:"sort,omitempty"` + FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText" json:"free_text,omitempty"` + Query string `protobuf:"bytes,5,opt,name=query" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SearchRequest) Reset() { *m = SearchRequest{} } +func (m *SearchRequest) String() string { return proto.CompactTextString(m) } +func (*SearchRequest) ProtoMessage() {} +func (*SearchRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_search_4602c68037d6c24f, []int{0} +} +func (m *SearchRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SearchRequest.Unmarshal(m, b) +} +func (m *SearchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SearchRequest.Marshal(b, m, deterministic) +} +func (dst *SearchRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SearchRequest.Merge(dst, src) +} +func (m *SearchRequest) XXX_Size() int { + return xxx_messageInfo_SearchRequest.Size(m) +} +func (m *SearchRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SearchRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SearchRequest proto.InternalMessageInfo + +func (m *SearchRequest) GetStart() int32 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *SearchRequest) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *SearchRequest) GetSort() string { + if m != nil { + return m.Sort + } + return "" +} + +func (m *SearchRequest) GetFreeText() string { + if m != nil { + return m.FreeText + } + return "" +} + +func (m *SearchRequest) 
GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +type WorkflowSummarySearchResult struct { + TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits" json:"total_hits,omitempty"` + Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySearchResult{} } +func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } +func (*WorkflowSummarySearchResult) ProtoMessage() {} +func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { + return fileDescriptor_search_4602c68037d6c24f, []int{1} +} +func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) +} +func (m *WorkflowSummarySearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowSummarySearchResult.Marshal(b, m, deterministic) +} +func (dst *WorkflowSummarySearchResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowSummarySearchResult.Merge(dst, src) +} +func (m *WorkflowSummarySearchResult) XXX_Size() int { + return xxx_messageInfo_WorkflowSummarySearchResult.Size(m) +} +func (m *WorkflowSummarySearchResult) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowSummarySearchResult.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowSummarySearchResult proto.InternalMessageInfo + +func (m *WorkflowSummarySearchResult) GetTotalHits() int64 { + if m != nil { + return m.TotalHits + } + return 0 +} + +func (m *WorkflowSummarySearchResult) GetResults() []*model.WorkflowSummary { + if m != nil { + return m.Results + } + return nil +} + +func init() { + proto.RegisterType((*SearchRequest)(nil), "com.netflix.conductor.grpc.SearchRequest") + proto.RegisterType((*WorkflowSummarySearchResult)(nil), "com.netflix.conductor.grpc.WorkflowSummarySearchResult") +} + +func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_4602c68037d6c24f) } + +var fileDescriptor_search_4602c68037d6c24f = []byte{ + // 292 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0xcf, 0x4b, 0x33, 0x31, + 0x10, 0x65, 0xfb, 0xe3, 0xfb, 0xda, 0x88, 0x07, 0x83, 0x87, 0xd0, 0x22, 0x94, 0x9e, 0x7a, 0x90, + 0x04, 0xf4, 0xe4, 0xb5, 0xa0, 0x78, 0x94, 0xad, 0x20, 0x78, 0x29, 0xdb, 0x74, 0xba, 0x1b, 0xcc, + 0x6e, 0xda, 0x64, 0x42, 0xb7, 0x9e, 0x3c, 0xf8, 0x87, 0xcb, 0xce, 0xb6, 0x0a, 0xe2, 0x6d, 0xde, + 0x9b, 0x99, 0xf7, 0x5e, 0x32, 0xec, 0x22, 0xf7, 0x5b, 0xad, 0x02, 0x64, 0x5e, 0x17, 0x72, 0xeb, + 0x1d, 0x3a, 0x3e, 0xd2, 0xae, 0x94, 0x15, 0xe0, 0xc6, 0x9a, 0x5a, 0x6a, 0x57, 0xad, 0xa3, 0x46, + 0xe7, 0x65, 0x33, 0x38, 0x1a, 0x97, 0x6e, 0x0d, 0x56, 0xed, 0x9d, 0x7f, 0xdb, 0x58, 0xb7, 0x0f, + 0xb1, 0x2c, 0x33, 0x7f, 0x68, 0x17, 0xa7, 0x1f, 0x09, 0x3b, 0x5f, 0x90, 0x52, 0x0a, 0xbb, 0x08, + 0x01, 0xf9, 0x25, 0xeb, 0x07, 0xcc, 0x3c, 0x8a, 0x64, 0x92, 0xcc, 0xfa, 0x69, 0x0b, 0x38, 0x67, + 0xbd, 0x60, 0xde, 0x41, 0x74, 0x88, 0xa4, 0x9a, 0x38, 0xe7, 0x51, 0x74, 0x27, 0xc9, 0x6c, 0x98, + 0x52, 0xcd, 0xc7, 0x6c, 0xb8, 0xf1, 0x00, 0x4b, 0x84, 0x1a, 0x45, 0x8f, 0x1a, 0x83, 0x86, 0x78, + 0x86, 0x9a, 0xa4, 0x77, 0x11, 0xfc, 0x41, 0xf4, 0xa9, 0xd1, 0x82, 0xe9, 0x67, 0xc2, 0xc6, 0x2f, + 0xc7, 0x70, 0x8b, 0x36, 0xdc, 0x29, 0x51, 0x88, 0x16, 0xf9, 0x15, 0x63, 0xe8, 0x30, 0xb3, 0xcb, + 0xc2, 
0x60, 0xa0, 0x54, 0xdd, 0x74, 0x48, 0xcc, 0xa3, 0xc1, 0xc0, 0x1f, 0xd8, 0x7f, 0x4f, 0x83, + 0x41, 0x74, 0x26, 0xdd, 0xd9, 0xd9, 0xcd, 0xb5, 0xfc, 0xfb, 0x33, 0xe8, 0xc1, 0xf2, 0x97, 0x53, + 0x7a, 0x5a, 0x9e, 0xdf, 0xcf, 0x07, 0xad, 0xed, 0xd3, 0xea, 0xf5, 0x2e, 0x37, 0x58, 0xc4, 0x55, + 0x23, 0xa4, 0x8e, 0x42, 0xea, 0x5b, 0x48, 0x69, 0x6b, 0xa0, 0x42, 0x95, 0x3b, 0xba, 0xc3, 0x0f, + 0xdf, 0xc0, 0xd5, 0x3f, 0xb2, 0xb9, 0xfd, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x03, 0x79, 0x98, + 0xa5, 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/task_service.pb.go b/client/gogrpc/conductor/grpc/task_service.pb.go new file mode 100644 index 0000000000..a175b4c49b --- /dev/null +++ b/client/gogrpc/conductor/grpc/task_service.pb.go @@ -0,0 +1,869 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/task_service.proto + +package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type PollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` + TaskCount int32 `protobuf:"varint,4,opt,name=task_count,json=taskCount" json:"task_count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollRequest) Reset() { *m = PollRequest{} } +func (m *PollRequest) String() string { return proto.CompactTextString(m) } +func (*PollRequest) ProtoMessage() {} +func (*PollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{0} +} +func (m *PollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollRequest.Unmarshal(m, b) +} +func (m *PollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollRequest.Marshal(b, m, deterministic) +} +func (dst *PollRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollRequest.Merge(dst, src) +} +func (m *PollRequest) XXX_Size() int { + return xxx_messageInfo_PollRequest.Size(m) +} +func (m *PollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PollRequest proto.InternalMessageInfo + +func (m *PollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *PollRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *PollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *PollRequest) GetTaskCount() int32 { + if m != nil { + return m.TaskCount + } + return 0 +} + +type 
TasksInProgressRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey" json:"start_key,omitempty"` + Count int32 `protobuf:"varint,3,opt,name=count" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} } +func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressRequest) ProtoMessage() {} +func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{1} +} +func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) +} +func (m *TasksInProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressRequest.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressRequest.Merge(dst, src) +} +func (m *TasksInProgressRequest) XXX_Size() int { + return xxx_messageInfo_TasksInProgressRequest.Size(m) +} +func (m *TasksInProgressRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressRequest proto.InternalMessageInfo + +func (m *TasksInProgressRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *TasksInProgressRequest) GetStartKey() string { + if m != nil { + return m.StartKey + } + return "" +} + +func (m *TasksInProgressRequest) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +type TasksInProgressResponse struct { + Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse{} } +func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressResponse) ProtoMessage() {} +func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{2} +} +func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) +} +func (m *TasksInProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressResponse.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressResponse.Merge(dst, src) +} +func (m *TasksInProgressResponse) XXX_Size() int { + return xxx_messageInfo_TasksInProgressResponse.Size(m) +} +func (m *TasksInProgressResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressResponse proto.InternalMessageInfo + +func (m *TasksInProgressResponse) GetTasks() []*model.Task { + if m != nil { + return m.Tasks + } + return nil +} + +type PendingTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"` + 
XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } +func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } +func (*PendingTaskRequest) ProtoMessage() {} +func (*PendingTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{3} +} +func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) +} +func (m *PendingTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PendingTaskRequest.Marshal(b, m, deterministic) +} +func (dst *PendingTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingTaskRequest.Merge(dst, src) +} +func (m *PendingTaskRequest) XXX_Size() int { + return xxx_messageInfo_PendingTaskRequest.Size(m) +} +func (m *PendingTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PendingTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingTaskRequest proto.InternalMessageInfo + +func (m *PendingTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *PendingTaskRequest) GetTaskRefName() string { + if m != nil { + return m.TaskRefName + } + return "" +} + +type TaskUpdateResponse struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskUpdateResponse) Reset() { *m = TaskUpdateResponse{} } +func (m *TaskUpdateResponse) String() string { return proto.CompactTextString(m) } +func (*TaskUpdateResponse) ProtoMessage() {} +func (*TaskUpdateResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{4} +} +func (m *TaskUpdateResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskUpdateResponse.Unmarshal(m, b) +} +func (m *TaskUpdateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskUpdateResponse.Marshal(b, m, deterministic) +} +func (dst *TaskUpdateResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskUpdateResponse.Merge(dst, src) +} +func (m *TaskUpdateResponse) XXX_Size() int { + return xxx_messageInfo_TaskUpdateResponse.Size(m) +} +func (m *TaskUpdateResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TaskUpdateResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskUpdateResponse proto.InternalMessageInfo + +func (m *TaskUpdateResponse) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type AckTaskRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } +func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) } +func (*AckTaskRequest) ProtoMessage() {} +func (*AckTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{5} +} +func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) +} +func (m *AckTaskRequest) XXX_Marshal(b []byte, deterministic 
bool) ([]byte, error) { + return xxx_messageInfo_AckTaskRequest.Marshal(b, m, deterministic) +} +func (dst *AckTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AckTaskRequest.Merge(dst, src) +} +func (m *AckTaskRequest) XXX_Size() int { + return xxx_messageInfo_AckTaskRequest.Size(m) +} +func (m *AckTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskRequest proto.InternalMessageInfo + +func (m *AckTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AckTaskRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +type AckTaskResponse struct { + Ack bool `protobuf:"varint,1,opt,name=ack" json:"ack,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } +func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } +func (*AckTaskResponse) ProtoMessage() {} +func (*AckTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{6} +} +func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) +} +func (m *AckTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AckTaskResponse.Marshal(b, m, deterministic) +} +func (dst *AckTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AckTaskResponse.Merge(dst, src) +} +func (m *AckTaskResponse) XXX_Size() int { + return xxx_messageInfo_AckTaskResponse.Size(m) +} +func (m *AckTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskResponse proto.InternalMessageInfo + +func (m *AckTaskResponse) GetAck() bool { + if m != nil { + return m.Ack + } + return false +} + +type AddLogRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + Log string `protobuf:"bytes,2,opt,name=log" json:"log,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } +func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } +func (*AddLogRequest) ProtoMessage() {} +func (*AddLogRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{7} +} +func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) +} +func (m *AddLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddLogRequest.Marshal(b, m, deterministic) +} +func (dst *AddLogRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddLogRequest.Merge(dst, src) +} +func (m *AddLogRequest) XXX_Size() int { + return xxx_messageInfo_AddLogRequest.Size(m) +} +func (m *AddLogRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddLogRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddLogRequest proto.InternalMessageInfo + +func (m *AddLogRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AddLogRequest) GetLog() string { + if m != nil { + return m.Log + } + return "" +} + +type TaskId struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} 
`json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskId) Reset() { *m = TaskId{} } +func (m *TaskId) String() string { return proto.CompactTextString(m) } +func (*TaskId) ProtoMessage() {} +func (*TaskId) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{8} +} +func (m *TaskId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskId.Unmarshal(m, b) +} +func (m *TaskId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskId.Marshal(b, m, deterministic) +} +func (dst *TaskId) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskId.Merge(dst, src) +} +func (m *TaskId) XXX_Size() int { + return xxx_messageInfo_TaskId.Size(m) +} +func (m *TaskId) XXX_DiscardUnknown() { + xxx_messageInfo_TaskId.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskId proto.InternalMessageInfo + +func (m *TaskId) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type GetLogsResponse struct { + Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs" json:"logs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetLogsResponse) Reset() { *m = GetLogsResponse{} } +func (m *GetLogsResponse) String() string { return proto.CompactTextString(m) } +func (*GetLogsResponse) ProtoMessage() {} +func (*GetLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_cfa360f15b9a9291, []int{9} +} +func (m *GetLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetLogsResponse.Unmarshal(m, b) +} +func (m *GetLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetLogsResponse.Marshal(b, m, deterministic) +} +func (dst *GetLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetLogsResponse.Merge(dst, src) +} +func (m *GetLogsResponse) XXX_Size() int { + return xxx_messageInfo_GetLogsResponse.Size(m) +} +func (m *GetLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetLogsResponse proto.InternalMessageInfo + +func (m *GetLogsResponse) GetLogs() []*model.TaskExecLog { + if m != nil { + return m.Logs + } + return nil +} + +func init() { + proto.RegisterType((*PollRequest)(nil), "com.netflix.conductor.grpc.PollRequest") + proto.RegisterType((*TasksInProgressRequest)(nil), "com.netflix.conductor.grpc.TasksInProgressRequest") + proto.RegisterType((*TasksInProgressResponse)(nil), "com.netflix.conductor.grpc.TasksInProgressResponse") + proto.RegisterType((*PendingTaskRequest)(nil), "com.netflix.conductor.grpc.PendingTaskRequest") + proto.RegisterType((*TaskUpdateResponse)(nil), "com.netflix.conductor.grpc.TaskUpdateResponse") + proto.RegisterType((*AckTaskRequest)(nil), "com.netflix.conductor.grpc.AckTaskRequest") + proto.RegisterType((*AckTaskResponse)(nil), "com.netflix.conductor.grpc.AckTaskResponse") + proto.RegisterType((*AddLogRequest)(nil), "com.netflix.conductor.grpc.AddLogRequest") + proto.RegisterType((*TaskId)(nil), "com.netflix.conductor.grpc.TaskId") + proto.RegisterType((*GetLogsResponse)(nil), "com.netflix.conductor.grpc.GetLogsResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. 
+const _ = grpc.SupportPackageIsVersion4 + +// TaskServiceClient is the client API for TaskService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TaskServiceClient interface { + Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*model.Task, error) + PollStream(ctx context.Context, opts ...grpc.CallOption) (TaskService_PollStreamClient, error) + GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) + GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*model.Task, error) + UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*TaskUpdateResponse, error) + AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) + AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) + GetLogs(ctx context.Context, in *TaskId, opts ...grpc.CallOption) (*GetLogsResponse, error) +} + +type taskServiceClient struct { + cc *grpc.ClientConn +} + +func NewTaskServiceClient(cc *grpc.ClientConn) TaskServiceClient { + return &taskServiceClient{cc} +} + +func (c *taskServiceClient) Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*model.Task, error) { + out := new(model.Task) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/Poll", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) PollStream(ctx context.Context, opts ...grpc.CallOption) (TaskService_PollStreamClient, error) { + stream, err := c.cc.NewStream(ctx, &_TaskService_serviceDesc.Streams[0], "/com.netflix.conductor.grpc.TaskService/PollStream", opts...) + if err != nil { + return nil, err + } + x := &taskServicePollStreamClient{stream} + return x, nil +} + +type TaskService_PollStreamClient interface { + Send(*PollRequest) error + Recv() (*model.Task, error) + grpc.ClientStream +} + +type taskServicePollStreamClient struct { + grpc.ClientStream +} + +func (x *taskServicePollStreamClient) Send(m *PollRequest) error { + return x.ClientStream.SendMsg(m) +} + +func (x *taskServicePollStreamClient) Recv() (*model.Task, error) { + m := new(model.Task) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *taskServiceClient) GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) { + out := new(TasksInProgressResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetTasksInProgress", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*model.Task, error) { + out := new(model.Task) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetPendingTaskForWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*TaskUpdateResponse, error) { + out := new(TaskUpdateResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/UpdateTask", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) { + out := new(AckTaskResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/AckTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/AddLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetLogs(ctx context.Context, in *TaskId, opts ...grpc.CallOption) (*GetLogsResponse, error) { + out := new(GetLogsResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TaskServiceServer is the server API for TaskService service. +type TaskServiceServer interface { + Poll(context.Context, *PollRequest) (*model.Task, error) + PollStream(TaskService_PollStreamServer) error + GetTasksInProgress(context.Context, *TasksInProgressRequest) (*TasksInProgressResponse, error) + GetPendingTaskForWorkflow(context.Context, *PendingTaskRequest) (*model.Task, error) + UpdateTask(context.Context, *model.TaskResult) (*TaskUpdateResponse, error) + AckTask(context.Context, *AckTaskRequest) (*AckTaskResponse, error) + AddLog(context.Context, *AddLogRequest) (*empty.Empty, error) + GetLogs(context.Context, *TaskId) (*GetLogsResponse, error) +} + +func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { + s.RegisterService(&_TaskService_serviceDesc, srv) +} + +func _TaskService_Poll_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PollRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).Poll(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/Poll", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).Poll(ctx, req.(*PollRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_PollStream_Handler(srv interface{}, stream grpc.ServerStream) error { + return srv.(TaskServiceServer).PollStream(&taskServicePollStreamServer{stream}) +} + +type TaskService_PollStreamServer interface { + Send(*model.Task) error + Recv() (*PollRequest, error) + grpc.ServerStream +} + +type taskServicePollStreamServer struct { + grpc.ServerStream +} + +func (x *taskServicePollStreamServer) Send(m *model.Task) error { + return x.ServerStream.SendMsg(m) +} + +func (x *taskServicePollStreamServer) Recv() (*PollRequest, error) { + m := new(PollRequest) + if err := x.ServerStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func _TaskService_GetTasksInProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TasksInProgressRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: 
"/com.netflix.conductor.grpc.TaskService/GetTasksInProgress", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, req.(*TasksInProgressRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetPendingTaskForWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PendingTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/GetPendingTaskForWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, req.(*PendingTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.TaskResult) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).UpdateTask(ctx, req.(*model.TaskResult)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AckTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AckTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AckTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/AckTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).AckTask(ctx, req.(*AckTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AddLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AddLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/AddLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).AddLog(ctx, req.(*AddLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TaskId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.TaskService/GetLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetLogs(ctx, req.(*TaskId)) + 
} + return interceptor(ctx, in, info, handler) +} + +var _TaskService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "com.netflix.conductor.grpc.TaskService", + HandlerType: (*TaskServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Poll", + Handler: _TaskService_Poll_Handler, + }, + { + MethodName: "GetTasksInProgress", + Handler: _TaskService_GetTasksInProgress_Handler, + }, + { + MethodName: "GetPendingTaskForWorkflow", + Handler: _TaskService_GetPendingTaskForWorkflow_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _TaskService_UpdateTask_Handler, + }, + { + MethodName: "AckTask", + Handler: _TaskService_AckTask_Handler, + }, + { + MethodName: "AddLog", + Handler: _TaskService_AddLog_Handler, + }, + { + MethodName: "GetLogs", + Handler: _TaskService_GetLogs_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "PollStream", + Handler: _TaskService_PollStream_Handler, + ServerStreams: true, + ClientStreams: true, + }, + }, + Metadata: "grpc/task_service.proto", +} + +func init() { + proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_cfa360f15b9a9291) +} + +var fileDescriptor_task_service_cfa360f15b9a9291 = []byte{ + // 664 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x4f, 0x13, 0x4f, + 0x14, 0x4d, 0x7f, 0x85, 0x02, 0xb7, 0xe1, 0x07, 0x99, 0x98, 0xb6, 0x2e, 0x31, 0xc2, 0xfa, 0x40, + 0xd5, 0x38, 0x6b, 0xe0, 0xc1, 0x68, 0x7c, 0x01, 0x03, 0xa4, 0x42, 0xb4, 0x59, 0x30, 0x46, 0x62, + 0xd2, 0x6c, 0x77, 0x6f, 0x97, 0x4d, 0x77, 0x77, 0xd6, 0x99, 0xa9, 0xd0, 0x07, 0xfd, 0x63, 0xfc, + 0x4b, 0xcd, 0x7c, 0xf4, 0x43, 0x3e, 0x5a, 0x4c, 0x7c, 0xdb, 0x39, 0x77, 0xee, 0xb9, 0xe7, 0xce, + 0xdc, 0x33, 0x0b, 0xf5, 0x98, 0x17, 0xa1, 0x27, 0x03, 0xd1, 0xef, 0x08, 0xe4, 0xdf, 0x93, 0x10, + 0x69, 0xc1, 0x99, 0x64, 0xc4, 0x09, 0x59, 0x46, 0x73, 0x94, 0xbd, 0x34, 0xb9, 0xa2, 0x21, 0xcb, + 0xa3, 0x41, 0x28, 0x19, 0xa7, 0x6a, 0xbb, 0xb3, 0x11, 0x33, 0x16, 0xa7, 0xe8, 0xe9, 0x9d, 0xdd, + 0x41, 0xcf, 0xc3, 0xac, 0x90, 0x43, 0x93, 0xe8, 0xd4, 0x33, 0x16, 0x61, 0xaa, 0x29, 0xf1, 0x0a, + 0xc3, 0x94, 0xc5, 0x36, 0x50, 0x9b, 0x04, 0x38, 0x8a, 0x41, 0x2a, 0x2d, 0xbe, 0x3e, 0xc1, 0x0d, + 0xe2, 0xfe, 0x84, 0x6a, 0x9b, 0xa5, 0xa9, 0x8f, 0xdf, 0x06, 0x28, 0x24, 0xd9, 0x80, 0x15, 0x2d, + 0x50, 0x0e, 0x0b, 0x6c, 0x94, 0x36, 0x4b, 0xcd, 0x15, 0x7f, 0x59, 0x01, 0x67, 0xc3, 0x02, 0x55, + 0xf0, 0x92, 0xf1, 0x3e, 0xf2, 0x4e, 0x12, 0x35, 0xfe, 0x33, 0x41, 0x03, 0xb4, 0x22, 0x52, 0x83, + 0x4a, 0xc4, 0xb2, 0x20, 0xc9, 0x1b, 0x65, 0x1d, 0xb1, 0x2b, 0xf2, 0x08, 0x40, 0x33, 0x86, 0x6c, + 0x90, 0xcb, 0xc6, 0xc2, 0x66, 0xa9, 0xb9, 0xe8, 0xeb, 0x1a, 0xef, 0x14, 0xe0, 0x5e, 0x40, 0xed, + 0x2c, 0x10, 0x7d, 0xd1, 0xca, 0xdb, 0x9c, 0xc5, 0x1c, 0x85, 0xb8, 0xaf, 0x14, 0x21, 0x03, 0x2e, + 0x3b, 0x7d, 0x1c, 0x8e, 0xa4, 0x68, 0xe0, 0x18, 0x87, 0xe4, 0x01, 0x2c, 0x9a, 0x6a, 0x65, 0x5d, + 0xcd, 0x2c, 0x5c, 0x1f, 0xea, 0x37, 0x2a, 0x89, 0x82, 0xe5, 0x02, 0xc9, 0x2b, 0x58, 0x54, 0xcc, + 0xa2, 0x51, 0xda, 0x2c, 0x37, 0xab, 0x3b, 0x5b, 0xf4, 0xf6, 0x0b, 0xd1, 0x27, 0x46, 0x15, 0x89, + 0x6f, 0xf6, 0xbb, 0x5f, 0x80, 0xb4, 0x31, 0x8f, 0x92, 0x3c, 0xd6, 0xa8, 0x55, 0xfe, 0x18, 0xaa, + 0xea, 0x58, 0x7a, 0x29, 0xbb, 0x54, 0x27, 0x65, 0xb4, 0xc3, 0x08, 0x6a, 0x45, 0xc4, 0x85, 0x55, + 0xdd, 0x1a, 0xc7, 0x5e, 0x27, 0x0f, 0x32, 0xb4, 0x1d, 0x54, 0xa5, 0x26, 0xe9, 0x7d, 0x08, 0x32, + 0x74, 0x5f, 0x00, 0x51, 0x9c, 0x9f, 0x8a, 0x28, 0x90, 0x38, 0x56, 0x5a, 0x87, 0x25, 0x9d, 0x39, + 0xa6, 0xad, 0xa8, 0x65, 0x2b, 0x72, 0x0f, 0xe1, 0xff, 
0xbd, 0xb0, 0x3f, 0xad, 0xe2, 0xae, 0xad, + 0x33, 0xaf, 0xd1, 0x7d, 0x02, 0x6b, 0x63, 0x1e, 0x5b, 0x73, 0x1d, 0xca, 0x41, 0xd8, 0xd7, 0x24, + 0xcb, 0xbe, 0xfa, 0x74, 0xdf, 0xc0, 0xea, 0x5e, 0x14, 0x9d, 0xb0, 0x78, 0x6e, 0xad, 0x75, 0x28, + 0xa7, 0x2c, 0xb6, 0x55, 0xd4, 0xa7, 0xbb, 0x05, 0x95, 0x33, 0x13, 0xbb, 0xb3, 0x97, 0x8f, 0xb0, + 0x76, 0x84, 0xf2, 0x84, 0xc5, 0x93, 0x1b, 0x7a, 0x0b, 0x0b, 0x29, 0x8b, 0x47, 0x17, 0xd4, 0x9c, + 0x7b, 0x41, 0x07, 0x57, 0x18, 0x2a, 0x7d, 0x3a, 0x6b, 0xe7, 0x57, 0x05, 0xaa, 0x0a, 0x3d, 0x35, + 0xb6, 0x23, 0x3e, 0x2c, 0xa8, 0xa1, 0x27, 0xdb, 0xf4, 0x6e, 0xe7, 0xd1, 0x29, 0x5b, 0x38, 0xf3, + 0x27, 0x82, 0x7c, 0x05, 0x50, 0x19, 0xa7, 0x92, 0x63, 0x90, 0xfd, 0x4b, 0xe6, 0x66, 0xe9, 0x65, + 0x89, 0xfc, 0x00, 0x72, 0x84, 0xf2, 0xda, 0xfc, 0x92, 0x9d, 0x59, 0x55, 0x6e, 0xb7, 0x95, 0xb3, + 0xfb, 0x57, 0x39, 0xf6, 0xf8, 0x73, 0x78, 0x78, 0x84, 0x72, 0x6a, 0xd4, 0x0f, 0x19, 0xff, 0x6c, + 0xe7, 0x99, 0xd0, 0x99, 0xbd, 0xde, 0xb0, 0xc7, 0x7d, 0x0e, 0x13, 0x01, 0xcc, 0xe0, 0xeb, 0xd5, + 0xf6, 0x7c, 0x3f, 0xea, 0x47, 0xce, 0xa1, 0xf3, 0x7a, 0xbb, 0xe6, 0xa6, 0x2e, 0x2c, 0xd9, 0x61, + 0x27, 0xcf, 0x66, 0xa5, 0xfe, 0xe9, 0x2c, 0xe7, 0xf9, 0xbd, 0xf6, 0xda, 0x1a, 0xc7, 0x50, 0x31, + 0x5e, 0x21, 0x4f, 0x67, 0xa6, 0x4d, 0xfb, 0xc9, 0xa9, 0x51, 0xf3, 0xec, 0xd3, 0xd1, 0xb3, 0x4f, + 0x0f, 0xd4, 0xb3, 0x4f, 0xce, 0x61, 0xc9, 0x3a, 0x83, 0xb8, 0xf3, 0x7a, 0x6d, 0x45, 0xb3, 0x85, + 0x5e, 0xb3, 0xd8, 0xfe, 0xfb, 0xfd, 0xd5, 0x29, 0x8f, 0xb4, 0xbb, 0xe7, 0xaf, 0xe3, 0x44, 0x5e, + 0x0c, 0xba, 0x8a, 0xc3, 0xb3, 0x1c, 0xde, 0x98, 0xc3, 0x0b, 0xd3, 0x04, 0x73, 0xe9, 0xc5, 0x4c, + 0xff, 0xd5, 0x26, 0xb8, 0x5a, 0x76, 0x2b, 0x5a, 0xf7, 0xee, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x4b, 0x89, 0xdb, 0xbb, 0xf3, 0x06, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/workflow_service.pb.go b/client/gogrpc/conductor/grpc/workflow_service.pb.go new file mode 100644 index 0000000000..37f12229ff --- /dev/null +++ b/client/gogrpc/conductor/grpc/workflow_service.pb.go @@ -0,0 +1,1171 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/workflow_service.proto + +package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import empty "github.com/golang/protobuf/ptypes/empty" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type GetWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed" json:"include_closed,omitempty"` + IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } +func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsRequest) ProtoMessage() {} +func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{0} +} +func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsRequest.Merge(dst, src) +} +func (m *GetWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsRequest.Size(m) +} +func (m *GetWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsRequest proto.InternalMessageInfo + +func (m *GetWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowsRequest) GetCorrelationId() []string { + if m != nil { + return m.CorrelationId + } + return nil +} + +func (m *GetWorkflowsRequest) GetIncludeClosed() bool { + if m != nil { + return m.IncludeClosed + } + return false +} + +func (m *GetWorkflowsRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks + } + return false +} + +type GetWorkflowsResponse struct { + WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } +func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse) ProtoMessage() {} +func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{1} +} +func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse.Merge(dst, src) +} +func (m *GetWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse.Size(m) +} +func (m *GetWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse.DiscardUnknown(m) +} + +var 
xxx_messageInfo_GetWorkflowsResponse proto.InternalMessageInfo + +func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsResponse_Workflows { + if m != nil { + return m.WorkflowsById + } + return nil +} + +type GetWorkflowsResponse_Workflows struct { + Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows" json:"workflows,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResponse_Workflows{} } +func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} +func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{1, 0} +} +func (m *GetWorkflowsResponse_Workflows) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsResponse_Workflows) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse_Workflows.Merge(dst, src) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Size(m) +} +func (m *GetWorkflowsResponse_Workflows) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse_Workflows.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsResponse_Workflows proto.InternalMessageInfo + +func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { + if m != nil { + return m.Workflows + } + return nil +} + +type GetWorkflowStatusRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusRequest{} } +func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowStatusRequest) ProtoMessage() {} +func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{2} +} +func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) +} +func (m *GetWorkflowStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowStatusRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowStatusRequest.Merge(dst, src) +} +func (m *GetWorkflowStatusRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowStatusRequest.Size(m) +} +func (m *GetWorkflowStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowStatusRequest proto.InternalMessageInfo + +func (m *GetWorkflowStatusRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks 
+ } + return false +} + +type RemoveWorkflowRequest struct { + WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId" json:"workflod_id,omitempty"` + ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow" json:"archive_workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } +func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveWorkflowRequest) ProtoMessage() {} +func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{3} +} +func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) +} +func (m *RemoveWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveWorkflowRequest.Merge(dst, src) +} +func (m *RemoveWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RemoveWorkflowRequest.Size(m) +} +func (m *RemoveWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveWorkflowRequest proto.InternalMessageInfo + +func (m *RemoveWorkflowRequest) GetWorkflodId() string { + if m != nil { + return m.WorkflodId + } + return "" +} + +func (m *RemoveWorkflowRequest) GetArchiveWorkflow() bool { + if m != nil { + return m.ArchiveWorkflow + } + return false +} + +type GetRunningWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsRequest{} } +func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsRequest) ProtoMessage() {} +func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{4} +} +func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsRequest.Merge(dst, src) +} +func (m *GetRunningWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsRequest.Size(m) +} +func (m *GetRunningWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsRequest proto.InternalMessageInfo + +func (m *GetRunningWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetRunningWorkflowsRequest) GetVersion() int32 { + 
if m != nil { + return m.Version + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetStartTime() int64 { + if m != nil { + return m.StartTime + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +type GetRunningWorkflowsResponse struct { + WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds" json:"workflow_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflowsResponse{} } +func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsResponse) ProtoMessage() {} +func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{5} +} +func (m *GetRunningWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsResponse.Merge(dst, src) +} +func (m *GetRunningWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsResponse.Size(m) +} +func (m *GetRunningWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsResponse proto.InternalMessageInfo + +func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string { + if m != nil { + return m.WorkflowIds + } + return nil +} + +type WorkflowId struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowId) Reset() { *m = WorkflowId{} } +func (m *WorkflowId) String() string { return proto.CompactTextString(m) } +func (*WorkflowId) ProtoMessage() {} +func (*WorkflowId) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{6} +} +func (m *WorkflowId) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowId.Unmarshal(m, b) +} +func (m *WorkflowId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowId.Marshal(b, m, deterministic) +} +func (dst *WorkflowId) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowId.Merge(dst, src) +} +func (m *WorkflowId) XXX_Size() int { + return xxx_messageInfo_WorkflowId.Size(m) +} +func (m *WorkflowId) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowId.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowId proto.InternalMessageInfo + +func (m *WorkflowId) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type SkipTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"` + Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + 
XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } +func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } +func (*SkipTaskRequest) ProtoMessage() {} +func (*SkipTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{7} +} +func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) +} +func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) +} +func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskRequest.Merge(dst, src) +} +func (m *SkipTaskRequest) XXX_Size() int { + return xxx_messageInfo_SkipTaskRequest.Size(m) +} +func (m *SkipTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo + +func (m *SkipTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *SkipTaskRequest) GetTaskReferenceName() string { + if m != nil { + return m.TaskReferenceName + } + return "" +} + +func (m *SkipTaskRequest) GetRequest() *model.SkipTaskRequest { + if m != nil { + return m.Request + } + return nil +} + +type TerminateWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + Reason string `protobuf:"bytes,2,opt,name=reason" json:"reason,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowRequest{} } +func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*TerminateWorkflowRequest) ProtoMessage() {} +func (*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{8} +} +func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) +} +func (m *TerminateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TerminateWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *TerminateWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TerminateWorkflowRequest.Merge(dst, src) +} +func (m *TerminateWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_TerminateWorkflowRequest.Size(m) +} +func (m *TerminateWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TerminateWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TerminateWorkflowRequest proto.InternalMessageInfo + +func (m *TerminateWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *TerminateWorkflowRequest) GetReason() string { + if m != nil { + return m.Reason + } + return "" +} + +func init() { + proto.RegisterType((*GetWorkflowsRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowsRequest") + proto.RegisterType((*GetWorkflowsResponse)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse") + proto.RegisterMapType((map[string]*GetWorkflowsResponse_Workflows)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse.WorkflowsByIdEntry") + proto.RegisterType((*GetWorkflowsResponse_Workflows)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse.Workflows") + 
proto.RegisterType((*GetWorkflowStatusRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowStatusRequest") + proto.RegisterType((*RemoveWorkflowRequest)(nil), "com.netflix.conductor.grpc.RemoveWorkflowRequest") + proto.RegisterType((*GetRunningWorkflowsRequest)(nil), "com.netflix.conductor.grpc.GetRunningWorkflowsRequest") + proto.RegisterType((*GetRunningWorkflowsResponse)(nil), "com.netflix.conductor.grpc.GetRunningWorkflowsResponse") + proto.RegisterType((*WorkflowId)(nil), "com.netflix.conductor.grpc.WorkflowId") + proto.RegisterType((*SkipTaskRequest)(nil), "com.netflix.conductor.grpc.SkipTaskRequest") + proto.RegisterType((*TerminateWorkflowRequest)(nil), "com.netflix.conductor.grpc.TerminateWorkflowRequest") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// WorkflowServiceClient is the client API for WorkflowService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type WorkflowServiceClient interface { + // POST / + StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) + // GET /{name}/correlated/{correlationId} + GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) + // GET /{workflowId} + GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) + // DELETE /{workflodId}/remove + RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // GET /running/{name} + GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) + // PUT /decide/{workflowId} + DecideWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // PUT /{workflowId}/pause + PauseWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // PUT /{workflowId}/resume + ResumeWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // PUT /{workflowId}/skiptask/{taskReferenceName} + SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // POST /{workflowId}/rerun + RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) + // POST /{workflowId}/restart + RestartWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // POST /{workflowId}/retry + RetryWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // POST /{workflowId}/resetcallbacks + ResetWorkflowCallbacks(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) + // DELETE /{workflowId} + TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) + // GET /search + Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) + SearchByTasks(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) +} + +type 
workflowServiceClient struct { + cc *grpc.ClientConn +} + +func NewWorkflowServiceClient(cc *grpc.ClientConn) WorkflowServiceClient { + return &workflowServiceClient{cc} +} + +func (c *workflowServiceClient) StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) { + out := new(WorkflowId) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/StartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) { + out := new(GetWorkflowsResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) { + out := new(model.Workflow) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetWorkflowStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RemoveWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) { + out := new(GetRunningWorkflowsResponse) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetRunningWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) DecideWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/DecideWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) PauseWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/PauseWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResumeWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/ResumeWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/SkipTaskFromWorkflow", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) { + out := new(WorkflowId) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RerunWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RestartWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RestartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RetryWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RetryWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResetWorkflowCallbacks(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/ResetWorkflowCallbacks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) { + out := new(empty.Empty) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/TerminateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SearchByTasks(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/SearchByTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkflowServiceServer is the server API for WorkflowService service. 
+type WorkflowServiceServer interface { + // POST / + StartWorkflow(context.Context, *model.StartWorkflowRequest) (*WorkflowId, error) + // GET /{name}/correlated/{correlationId} + GetWorkflows(context.Context, *GetWorkflowsRequest) (*GetWorkflowsResponse, error) + // GET /{workflowId} + GetWorkflowStatus(context.Context, *GetWorkflowStatusRequest) (*model.Workflow, error) + // DELETE /{workflodId}/remove + RemoveWorkflow(context.Context, *RemoveWorkflowRequest) (*empty.Empty, error) + // GET /running/{name} + GetRunningWorkflows(context.Context, *GetRunningWorkflowsRequest) (*GetRunningWorkflowsResponse, error) + // PUT /decide/{workflowId} + DecideWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) + // PUT /{workflowId}/pause + PauseWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) + // PUT /{workflowId}/resume + ResumeWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) + // PUT /{workflowId}/skiptask/{taskReferenceName} + SkipTaskFromWorkflow(context.Context, *SkipTaskRequest) (*empty.Empty, error) + // POST /{workflowId}/rerun + RerunWorkflow(context.Context, *model.RerunWorkflowRequest) (*WorkflowId, error) + // POST /{workflowId}/restart + RestartWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) + // POST /{workflowId}/retry + RetryWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) + // POST /{workflowId}/resetcallbacks + ResetWorkflowCallbacks(context.Context, *WorkflowId) (*empty.Empty, error) + // DELETE /{workflowId} + TerminateWorkflow(context.Context, *TerminateWorkflowRequest) (*empty.Empty, error) + // GET /search + Search(context.Context, *SearchRequest) (*WorkflowSummarySearchResult, error) + SearchByTasks(context.Context, *SearchRequest) (*WorkflowSummarySearchResult, error) +} + +func RegisterWorkflowServiceServer(s *grpc.Server, srv WorkflowServiceServer) { + s.RegisterService(&_WorkflowService_serviceDesc, srv) +} + +func _WorkflowService_StartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.StartWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).StartWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/StartWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).StartWorkflow(ctx, req.(*model.StartWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetWorkflows(ctx, req.(*GetWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetWorkflowStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowStatusRequest) + if err := 
dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetWorkflowStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, req.(*GetWorkflowStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RemoveWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RemoveWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, req.(*RemoveWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetRunningWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRunningWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetRunningWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, req.(*GetRunningWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_DecideWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/DecideWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_PauseWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/PauseWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_ResumeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, 
err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/ResumeWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SkipTaskFromWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SkipTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/SkipTaskFromWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, req.(*SkipTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RerunWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.RerunWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RerunWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, req.(*model.RerunWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RestartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RestartWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RetryWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RetryWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_ResetWorkflowCallbacks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(WorkflowId) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return 
srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/ResetWorkflowCallbacks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, req.(*WorkflowId)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_TerminateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TerminateWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/TerminateWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, req.(*TerminateWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).Search(ctx, req.(*SearchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SearchByTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SearchRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/com.netflix.conductor.grpc.WorkflowService/SearchByTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, req.(*SearchRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkflowService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "com.netflix.conductor.grpc.WorkflowService", + HandlerType: (*WorkflowServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "StartWorkflow", + Handler: _WorkflowService_StartWorkflow_Handler, + }, + { + MethodName: "GetWorkflows", + Handler: _WorkflowService_GetWorkflows_Handler, + }, + { + MethodName: "GetWorkflowStatus", + Handler: _WorkflowService_GetWorkflowStatus_Handler, + }, + { + MethodName: "RemoveWorkflow", + Handler: _WorkflowService_RemoveWorkflow_Handler, + }, + { + MethodName: "GetRunningWorkflows", + Handler: _WorkflowService_GetRunningWorkflows_Handler, + }, + { + MethodName: "DecideWorkflow", + Handler: _WorkflowService_DecideWorkflow_Handler, + }, + { + MethodName: "PauseWorkflow", + Handler: _WorkflowService_PauseWorkflow_Handler, + }, + { + MethodName: "ResumeWorkflow", + Handler: _WorkflowService_ResumeWorkflow_Handler, + }, + { + MethodName: "SkipTaskFromWorkflow", + Handler: _WorkflowService_SkipTaskFromWorkflow_Handler, + }, + { + MethodName: 
"RerunWorkflow", + Handler: _WorkflowService_RerunWorkflow_Handler, + }, + { + MethodName: "RestartWorkflow", + Handler: _WorkflowService_RestartWorkflow_Handler, + }, + { + MethodName: "RetryWorkflow", + Handler: _WorkflowService_RetryWorkflow_Handler, + }, + { + MethodName: "ResetWorkflowCallbacks", + Handler: _WorkflowService_ResetWorkflowCallbacks_Handler, + }, + { + MethodName: "TerminateWorkflow", + Handler: _WorkflowService_TerminateWorkflow_Handler, + }, + { + MethodName: "Search", + Handler: _WorkflowService_Search_Handler, + }, + { + MethodName: "SearchByTasks", + Handler: _WorkflowService_SearchByTasks_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/workflow_service.proto", +} + +func init() { + proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_ebe287e56823ea2c) +} + +var fileDescriptor_workflow_service_ebe287e56823ea2c = []byte{ + // 919 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x5f, 0x6f, 0x1b, 0x45, + 0x10, 0x97, 0xed, 0x34, 0xa9, 0xc7, 0x71, 0x5c, 0x6f, 0x43, 0x64, 0x2e, 0x42, 0x18, 0xa3, 0x22, + 0x57, 0xc0, 0x19, 0x02, 0xe2, 0x4f, 0x9f, 0x50, 0x42, 0x5b, 0xe5, 0xa5, 0x98, 0x75, 0xa4, 0x22, + 0xfa, 0x70, 0x9c, 0xef, 0x26, 0xee, 0xd5, 0x77, 0xb7, 0xee, 0xee, 0x9e, 0x83, 0x25, 0x1e, 0x11, + 0x9f, 0x82, 0x4f, 0xc0, 0x47, 0xe1, 0x95, 0x2f, 0x84, 0x76, 0x6f, 0xef, 0x6c, 0xc7, 0x3e, 0xd7, + 0x81, 0xa8, 0x6f, 0x77, 0xbf, 0x99, 0xfd, 0xcd, 0xec, 0xcc, 0xfc, 0x76, 0x17, 0x8e, 0x47, 0x7c, + 0xe2, 0xf5, 0xae, 0x18, 0x1f, 0x5f, 0x86, 0xec, 0xca, 0x11, 0xc8, 0xa7, 0x81, 0x87, 0xf6, 0x84, + 0x33, 0xc9, 0x88, 0xe5, 0xb1, 0xc8, 0x8e, 0x51, 0x5e, 0x86, 0xc1, 0xaf, 0xb6, 0xc7, 0x62, 0x3f, + 0xf1, 0x24, 0xe3, 0xb6, 0x5a, 0x62, 0x1d, 0x8f, 0x18, 0x1b, 0x85, 0xd8, 0xd3, 0x9e, 0xc3, 0xe4, + 0xb2, 0x87, 0xd1, 0x44, 0xce, 0xd2, 0x85, 0x56, 0x53, 0xb3, 0x0a, 0x74, 0xb9, 0xf7, 0xd2, 0x40, + 0x87, 0x11, 0xf3, 0x31, 0xcc, 0x23, 0x19, 0xf4, 0x38, 0x45, 0xc5, 0x38, 0x98, 0x48, 0x57, 0x8c, + 0x39, 0xbe, 0x4e, 0x50, 0x48, 0x63, 0x6c, 0x1b, 0xa3, 0x74, 0xb9, 0xcc, 0xd6, 0xad, 0xf5, 0xe0, + 0xc8, 0x93, 0x78, 0xad, 0x47, 0xe7, 0xcf, 0x12, 0xdc, 0x7f, 0x8a, 0xf2, 0xb9, 0x31, 0x0a, 0x9a, + 0x5a, 0x09, 0x81, 0x9d, 0xd8, 0x8d, 0xb0, 0x55, 0x6a, 0x97, 0xba, 0x55, 0xaa, 0xbf, 0xc9, 0x03, + 0x38, 0xf0, 0x18, 0xe7, 0x18, 0xba, 0x32, 0x60, 0xb1, 0x13, 0xf8, 0xad, 0x72, 0xbb, 0xd2, 0xad, + 0xd2, 0xfa, 0x02, 0x7a, 0xee, 0x2b, 0xb7, 0x20, 0xf6, 0xc2, 0xc4, 0x47, 0xc7, 0x0b, 0x99, 0x40, + 0xbf, 0x55, 0x69, 0x97, 0xba, 0x77, 0x69, 0xdd, 0xa0, 0x67, 0x1a, 0x24, 0x1f, 0x42, 0x06, 0x38, + 0x6a, 0x6b, 0xa2, 0xb5, 0xa3, 0xbd, 0xf6, 0x0d, 0x78, 0xa1, 0xb0, 0xce, 0x3f, 0x65, 0x38, 0x5c, + 0x4e, 0x4f, 0x4c, 0x58, 0x2c, 0x90, 0x8c, 0xa1, 0x91, 0x6d, 0x48, 0x38, 0xc3, 0x99, 0x4a, 0xa6, + 0xd4, 0xae, 0x74, 0x6b, 0x27, 0x67, 0x76, 0x71, 0x53, 0xec, 0x75, 0x54, 0x76, 0x8e, 0x9c, 0xce, + 0xce, 0xfd, 0xc7, 0xb1, 0xe4, 0x33, 0x5a, 0xbf, 0x5a, 0xc4, 0xac, 0x3e, 0x54, 0x73, 0x27, 0x72, + 0x06, 0xd5, 0xdc, 0x6a, 0x62, 0x3e, 0x28, 0x88, 0xa9, 0x4b, 0x9c, 0xf3, 0xd3, 0xf9, 0x3a, 0xeb, + 0x37, 0x20, 0xab, 0x61, 0xc9, 0x3d, 0xa8, 0x8c, 0x71, 0x66, 0x6a, 0xae, 0x3e, 0x49, 0x1f, 0xee, + 0x4c, 0xdd, 0x30, 0xc1, 0x56, 0xb9, 0x5d, 0xea, 0xd6, 0x4e, 0x1e, 0xfd, 0xf7, 0xcd, 0xd1, 0x94, + 0xe8, 0x51, 0xf9, 0x9b, 0x52, 0xe7, 0x17, 0x68, 0x2d, 0x38, 0x0f, 0xa4, 0x2b, 0x93, 0xbc, 0xf1, + 0xef, 0x43, 0x2d, 0x9f, 0x76, 0x5d, 0x54, 0x95, 0x0b, 0x64, 0xd0, 0xf9, 0x9a, 0xbe, 0x95, 0xd7, + 0xf4, 0xcd, 0x83, 0x77, 0x28, 0x46, 0x6c, 0x8a, 
0xf9, 0xe6, 0x57, 0xe8, 0xfd, 0x55, 0x7a, 0xff, + 0xdc, 0x27, 0x0f, 0xe1, 0x9e, 0x52, 0x45, 0x30, 0x45, 0x27, 0x0b, 0x6a, 0x22, 0x34, 0x0c, 0x9e, + 0x51, 0x76, 0x7e, 0x2f, 0x81, 0xf5, 0x14, 0x25, 0x4d, 0xe2, 0x38, 0x88, 0x47, 0x5b, 0x8d, 0x70, + 0x0b, 0xf6, 0xa6, 0xc8, 0x45, 0xc0, 0x62, 0x4d, 0x7a, 0x87, 0x66, 0xbf, 0xe4, 0x3d, 0x00, 0x2d, + 0x24, 0x47, 0x06, 0x11, 0xea, 0x89, 0xad, 0xd0, 0xaa, 0x46, 0x2e, 0x82, 0x08, 0xc9, 0xbb, 0x70, + 0x17, 0x63, 0x3f, 0x35, 0xee, 0x68, 0xe3, 0x1e, 0xc6, 0xbe, 0x32, 0x75, 0xbe, 0x83, 0xe3, 0xb5, + 0x59, 0x98, 0x49, 0xfd, 0x00, 0xf6, 0x17, 0x0a, 0x9a, 0x8e, 0x4c, 0x95, 0xd6, 0xe6, 0x15, 0x15, + 0x9d, 0x4f, 0x01, 0x9e, 0xcf, 0x0b, 0xfc, 0xa6, 0x0e, 0x74, 0xfe, 0x2a, 0x41, 0x63, 0x30, 0x0e, + 0x26, 0xaa, 0xd4, 0x5b, 0xb7, 0xcd, 0x86, 0xfb, 0xaa, 0x5d, 0x0e, 0xc7, 0x4b, 0xe4, 0x18, 0x7b, + 0xe8, 0xe8, 0xe2, 0x94, 0xb5, 0x63, 0x53, 0x6a, 0x2a, 0x63, 0x79, 0xa6, 0x2a, 0xf5, 0x04, 0xf6, + 0xcc, 0x49, 0xa1, 0x8b, 0x51, 0x3b, 0xf9, 0x64, 0xe3, 0x90, 0x5f, 0xcb, 0x87, 0x66, 0x8b, 0x3b, + 0x03, 0x68, 0x5d, 0x20, 0x8f, 0x82, 0xd8, 0x95, 0x1b, 0x86, 0xa1, 0x20, 0xe9, 0x23, 0xd8, 0xe5, + 0xe8, 0x0a, 0xd3, 0xad, 0x2a, 0x35, 0x7f, 0x27, 0x7f, 0xef, 0x43, 0x23, 0x1f, 0xdf, 0xf4, 0x48, + 0x26, 0xaf, 0xa0, 0x3e, 0x50, 0xed, 0xca, 0x70, 0xf2, 0xf9, 0xe6, 0x84, 0x17, 0x7d, 0x4d, 0x42, + 0xd6, 0x47, 0x9b, 0xf4, 0xb5, 0xd0, 0xa2, 0xd7, 0xb0, 0xbf, 0xa8, 0x36, 0xd2, 0xdb, 0x5e, 0x97, + 0x69, 0xa0, 0xcf, 0x6e, 0x2a, 0x64, 0x32, 0x81, 0xe6, 0x8a, 0x66, 0xc9, 0x97, 0x5b, 0xd2, 0x2c, + 0x49, 0xdc, 0xda, 0xee, 0xb8, 0x22, 0x2f, 0xe0, 0x60, 0x59, 0xc3, 0x85, 0x15, 0xd5, 0xe1, 0xd6, + 0xea, 0xdd, 0x3a, 0xb2, 0xd3, 0x7b, 0xd0, 0xce, 0xee, 0x41, 0xfb, 0xb1, 0xba, 0x07, 0xc9, 0x1f, + 0xe9, 0xbd, 0x73, 0x5d, 0x35, 0xe4, 0xab, 0x37, 0xec, 0xa8, 0x40, 0xec, 0xd6, 0xd7, 0x37, 0x5e, + 0x67, 0xea, 0xda, 0x87, 0x83, 0xef, 0xd1, 0x0b, 0xfc, 0xf9, 0x2e, 0xb7, 0x1c, 0x82, 0xc2, 0xad, + 0xfd, 0x00, 0xf5, 0xbe, 0x9b, 0x88, 0xdb, 0x23, 0xec, 0xab, 0x46, 0x88, 0x24, 0xba, 0x3d, 0xc6, + 0x17, 0x70, 0x98, 0x09, 0xf6, 0x09, 0x67, 0x51, 0xce, 0xfb, 0xf1, 0x26, 0xde, 0x6b, 0x12, 0x2f, + 0x24, 0x7f, 0x05, 0x75, 0xaa, 0x1e, 0x1c, 0x5b, 0x0a, 0x71, 0xc9, 0xf7, 0xa6, 0x42, 0xfc, 0x11, + 0x1a, 0x14, 0xc5, 0x92, 0xec, 0x6f, 0xa1, 0x7d, 0x14, 0x25, 0x9f, 0xdd, 0x1a, 0xe1, 0x4f, 0x70, + 0x44, 0x51, 0xcc, 0xc5, 0x78, 0xe6, 0x86, 0xe1, 0xd0, 0xf5, 0xc6, 0xe2, 0x7f, 0x33, 0xbb, 0xd0, + 0x5c, 0x39, 0x5b, 0x37, 0x9f, 0x09, 0x45, 0x47, 0x71, 0x61, 0x88, 0x10, 0x76, 0x07, 0xfa, 0x99, + 0x4a, 0x1e, 0x6e, 0x9c, 0x0d, 0xed, 0xb3, 0x95, 0x18, 0xf3, 0x33, 0x29, 0x89, 0x22, 0x97, 0xcf, + 0xb2, 0x95, 0x22, 0x09, 0x25, 0x11, 0x50, 0x4f, 0xff, 0x4f, 0x67, 0xfa, 0x1d, 0xf1, 0x36, 0x82, + 0x9e, 0x3e, 0x3b, 0x6d, 0x5e, 0xbb, 0x4b, 0xfa, 0xc3, 0x9f, 0xbf, 0x1d, 0x05, 0xf2, 0x65, 0x32, + 0x54, 0x9c, 0x3d, 0xc3, 0xd9, 0xcb, 0x39, 0x7b, 0x5e, 0x18, 0x60, 0x2c, 0x7b, 0x23, 0xa6, 0xdf, + 0xf1, 0x73, 0x5c, 0xfd, 0x0e, 0x77, 0x75, 0x09, 0xbf, 0xf8, 0x37, 0x00, 0x00, 0xff, 0xff, 0xeb, + 0x48, 0x45, 0x4d, 0x3b, 0x0c, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/dynamicforkjointask.pb.go b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go new file mode 100644 index 0000000000..5fc8eb16ce --- /dev/null +++ b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go @@ -0,0 +1,124 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
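For reference, every unary handler in the service file above follows the same shape: decode the request, then either dispatch directly or wrap the typed dispatch in a closure for the server's UnaryServerInterceptor. Below is a minimal, illustrative sketch (not part of this patch) of serving that descriptor with a logging interceptor. It assumes the RegisterWorkflowServiceServer helper that protoc-gen-go conventionally emits alongside the descriptor, and an import path inferred by analogy with the model package paths in this patch.

package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"

	// Import path assumed from the model package convention used in this patch.
	pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc"
)

// logCalls satisfies grpc.UnaryServerInterceptor. Each generated handler above
// hands it the UnaryServerInfo it builds (Server plus FullMethod) and a closure
// that performs the typed dispatch, so one interceptor observes all sixteen
// WorkflowService methods.
func logCalls(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
	log.Printf("rpc %s", info.FullMethod)
	return handler(ctx, req)
}

func main() {
	lis, err := net.Listen("tcp", ":8090") // port is illustrative
	if err != nil {
		log.Fatal(err)
	}
	srv := grpc.NewServer(grpc.UnaryInterceptor(logCalls))
	var impl pb.WorkflowServiceServer // a real implementation goes here
	pb.RegisterWorkflowServiceServer(srv, impl) // assumed generated helper
	log.Fatal(srv.Serve(lis))
}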
+// source: model/dynamicforkjointask.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DynamicForkJoinTask struct { + TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName" json:"task_name,omitempty"` + WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName" json:"workflow_name,omitempty"` + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName" json:"reference_name,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicForkJoinTask) Reset() { *m = DynamicForkJoinTask{} } +func (m *DynamicForkJoinTask) String() string { return proto.CompactTextString(m) } +func (*DynamicForkJoinTask) ProtoMessage() {} +func (*DynamicForkJoinTask) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamicforkjointask_d18821af65a16be7, []int{0} +} +func (m *DynamicForkJoinTask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicForkJoinTask.Unmarshal(m, b) +} +func (m *DynamicForkJoinTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicForkJoinTask.Marshal(b, m, deterministic) +} +func (dst *DynamicForkJoinTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicForkJoinTask.Merge(dst, src) +} +func (m *DynamicForkJoinTask) XXX_Size() int { + return xxx_messageInfo_DynamicForkJoinTask.Size(m) +} +func (m *DynamicForkJoinTask) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicForkJoinTask.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicForkJoinTask proto.InternalMessageInfo + +func (m *DynamicForkJoinTask) GetTaskName() string { + if m != nil { + return m.TaskName + } + return "" +} + +func (m *DynamicForkJoinTask) GetWorkflowName() string { + if m != nil { + return m.WorkflowName + } + return "" +} + +func (m *DynamicForkJoinTask) GetReferenceName() string { + if m != nil { + return m.ReferenceName + } + return "" +} + +func (m *DynamicForkJoinTask) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *DynamicForkJoinTask) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func init() { + proto.RegisterType((*DynamicForkJoinTask)(nil), "com.netflix.conductor.proto.DynamicForkJoinTask") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.DynamicForkJoinTask.InputEntry") +} + +func init() { + proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_d18821af65a16be7) +} + +var fileDescriptor_dynamicforkjointask_d18821af65a16be7 = []byte{ + // 324 bytes of a gzipped 
FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x15, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, + 0x50, 0x2f, 0x52, 0x6f, 0x45, 0x05, 0x3d, 0x48, 0xad, 0xe2, 0xc1, 0x8b, 0xec, 0xa6, 0xb3, 0x6b, + 0xdc, 0xdd, 0xcc, 0x92, 0x66, 0xad, 0xfb, 0xc9, 0xfc, 0x7a, 0x92, 0xa4, 0xad, 0x1e, 0x8a, 0xb7, + 0xd9, 0x37, 0xbf, 0x9d, 0x37, 0x6f, 0x42, 0x8e, 0x33, 0x9c, 0x41, 0xca, 0x67, 0xa5, 0x0a, 0x32, + 0x29, 0x22, 0xd4, 0xc9, 0x07, 0x4a, 0x65, 0x82, 0x79, 0xc2, 0x72, 0x8d, 0x06, 0x69, 0x4f, 0x60, + 0xc6, 0x14, 0x98, 0x28, 0x95, 0x5f, 0x4c, 0xa0, 0x9a, 0x15, 0xc2, 0xa0, 0xf6, 0xcd, 0xee, 0x51, + 0x8c, 0x18, 0xa7, 0xc0, 0xdd, 0x57, 0x58, 0x44, 0x7c, 0x6e, 0x74, 0x21, 0x8c, 0xef, 0x9e, 0x7c, + 0x57, 0xc9, 0xfe, 0xb5, 0x1f, 0x7c, 0x8b, 0x3a, 0xb9, 0x47, 0xa9, 0x9e, 0x83, 0x79, 0x42, 0x7b, + 0xa4, 0x69, 0x0d, 0xde, 0x54, 0x90, 0x41, 0xa7, 0xd2, 0xaf, 0x0c, 0x9a, 0xd3, 0x6d, 0x2b, 0x3c, + 0x04, 0x19, 0xd0, 0x53, 0xd2, 0x5e, 0xa0, 0x4e, 0xa2, 0x14, 0x17, 0x1e, 0xa8, 0x3a, 0x60, 0x67, + 0x25, 0x3a, 0xe8, 0x8c, 0xec, 0x6a, 0x88, 0x40, 0x83, 0x12, 0xe0, 0xa9, 0x9a, 0xa3, 0xda, 0x6b, + 0xd5, 0x61, 0x8f, 0xa4, 0x21, 0x55, 0x5e, 0x98, 0x4e, 0xbd, 0x5f, 0x1b, 0xb4, 0x86, 0x57, 0xec, + 0x9f, 0x2c, 0x6c, 0xc3, 0xa6, 0xec, 0xce, 0xfe, 0x7d, 0xa3, 0x8c, 0x2e, 0xa7, 0x7e, 0x12, 0xa5, + 0xa4, 0x6e, 0xca, 0x1c, 0x3a, 0x0d, 0xe7, 0xe7, 0xea, 0xee, 0x84, 0x90, 0x5f, 0x90, 0xee, 0x91, + 0x5a, 0x02, 0xe5, 0x32, 0x97, 0x2d, 0xe9, 0x39, 0x69, 0x7c, 0x06, 0x69, 0xe1, 0xa3, 0xb4, 0x86, + 0x87, 0xcc, 0x5f, 0x8d, 0xad, 0xae, 0xc6, 0x5e, 0x6c, 0x77, 0xea, 0xa1, 0x51, 0xf5, 0xb2, 0x32, + 0x7e, 0x1a, 0x1f, 0x6c, 0x58, 0x67, 0x12, 0xbe, 0x8e, 0x62, 0x69, 0xde, 0x8b, 0xd0, 0x06, 0xe1, + 0xcb, 0x20, 0x7c, 0x1d, 0x84, 0x8b, 0x54, 0x82, 0x32, 0x3c, 0xc6, 0x58, 0xe7, 0xe2, 0x8f, 0xee, + 0xde, 0x38, 0xdc, 0x72, 0x7e, 0x17, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xd6, 0x52, 0xf3, + 0xf3, 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go new file mode 100644 index 0000000000..007f73e2a7 --- /dev/null +++ b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go @@ -0,0 +1,82 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/dynamicforkjointasklist.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type DynamicForkJoinTaskList struct { + DynamicTasks []*DynamicForkJoinTask `protobuf:"bytes,1,rep,name=dynamic_tasks,json=dynamicTasks" json:"dynamic_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DynamicForkJoinTaskList) Reset() { *m = DynamicForkJoinTaskList{} } +func (m *DynamicForkJoinTaskList) String() string { return proto.CompactTextString(m) } +func (*DynamicForkJoinTaskList) ProtoMessage() {} +func (*DynamicForkJoinTaskList) Descriptor() ([]byte, []int) { + return fileDescriptor_dynamicforkjointasklist_67e040324fd05201, []int{0} +} +func (m *DynamicForkJoinTaskList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DynamicForkJoinTaskList.Unmarshal(m, b) +} +func (m *DynamicForkJoinTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DynamicForkJoinTaskList.Marshal(b, m, deterministic) +} +func (dst *DynamicForkJoinTaskList) XXX_Merge(src proto.Message) { + xxx_messageInfo_DynamicForkJoinTaskList.Merge(dst, src) +} +func (m *DynamicForkJoinTaskList) XXX_Size() int { + return xxx_messageInfo_DynamicForkJoinTaskList.Size(m) +} +func (m *DynamicForkJoinTaskList) XXX_DiscardUnknown() { + xxx_messageInfo_DynamicForkJoinTaskList.DiscardUnknown(m) +} + +var xxx_messageInfo_DynamicForkJoinTaskList proto.InternalMessageInfo + +func (m *DynamicForkJoinTaskList) GetDynamicTasks() []*DynamicForkJoinTask { + if m != nil { + return m.DynamicTasks + } + return nil +} + +func init() { + proto.RegisterType((*DynamicForkJoinTaskList)(nil), "com.netflix.conductor.proto.DynamicForkJoinTaskList") +} + +func init() { + proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_67e040324fd05201) +} + +var fileDescriptor_dynamicforkjointasklist_67e040324fd05201 = []byte{ + // 195 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xce, 0xcd, 0x4f, 0x49, + 0xcd, 0xd1, 0x4f, 0xa9, 0xcc, 0x4b, 0xcc, 0xcd, 0x4c, 0x4e, 0xcb, 0x2f, 0xca, 0xce, 0xca, 0xcf, + 0xcc, 0x2b, 0x49, 0x2c, 0xce, 0xce, 0xc9, 0x2c, 0x2e, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, + 0x92, 0x4e, 0xce, 0xcf, 0xd5, 0xcb, 0x4b, 0x2d, 0x49, 0xcb, 0xc9, 0xac, 0xd0, 0x4b, 0xce, 0xcf, + 0x4b, 0x29, 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x48, 0x4a, 0xc9, 0xe3, 0x34, 0x01, 0xa2, 0x40, 0xa9, + 0x80, 0x4b, 0xdc, 0x05, 0x22, 0xe9, 0x96, 0x5f, 0x94, 0xed, 0x95, 0x9f, 0x99, 0x17, 0x92, 0x58, + 0x9c, 0xed, 0x93, 0x59, 0x5c, 0x22, 0x14, 0xca, 0xc5, 0x0b, 0xd5, 0x17, 0x0f, 0xd2, 0x50, 0x2c, + 0xc1, 0xa8, 0xc0, 0xac, 0xc1, 0x6d, 0x64, 0xa0, 0x87, 0xc7, 0x42, 0x3d, 0x2c, 0x86, 0x05, 0xf1, + 0x40, 0x8d, 0x01, 0x71, 0x8a, 0x9d, 0xc2, 0x9d, 0x24, 0x71, 0xd8, 0x18, 0x90, 0x14, 0x65, 0x95, + 0x9e, 0x59, 0x92, 0x51, 0x9a, 0x04, 0xb2, 0x42, 0x1f, 0x6a, 0x85, 0x3e, 0xdc, 0x0a, 0xfd, 0xe4, + 0x9c, 0xcc, 0xd4, 0xbc, 0x12, 0xfd, 0xf4, 0xfc, 0xf4, 0xa2, 0x82, 0x64, 0x24, 0x71, 0xb0, 0x27, + 0x93, 0xd8, 0xc0, 0x2e, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x9f, 0xb2, 0x2f, 0xcc, 0x36, + 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/eventexecution.pb.go b/client/gogrpc/conductor/model/eventexecution.pb.go new file mode 100644 index 0000000000..855847c123 --- /dev/null +++ b/client/gogrpc/conductor/model/eventexecution.pb.go @@ -0,0 +1,174 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
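Taken together with DynamicForkJoinTask defined earlier, the list type above is just a repeated wrapper: one task per forked branch, grouped into a single message. An illustrative sketch of building one follows (not generated code; task names and values are hypothetical, and structpb is an alias for the same ptypes/struct package the generated files import as _struct).

package main

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"

	"github.com/netflix/conductor/client/gogrpc/conductor/model"
)

func main() {
	// One DynamicForkJoinTask per forked branch; the list groups the branches.
	task := &model.DynamicForkJoinTask{
		TaskName:      "encode_video", // hypothetical task
		ReferenceName: "encode_video_1",
		Type:          "SIMPLE",
		Input: map[string]*structpb.Value{
			"bitrate": {Kind: &structpb.Value_NumberValue{NumberValue: 4000}},
		},
	}
	list := &model.DynamicForkJoinTaskList{
		DynamicTasks: []*model.DynamicForkJoinTask{task},
	}
	fmt.Println(list.GetDynamicTasks()[0].GetTaskName())
}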
+// source: model/eventexecution.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type EventExecution_Status int32 + +const ( + EventExecution_IN_PROGRESS EventExecution_Status = 0 + EventExecution_COMPLETED EventExecution_Status = 1 + EventExecution_FAILED EventExecution_Status = 2 + EventExecution_SKIPPED EventExecution_Status = 3 +) + +var EventExecution_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "COMPLETED", + 2: "FAILED", + 3: "SKIPPED", +} +var EventExecution_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "COMPLETED": 1, + "FAILED": 2, + "SKIPPED": 3, +} + +func (x EventExecution_Status) String() string { + return proto.EnumName(EventExecution_Status_name, int32(x)) +} +func (EventExecution_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_eventexecution_e6ddf9d2dc999f89, []int{0, 0} +} + +type EventExecution struct { + Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` + MessageId string `protobuf:"bytes,2,opt,name=message_id,json=messageId" json:"message_id,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` + Event string `protobuf:"bytes,4,opt,name=event" json:"event,omitempty"` + Created int64 `protobuf:"varint,5,opt,name=created" json:"created,omitempty"` + Status EventExecution_Status `protobuf:"varint,6,opt,name=status,enum=com.netflix.conductor.proto.EventExecution_Status" json:"status,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventExecution) Reset() { *m = EventExecution{} } +func (m *EventExecution) String() string { return proto.CompactTextString(m) } +func (*EventExecution) ProtoMessage() {} +func (*EventExecution) Descriptor() ([]byte, []int) { + return fileDescriptor_eventexecution_e6ddf9d2dc999f89, []int{0} +} +func (m *EventExecution) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventExecution.Unmarshal(m, b) +} +func (m *EventExecution) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventExecution.Marshal(b, m, deterministic) +} +func (dst *EventExecution) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventExecution.Merge(dst, src) +} +func (m *EventExecution) XXX_Size() int { + return xxx_messageInfo_EventExecution.Size(m) +} +func (m *EventExecution) XXX_DiscardUnknown() { + xxx_messageInfo_EventExecution.DiscardUnknown(m) +} + +var xxx_messageInfo_EventExecution proto.InternalMessageInfo + +func (m *EventExecution) GetId() string { + if m != nil { + return m.Id + } + return "" +} + +func (m *EventExecution) GetMessageId() string { + if m != nil { + return m.MessageId + } + return "" +} + +func (m *EventExecution) 
GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventExecution) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *EventExecution) GetCreated() int64 { + if m != nil { + return m.Created + } + return 0 +} + +func (m *EventExecution) GetStatus() EventExecution_Status { + if m != nil { + return m.Status + } + return EventExecution_IN_PROGRESS +} + +func (m *EventExecution) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func init() { + proto.RegisterType((*EventExecution)(nil), "com.netflix.conductor.proto.EventExecution") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.EventExecution.OutputEntry") + proto.RegisterEnum("com.netflix.conductor.proto.EventExecution_Status", EventExecution_Status_name, EventExecution_Status_value) +} + +func init() { + proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_e6ddf9d2dc999f89) +} + +var fileDescriptor_eventexecution_e6ddf9d2dc999f89 = []byte{ + // 393 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x41, 0x8f, 0x94, 0x30, + 0x18, 0x15, 0xd8, 0x61, 0x9d, 0x8f, 0x38, 0x92, 0xc6, 0x98, 0x66, 0xd4, 0x84, 0xec, 0x89, 0x83, + 0x29, 0x09, 0x1e, 0x34, 0x7b, 0xdb, 0x75, 0xaa, 0x41, 0xd7, 0x05, 0xc1, 0x78, 0xf0, 0xb2, 0x81, + 0xd2, 0x45, 0x22, 0xd0, 0x09, 0xb4, 0x9b, 0x99, 0x3f, 0xe6, 0xef, 0x33, 0x14, 0xc6, 0xcc, 0x5c, + 0x4c, 0xbc, 0x7d, 0xdf, 0x7b, 0xfd, 0x5e, 0xde, 0x7b, 0x29, 0xac, 0x5b, 0x51, 0xf2, 0x26, 0xe0, + 0x0f, 0xbc, 0x93, 0x7c, 0xc7, 0x99, 0x92, 0xb5, 0xe8, 0xc8, 0xb6, 0x17, 0x52, 0xa0, 0x17, 0x4c, + 0xb4, 0xa4, 0xe3, 0xf2, 0xbe, 0xa9, 0x77, 0x84, 0x89, 0xae, 0x54, 0x4c, 0x8a, 0x7e, 0x22, 0xd7, + 0x2f, 0x2b, 0x21, 0xaa, 0x86, 0x07, 0x7a, 0x2b, 0xd4, 0x7d, 0x30, 0xc8, 0x5e, 0x31, 0x39, 0xb1, + 0x17, 0xbf, 0x2d, 0x58, 0xd1, 0x51, 0x93, 0x1e, 0x34, 0xd1, 0x0a, 0xcc, 0xba, 0xc4, 0x86, 0x67, + 0xf8, 0xcb, 0xd4, 0xac, 0x4b, 0xf4, 0x0a, 0xa0, 0xe5, 0xc3, 0x90, 0x57, 0xfc, 0xae, 0x2e, 0xb1, + 0xa9, 0xf1, 0xe5, 0x8c, 0x44, 0x25, 0x42, 0x70, 0xd6, 0xe5, 0x2d, 0xc7, 0x96, 0x26, 0xf4, 0x8c, + 0x9e, 0xc1, 0x42, 0x1b, 0xc5, 0x67, 0x1a, 0x9c, 0x16, 0x84, 0xe1, 0x9c, 0xf5, 0x3c, 0x97, 0xbc, + 0xc4, 0x0b, 0xcf, 0xf0, 0xad, 0xf4, 0xb0, 0xa2, 0x4f, 0x60, 0x0f, 0x32, 0x97, 0x6a, 0xc0, 0xb6, + 0x67, 0xf8, 0xab, 0x30, 0x24, 0xff, 0x48, 0x44, 0x4e, 0xfd, 0x92, 0x4c, 0x5f, 0xa6, 0xb3, 0x02, + 0x8a, 0xc1, 0x16, 0x4a, 0x6e, 0x95, 0xc4, 0x8f, 0x3d, 0xcb, 0x77, 0xc2, 0xb7, 0xff, 0xa3, 0x15, + 0xeb, 0x4b, 0xda, 0xc9, 0x7e, 0x9f, 0xce, 0x32, 0xeb, 0xaf, 0xe0, 0x1c, 0xc1, 0xc8, 0x05, 0xeb, + 0x17, 0xdf, 0xcf, 0xfd, 0x8c, 0x23, 0x7a, 0x0d, 0x8b, 0x87, 0xbc, 0x51, 0x5c, 0x77, 0xe3, 0x84, + 0xcf, 0xc9, 0xd4, 0x38, 0x39, 0x34, 0x4e, 0xbe, 0x8f, 0x6c, 0x3a, 0x3d, 0xba, 0x34, 0xdf, 0x19, + 0x17, 0x57, 0x60, 0x4f, 0xae, 0xd1, 0x53, 0x70, 0xa2, 0xdb, 0xbb, 0x24, 0x8d, 0x3f, 0xa6, 0x34, + 0xcb, 0xdc, 0x47, 0xe8, 0x09, 0x2c, 0xdf, 0xc7, 0x5f, 0x92, 0x1b, 0xfa, 0x8d, 0x6e, 0x5c, 0x03, + 0x01, 0xd8, 0x1f, 0xae, 0xa2, 0x1b, 0xba, 0x71, 0x4d, 0xe4, 0xc0, 0x79, 0xf6, 0x39, 0x4a, 0x12, + 0xba, 0x71, 0xad, 0xeb, 0xdb, 0x6b, 0xf7, 0xd4, 0x7b, 0x52, 0xfc, 0xb8, 0xac, 0x6a, 0xf9, 0x53, + 0x15, 0x63, 0xe0, 0x60, 0x0e, 0x1c, 0xfc, 0x0d, 0x1c, 0xb0, 0xa6, 0xe6, 0x9d, 0x0c, 0x2a, 0x51, + 0xf5, 0x5b, 0x76, 0x84, 0xeb, 0x8f, 0x55, 0xd8, 0xda, 0xed, 0x9b, 0x3f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0xa4, 0x08, 0x46, 0x66, 0x68, 0x02, 0x00, 0x00, +} diff --git 
a/client/gogrpc/conductor/model/polldata.pb.go b/client/gogrpc/conductor/model/polldata.pb.go new file mode 100644 index 0000000000..69f1cb74e3 --- /dev/null +++ b/client/gogrpc/conductor/model/polldata.pb.go @@ -0,0 +1,105 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/polldata.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type PollData struct { + QueueName string `protobuf:"bytes,1,opt,name=queue_name,json=queueName" json:"queue_name,omitempty"` + Domain string `protobuf:"bytes,2,opt,name=domain" json:"domain,omitempty"` + WorkerId string `protobuf:"bytes,3,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + LastPollTime int64 `protobuf:"varint,4,opt,name=last_poll_time,json=lastPollTime" json:"last_poll_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollData) Reset() { *m = PollData{} } +func (m *PollData) String() string { return proto.CompactTextString(m) } +func (*PollData) ProtoMessage() {} +func (*PollData) Descriptor() ([]byte, []int) { + return fileDescriptor_polldata_a39ed67c311a83df, []int{0} +} +func (m *PollData) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollData.Unmarshal(m, b) +} +func (m *PollData) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollData.Marshal(b, m, deterministic) +} +func (dst *PollData) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollData.Merge(dst, src) +} +func (m *PollData) XXX_Size() int { + return xxx_messageInfo_PollData.Size(m) +} +func (m *PollData) XXX_DiscardUnknown() { + xxx_messageInfo_PollData.DiscardUnknown(m) +} + +var xxx_messageInfo_PollData proto.InternalMessageInfo + +func (m *PollData) GetQueueName() string { + if m != nil { + return m.QueueName + } + return "" +} + +func (m *PollData) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *PollData) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *PollData) GetLastPollTime() int64 { + if m != nil { + return m.LastPollTime + } + return 0 +} + +func init() { + proto.RegisterType((*PollData)(nil), "com.netflix.conductor.proto.PollData") +} + +func init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_a39ed67c311a83df) } + +var fileDescriptor_polldata_a39ed67c311a83df = []byte{ + // 224 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0x31, 0x4b, 0x03, 0x41, + 0x10, 0x46, 0x39, 0x23, 0x21, 0xb7, 0x88, 0xc5, 0x22, 0x72, 0x10, 0x84, 0x20, 0x16, 0xa9, 0x6e, + 0x0b, 0x3b, 0xcb, 0x60, 0xa1, 0x8d, 0x84, 0x60, 0x65, 0x73, 0xec, 0xed, 0x8e, 0xe7, 0xe2, 0xcc, + 0xce, 0xb9, 0xce, 0xa2, 0x3f, 0xc0, 0x1f, 0x2e, 0xb7, 0x89, 0x92, 0x72, 0xde, 0x2b, 0xe6, 0x7d, + 0xea, 0x82, 0xd8, 0x03, 0x9a, 0x91, 0x11, 0xbd, 0x15, 0xdb, 0x8e, 0x89, 0x85, 0xf5, 0xd2, 0x31, + 0xb5, 
0x11, 0xe4, 0x15, 0xc3, 0x77, 0xeb, 0x38, 0xfa, 0xec, 0x84, 0xd3, 0x5e, 0x5e, 0xff, 0x54, + 0x6a, 0xb1, 0x65, 0xc4, 0x7b, 0x2b, 0x56, 0x5f, 0x29, 0xf5, 0x91, 0x21, 0x43, 0x17, 0x2d, 0x41, + 0x53, 0xad, 0xaa, 0x75, 0xbd, 0xab, 0x0b, 0x79, 0xb2, 0x04, 0xfa, 0x52, 0xcd, 0x3d, 0x93, 0x0d, + 0xb1, 0x39, 0x29, 0xea, 0x70, 0xe9, 0xa5, 0xaa, 0xbf, 0x38, 0xbd, 0x43, 0xea, 0x82, 0x6f, 0x66, + 0x45, 0x2d, 0xf6, 0xe0, 0xd1, 0xeb, 0x1b, 0x75, 0x8e, 0xf6, 0x53, 0xba, 0x29, 0xaa, 0x93, 0x40, + 0xd0, 0x9c, 0xae, 0xaa, 0xf5, 0x6c, 0x77, 0x36, 0xd1, 0xe9, 0xf3, 0x73, 0x20, 0xd8, 0x3c, 0x6c, + 0xd4, 0x5f, 0xc5, 0xb6, 0x7f, 0xb9, 0x1b, 0x82, 0xbc, 0xe5, 0xbe, 0x75, 0x4c, 0xe6, 0x10, 0x6e, + 0xfe, 0xc3, 0x8d, 0xc3, 0x00, 0x51, 0xcc, 0xc0, 0x43, 0x1a, 0xdd, 0x11, 0x2f, 0xe3, 0xfb, 0x79, + 0xd9, 0x75, 0xfb, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x47, 0x36, 0x49, 0x42, 0x0c, 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go new file mode 100644 index 0000000000..cfc15ce8e9 --- /dev/null +++ b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go @@ -0,0 +1,128 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/rerunworkflowrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
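The PollData getters above are nil-safe, so a sketch like the following can read fields without guarding the receiver. This is illustrative only; LastPollTime is assumed here to be epoch milliseconds, and the queue and worker values are placeholders.

package main

import (
	"fmt"
	"time"

	"github.com/netflix/conductor/client/gogrpc/conductor/model"
)

// printPollData relies on the nil-safe getters generated above.
func printPollData(pd *model.PollData) {
	// LastPollTime is assumed to be epoch milliseconds.
	last := time.Unix(0, pd.GetLastPollTime()*int64(time.Millisecond))
	fmt.Printf("queue=%s domain=%s worker=%s lastPoll=%s\n",
		pd.GetQueueName(), pd.GetDomain(), pd.GetWorkerId(), last)
}

func main() {
	printPollData(&model.PollData{
		QueueName:    "encode", // illustrative values
		WorkerId:     "worker-1",
		LastPollTime: time.Now().UnixNano() / int64(time.Millisecond),
	})
}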
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type RerunWorkflowRequest struct { + ReRunFromWorkflowId string `protobuf:"bytes,1,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId" json:"re_run_from_workflow_id,omitempty"` + WorkflowInput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=workflow_input,json=workflowInput" json:"workflow_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ReRunFromTaskId string `protobuf:"bytes,3,opt,name=re_run_from_task_id,json=reRunFromTaskId" json:"re_run_from_task_id,omitempty"` + TaskInput map[string]*_struct.Value `protobuf:"bytes,4,rep,name=task_input,json=taskInput" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + CorrelationId string `protobuf:"bytes,5,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RerunWorkflowRequest) Reset() { *m = RerunWorkflowRequest{} } +func (m *RerunWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RerunWorkflowRequest) ProtoMessage() {} +func (*RerunWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d, []int{0} +} +func (m *RerunWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RerunWorkflowRequest.Unmarshal(m, b) +} +func (m *RerunWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RerunWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RerunWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RerunWorkflowRequest.Merge(dst, src) +} +func (m *RerunWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RerunWorkflowRequest.Size(m) +} +func (m *RerunWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RerunWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RerunWorkflowRequest proto.InternalMessageInfo + +func (m *RerunWorkflowRequest) GetReRunFromWorkflowId() string { + if m != nil { + return m.ReRunFromWorkflowId + } + return "" +} + +func (m *RerunWorkflowRequest) GetWorkflowInput() map[string]*_struct.Value { + if m != nil { + return m.WorkflowInput + } + return nil +} + +func (m *RerunWorkflowRequest) GetReRunFromTaskId() string { + if m != nil { + return m.ReRunFromTaskId + } + return "" +} + +func (m *RerunWorkflowRequest) GetTaskInput() map[string]*_struct.Value { + if m != nil { + return m.TaskInput + } + return nil +} + +func (m *RerunWorkflowRequest) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func init() { + proto.RegisterType((*RerunWorkflowRequest)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest.WorkflowInputEntry") +} + +func init() { + proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d) +} + +var fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d = []byte{ + // 369 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4b, 0x6b, 0xdb, 0x40, + 0x10, 0xc7, 0x91, 0x1f, 0x05, 0xaf, 0xb1, 0x5b, 0xd6, 0xc5, 0x35, 0x6e, 
0x0f, 0xa2, 0x50, 0xf0, + 0xc1, 0xac, 0xc0, 0xed, 0xa1, 0xf8, 0x14, 0x4c, 0x12, 0xf0, 0x2d, 0x08, 0x93, 0x84, 0x5c, 0x84, + 0x1e, 0x2b, 0x45, 0x48, 0xda, 0x71, 0x46, 0xbb, 0x71, 0xfc, 0x69, 0xf2, 0x55, 0x83, 0x56, 0x92, + 0xad, 0x10, 0x13, 0x08, 0xe4, 0xa6, 0x79, 0xfd, 0xe7, 0xa7, 0xff, 0x2c, 0x31, 0x33, 0x08, 0x78, + 0x6a, 0x21, 0x47, 0x25, 0x76, 0x80, 0x49, 0x98, 0xc2, 0x0e, 0xf9, 0x83, 0xe2, 0xb9, 0x64, 0x5b, + 0x04, 0x09, 0xf4, 0xa7, 0x0f, 0x19, 0x13, 0x5c, 0x86, 0x69, 0xfc, 0xc4, 0x7c, 0x10, 0x81, 0xf2, + 0x25, 0x60, 0x59, 0x9c, 0xfe, 0x8a, 0x00, 0xa2, 0x94, 0x5b, 0x3a, 0xf2, 0x54, 0x68, 0xe5, 0x12, + 0x95, 0x5f, 0x8d, 0xfe, 0x7e, 0xee, 0x90, 0xef, 0x76, 0xa1, 0x7c, 0x53, 0x29, 0xdb, 0xa5, 0x32, + 0xfd, 0x47, 0x7e, 0x20, 0x77, 0x50, 0x09, 0x27, 0x44, 0xc8, 0x9c, 0x7a, 0xb1, 0x13, 0x07, 0x13, + 0xc3, 0x34, 0x66, 0x3d, 0x7b, 0x84, 0xdc, 0x56, 0xe2, 0x12, 0x21, 0xab, 0x47, 0xd7, 0x01, 0x4d, + 0xc8, 0xf0, 0xd8, 0x29, 0xb6, 0x4a, 0x4e, 0x5a, 0x66, 0x7b, 0xd6, 0x5f, 0x9c, 0xb3, 0x77, 0x10, + 0xd9, 0x29, 0x00, 0x76, 0x50, 0x2d, 0x64, 0x2e, 0x84, 0xc4, 0xbd, 0x3d, 0xd8, 0x35, 0x73, 0x74, + 0x4e, 0x46, 0x4d, 0x44, 0xe9, 0xe6, 0x49, 0x81, 0xd7, 0xd6, 0x78, 0x5f, 0x0f, 0x78, 0x1b, 0x37, + 0x4f, 0xd6, 0x01, 0x75, 0x08, 0x29, 0x3b, 0x34, 0x56, 0x47, 0x63, 0x9d, 0x7d, 0x1c, 0x4b, 0xab, + 0x1d, 0x91, 0x7a, 0xb2, 0x8e, 0xe9, 0x1f, 0x32, 0xf4, 0x01, 0x91, 0xa7, 0xae, 0x8c, 0x41, 0x14, + 0x24, 0x5d, 0x4d, 0x32, 0x68, 0x64, 0xd7, 0xc1, 0xf4, 0x96, 0xd0, 0xb7, 0xbf, 0x46, 0xbf, 0x91, + 0x76, 0xc2, 0xf7, 0x95, 0xb5, 0xc5, 0x27, 0x9d, 0x93, 0xee, 0xa3, 0x9b, 0x2a, 0x3e, 0x69, 0x99, + 0xc6, 0xac, 0xbf, 0x18, 0xb3, 0xf2, 0x8e, 0xac, 0xbe, 0x23, 0xbb, 0x2e, 0xaa, 0x76, 0xd9, 0xb4, + 0x6c, 0xfd, 0x37, 0xa6, 0x1b, 0x32, 0x7c, 0x4d, 0xf7, 0x19, 0xaa, 0xab, 0xcd, 0x6a, 0x7c, 0xca, + 0x88, 0x2b, 0xef, 0x6e, 0x19, 0xc5, 0xf2, 0x5e, 0x79, 0x85, 0x87, 0x56, 0xe5, 0xa1, 0x75, 0xf0, + 0xd0, 0xf2, 0xd3, 0x98, 0x0b, 0x69, 0x45, 0x10, 0xe1, 0xd6, 0x6f, 0xe4, 0xf5, 0x6b, 0xf6, 0xbe, + 0xe8, 0x85, 0x7f, 0x5f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x61, 0xde, 0x4e, 0x72, 0xdd, 0x02, 0x00, + 0x00, +} diff --git a/client/gogrpc/conductor/model/skiptaskrequest.pb.go b/client/gogrpc/conductor/model/skiptaskrequest.pb.go new file mode 100644 index 0000000000..047399ec96 --- /dev/null +++ b/client/gogrpc/conductor/model/skiptaskrequest.pb.go @@ -0,0 +1,99 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/skiptaskrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
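A short sketch of populating the RerunWorkflowRequest defined above: rerun an existing workflow from one of its tasks, overriding that task's input. This is not part of the patch; the IDs and input values are placeholders.

package main

import (
	"fmt"

	structpb "github.com/golang/protobuf/ptypes/struct"

	"github.com/netflix/conductor/client/gogrpc/conductor/model"
)

func main() {
	// Rerun a workflow from a given task, supplying fresh input for that task.
	req := &model.RerunWorkflowRequest{
		ReRunFromWorkflowId: "wf-123", // placeholder IDs
		ReRunFromTaskId:     "task-7",
		CorrelationId:       "rerun-demo",
		TaskInput: map[string]*structpb.Value{
			"retry_reason": {Kind: &structpb.Value_StringValue{StringValue: "upstream fix applied"}},
		},
	}
	fmt.Println(req.GetReRunFromWorkflowId(), len(req.GetTaskInput()))
}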
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SkipTaskRequest struct { + TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } +func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } +func (*SkipTaskRequest) ProtoMessage() {} +func (*SkipTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_skiptaskrequest_877c4b9b26d6de32, []int{0} +} +func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) +} +func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) +} +func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskRequest.Merge(dst, src) +} +func (m *SkipTaskRequest) XXX_Size() int { + return xxx_messageInfo_SkipTaskRequest.Size(m) +} +func (m *SkipTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo + +func (m *SkipTaskRequest) GetTaskInput() map[string]*_struct.Value { + if m != nil { + return m.TaskInput + } + return nil +} + +func (m *SkipTaskRequest) GetTaskOutput() map[string]*_struct.Value { + if m != nil { + return m.TaskOutput + } + return nil +} + +func init() { + proto.RegisterType((*SkipTaskRequest)(nil), "com.netflix.conductor.proto.SkipTaskRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.SkipTaskRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.SkipTaskRequest.TaskOutputEntry") +} + +func init() { + proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_877c4b9b26d6de32) +} + +var fileDescriptor_skiptaskrequest_877c4b9b26d6de32 = []byte{ + // 296 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xcf, 0x4b, 0xfb, 0x30, + 0x18, 0xc6, 0x69, 0xc7, 0xf7, 0x0b, 0x4b, 0xc1, 0x69, 0x0f, 0x52, 0x3a, 0x0f, 0xc5, 0x53, 0x0f, + 0x92, 0xc0, 0xbc, 0xc8, 0xf4, 0x34, 0xf0, 0xe0, 0x69, 0x52, 0xa7, 0x87, 0x81, 0x48, 0x9b, 0x65, + 0x35, 0xb4, 0xeb, 0x5b, 0xd3, 0x37, 0xe2, 0xfe, 0x74, 0x6f, 0x92, 0x64, 0x13, 0xed, 0x41, 0x10, + 0xbc, 0xbd, 0x3f, 0xf2, 0x7c, 0x9e, 0xe4, 0x21, 0x64, 0xbc, 0x81, 0x95, 0xa8, 0x59, 0x57, 0xc9, + 0x16, 0xf3, 0xae, 0x52, 0xe2, 0x45, 0x8b, 0x0e, 0x69, 0xab, 0x00, 0x21, 0x1c, 0x73, 0xd8, 0xd0, + 0x46, 0xe0, 0xba, 0x96, 0x6f, 0x94, 0x43, 0xb3, 0xd2, 0x1c, 0x41, 0xb9, 0x65, 0x7c, 0x52, 0x02, + 0x94, 0xb5, 0x60, 0xb6, 0x2b, 0xf4, 0x9a, 0x75, 0xa8, 0x34, 0xdf, 0x49, 0x4f, 0xdf, 0x7d, 0x32, + 0xba, 0xab, 0x64, 0xbb, 0xc8, 0xbb, 0x2a, 0x73, 0xd0, 0x70, 0x49, 0x88, 0xf1, 0x78, 0x92, 0x4d, + 0xab, 0x31, 0xf2, 0x92, 0x41, 0x1a, 0x4c, 0x2e, 0xe9, 0x0f, 0x1e, 0xb4, 0x47, 0xa0, 0xa6, 0xbe, + 0x31, 0xea, 0xeb, 0x06, 0xd5, 0x36, 0x1b, 0xe2, 0xbe, 0x0f, 0x1f, 0x49, 0x60, 0xd9, 0xa0, 0xd1, + 0xc0, 0x7d, 0x0b, 0xbf, 0xfa, 
0x35, 0x7c, 0x6e, 0xe5, 0x8e, 0x6e, 0x2f, 0xeb, 0x06, 0xf1, 0x82, + 0x1c, 0x7c, 0xf7, 0x0e, 0x0f, 0xc9, 0xa0, 0x12, 0xdb, 0xc8, 0x4b, 0xbc, 0x74, 0x98, 0x99, 0x32, + 0x3c, 0x23, 0xff, 0x5e, 0xf3, 0x5a, 0x8b, 0xc8, 0x4f, 0xbc, 0x34, 0x98, 0x1c, 0x53, 0x17, 0x10, + 0xdd, 0x07, 0x44, 0x1f, 0xcc, 0x36, 0x73, 0x87, 0xa6, 0xfe, 0x85, 0x17, 0xdf, 0x93, 0x51, 0xcf, + 0xf4, 0x2f, 0xb0, 0xb3, 0xf9, 0xec, 0xa8, 0xf7, 0xb6, 0xdb, 0x62, 0x39, 0x2d, 0x25, 0x3e, 0xeb, + 0xc2, 0x24, 0xc2, 0x76, 0x89, 0xb0, 0xcf, 0x44, 0x18, 0xaf, 0xa5, 0x68, 0x90, 0x95, 0x50, 0xaa, + 0x96, 0x7f, 0x99, 0xdb, 0xdf, 0x51, 0xfc, 0xb7, 0x5e, 0xe7, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, + 0x6f, 0xf9, 0xb7, 0xd0, 0x2d, 0x02, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/startworkflowrequest.pb.go b/client/gogrpc/conductor/model/startworkflowrequest.pb.go new file mode 100644 index 0000000000..ac928d8fe5 --- /dev/null +++ b/client/gogrpc/conductor/model/startworkflowrequest.pb.go @@ -0,0 +1,127 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/startworkflowrequest.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StartWorkflowRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + TaskToDomain map[string]string `protobuf:"bytes,5,rep,name=task_to_domain,json=taskToDomain" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartWorkflowRequest) Reset() { *m = StartWorkflowRequest{} } +func (m *StartWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*StartWorkflowRequest) ProtoMessage() {} +func (*StartWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_startworkflowrequest_0f3c58ff07a6063b, []int{0} +} +func (m *StartWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartWorkflowRequest.Unmarshal(m, b) +} +func (m *StartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *StartWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartWorkflowRequest.Merge(dst, src) +} +func (m *StartWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_StartWorkflowRequest.Size(m) +} +func (m *StartWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StartWorkflowRequest.DiscardUnknown(m) +} 
+ +var xxx_messageInfo_StartWorkflowRequest proto.InternalMessageInfo + +func (m *StartWorkflowRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *StartWorkflowRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *StartWorkflowRequest) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *StartWorkflowRequest) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *StartWorkflowRequest) GetTaskToDomain() map[string]string { + if m != nil { + return m.TaskToDomain + } + return nil +} + +func init() { + proto.RegisterType((*StartWorkflowRequest)(nil), "com.netflix.conductor.proto.StartWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.StartWorkflowRequest.InputEntry") + proto.RegisterMapType((map[string]string)(nil), "com.netflix.conductor.proto.StartWorkflowRequest.TaskToDomainEntry") +} + +func init() { + proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_0f3c58ff07a6063b) +} + +var fileDescriptor_startworkflowrequest_0f3c58ff07a6063b = []byte{ + // 353 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0x4b, 0xeb, 0x40, + 0x10, 0xc0, 0x49, 0xd3, 0xbc, 0x47, 0xb7, 0xef, 0x15, 0x5d, 0x4a, 0x09, 0xd5, 0x43, 0x10, 0x84, + 0x1e, 0x64, 0x03, 0xf5, 0x22, 0x45, 0x10, 0xaa, 0x1e, 0x7a, 0x2b, 0xb1, 0x28, 0x78, 0x29, 0xf9, + 0xb3, 0x8d, 0x4b, 0x92, 0x9d, 0xba, 0x99, 0xb4, 0xf6, 0xc3, 0xf9, 0xdd, 0x24, 0x9b, 0x54, 0x03, + 0x16, 0xc1, 0xdb, 0xfc, 0xc9, 0xfc, 0x66, 0xf2, 0x63, 0x89, 0x93, 0x41, 0xc4, 0x53, 0x37, 0x47, + 0x5f, 0xe1, 0x16, 0x54, 0xb2, 0x4a, 0x61, 0xab, 0xf8, 0x6b, 0xc1, 0x73, 0x64, 0x6b, 0x05, 0x08, + 0xf4, 0x24, 0x84, 0x8c, 0x49, 0x8e, 0xab, 0x54, 0xbc, 0xb1, 0x10, 0x64, 0x54, 0x84, 0x08, 0xaa, + 0x6a, 0x0e, 0x4f, 0x63, 0x80, 0x38, 0xe5, 0xae, 0xce, 0x82, 0x62, 0xe5, 0xe6, 0xa8, 0x8a, 0xb0, + 0x1e, 0x3d, 0x7b, 0x37, 0x49, 0xff, 0xa1, 0x24, 0x3f, 0xd5, 0x64, 0xaf, 0x22, 0x53, 0x4a, 0xda, + 0xd2, 0xcf, 0xb8, 0x6d, 0x38, 0xc6, 0xa8, 0xe3, 0xe9, 0x98, 0xda, 0xe4, 0xef, 0x86, 0xab, 0x5c, + 0x80, 0xb4, 0x5b, 0x8e, 0x31, 0xb2, 0xbc, 0x7d, 0x4a, 0xcf, 0x49, 0x2f, 0x04, 0xa5, 0x78, 0xea, + 0xa3, 0x00, 0xb9, 0x14, 0x91, 0x6d, 0xea, 0xb9, 0xff, 0x8d, 0xea, 0x2c, 0xa2, 0x1e, 0xb1, 0x84, + 0x5c, 0x17, 0x68, 0xb7, 0x1d, 0x73, 0xd4, 0x1d, 0x5f, 0xb3, 0x1f, 0x0e, 0x67, 0x87, 0xce, 0x62, + 0xb3, 0x72, 0xfc, 0x5e, 0xa2, 0xda, 0x79, 0x15, 0x8a, 0x0a, 0xd2, 0x43, 0x3f, 0x4f, 0x96, 0x08, + 0xcb, 0x08, 0x32, 0x5f, 0x48, 0xdb, 0xd2, 0xf0, 0xdb, 0xdf, 0xc3, 0x17, 0x7e, 0x9e, 0x2c, 0xe0, + 0x4e, 0x53, 0xaa, 0x1d, 0xff, 0xb0, 0x51, 0x1a, 0xce, 0x09, 0xf9, 0xda, 0x4f, 0x8f, 0x88, 0x99, + 0xf0, 0x5d, 0x2d, 0xa8, 0x0c, 0xe9, 0x05, 0xb1, 0x36, 0x7e, 0x5a, 0x70, 0x6d, 0xa7, 0x3b, 0x1e, + 0xb0, 0x4a, 0x3d, 0xdb, 0xab, 0x67, 0x8f, 0x65, 0xd7, 0xab, 0x3e, 0x9a, 0xb4, 0xae, 0x8c, 0xe1, + 0x0d, 0x39, 0xfe, 0xb6, 0xf4, 0x00, 0xb8, 0xdf, 0x04, 0x77, 0x1a, 0x80, 0xe9, 0x62, 0x3a, 0x38, + 0xf4, 0x2b, 0xf3, 0xe0, 0x79, 0x12, 0x0b, 0x7c, 0x29, 0x82, 0xd2, 0x82, 0x5b, 0x5b, 0x70, 0x3f, + 0x2d, 0xb8, 0x61, 0x2a, 0xb8, 0x44, 0x37, 0x86, 0x58, 0xad, 0xc3, 0x46, 0x5d, 0xbf, 0xb5, 0xe0, + 0x8f, 0xbe, 0xf8, 0xf2, 0x23, 0x00, 0x00, 0xff, 0xff, 0x53, 0xd0, 0xc7, 0x25, 0x7b, 0x02, 0x00, + 0x00, +} diff --git a/client/gogrpc/conductor/model/subworkflowparams.pb.go 
b/client/gogrpc/conductor/model/subworkflowparams.pb.go new file mode 100644 index 0000000000..61f09fd26d --- /dev/null +++ b/client/gogrpc/conductor/model/subworkflowparams.pb.go @@ -0,0 +1,92 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/subworkflowparams.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type SubWorkflowParams struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version *_struct.Value `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SubWorkflowParams) Reset() { *m = SubWorkflowParams{} } +func (m *SubWorkflowParams) String() string { return proto.CompactTextString(m) } +func (*SubWorkflowParams) ProtoMessage() {} +func (*SubWorkflowParams) Descriptor() ([]byte, []int) { + return fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd, []int{0} +} +func (m *SubWorkflowParams) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SubWorkflowParams.Unmarshal(m, b) +} +func (m *SubWorkflowParams) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SubWorkflowParams.Marshal(b, m, deterministic) +} +func (dst *SubWorkflowParams) XXX_Merge(src proto.Message) { + xxx_messageInfo_SubWorkflowParams.Merge(dst, src) +} +func (m *SubWorkflowParams) XXX_Size() int { + return xxx_messageInfo_SubWorkflowParams.Size(m) +} +func (m *SubWorkflowParams) XXX_DiscardUnknown() { + xxx_messageInfo_SubWorkflowParams.DiscardUnknown(m) +} + +var xxx_messageInfo_SubWorkflowParams proto.InternalMessageInfo + +func (m *SubWorkflowParams) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *SubWorkflowParams) GetVersion() *_struct.Value { + if m != nil { + return m.Version + } + return nil +} + +func init() { + proto.RegisterType((*SubWorkflowParams)(nil), "com.netflix.conductor.proto.SubWorkflowParams") +} + +func init() { + proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd) +} + +var fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd = []byte{ + // 213 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x4d, 0x4b, 0xc4, 0x30, + 0x10, 0x86, 0xa9, 0x88, 0x62, 0x3c, 0x19, 0x41, 0x8a, 0x1f, 0x50, 0x3c, 0xf5, 0x94, 0x88, 0xde, + 0x3c, 0xf6, 0x17, 0xd4, 0x0a, 0x8a, 0xde, 0x92, 0x34, 0xcd, 0x86, 0x4d, 0x32, 0x25, 0x1f, 0xdb, + 0xfd, 0xf9, 0x0b, 0x69, 0xbb, 0x2c, 0xec, 0x6d, 0x66, 0xde, 0xe1, 0x99, 0x87, 0x41, 0x2f, 0x16, + 0x7a, 0x69, 0x68, 0x48, 0x7c, 0x02, 0xbf, 0x1d, 0x0c, 0x4c, 0x23, 0xf3, 0xcc, 0x06, 0x32, 0x7a, + 0x88, 0x80, 0x9f, 0x04, 0x58, 0xe2, 0x64, 0x1c, 0x8c, 0xde, 0x13, 0x01, 0xae, 0x4f, 0x22, 0x82, + 0x9f, 0xc3, 0xc7, 0x67, 0x05, 0xa0, 0x8c, 0xa4, 0xb9, 0xe3, 0x69, 0xa0, 0x21, 0xfa, 
0x24, 0xe2, + 0x9c, 0xbe, 0xfe, 0xa1, 0xbb, 0xef, 0xc4, 0x7f, 0x17, 0x6a, 0x9b, 0xa9, 0x18, 0xa3, 0x4b, 0xc7, + 0xac, 0x2c, 0x8b, 0xaa, 0xa8, 0x6f, 0xba, 0x5c, 0xe3, 0x37, 0x74, 0xbd, 0x93, 0x3e, 0x68, 0x70, + 0xe5, 0x45, 0x55, 0xd4, 0xb7, 0xef, 0x0f, 0x64, 0x06, 0x93, 0x15, 0x4c, 0x7e, 0x98, 0x49, 0xb2, + 0x5b, 0xd7, 0x9a, 0xaf, 0xe6, 0xfe, 0x0c, 0xdd, 0xf2, 0xff, 0x4f, 0xa5, 0xe3, 0x26, 0x71, 0x22, + 0xc0, 0xd2, 0xc5, 0x99, 0x1e, 0x9d, 0xa9, 0x30, 0x5a, 0xba, 0x48, 0x15, 0x28, 0x3f, 0x8a, 0x93, + 0x79, 0x7e, 0x00, 0xbf, 0xca, 0xb7, 0x3e, 0x0e, 0x01, 0x00, 0x00, 0xff, 0xff, 0x9f, 0x5e, 0xa1, + 0x60, 0x10, 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/task.pb.go b/client/gogrpc/conductor/model/task.pb.go new file mode 100644 index 0000000000..0dc6dd26bd --- /dev/null +++ b/client/gogrpc/conductor/model/task.pb.go @@ -0,0 +1,380 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/task.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Task_Status int32 + +const ( + Task_IN_PROGRESS Task_Status = 0 + Task_CANCELED Task_Status = 1 + Task_FAILED Task_Status = 2 + Task_COMPLETED Task_Status = 3 + Task_COMPLETED_WITH_ERRORS Task_Status = 4 + Task_SCHEDULED Task_Status = 5 + Task_TIMED_OUT Task_Status = 6 + Task_READY_FOR_RERUN Task_Status = 7 + Task_SKIPPED Task_Status = 8 +) + +var Task_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "CANCELED", + 2: "FAILED", + 3: "COMPLETED", + 4: "COMPLETED_WITH_ERRORS", + 5: "SCHEDULED", + 6: "TIMED_OUT", + 7: "READY_FOR_RERUN", + 8: "SKIPPED", +} +var Task_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "CANCELED": 1, + "FAILED": 2, + "COMPLETED": 3, + "COMPLETED_WITH_ERRORS": 4, + "SCHEDULED": 5, + "TIMED_OUT": 6, + "READY_FOR_RERUN": 7, + "SKIPPED": 8, +} + +func (x Task_Status) String() string { + return proto.EnumName(Task_Status_name, int32(x)) +} +func (Task_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_task_c6cd754250bb9501, []int{0, 0} +} + +type Task struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + Status Task_Status `protobuf:"varint,2,opt,name=status,enum=com.netflix.conductor.proto.Task_Status" json:"status,omitempty"` + InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName" json:"reference_task_name,omitempty"` + RetryCount int32 `protobuf:"varint,5,opt,name=retry_count,json=retryCount" json:"retry_count,omitempty"` + Seq int32 `protobuf:"varint,6,opt,name=seq" json:"seq,omitempty"` + CorrelationId string `protobuf:"bytes,7,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + PollCount int32 
`protobuf:"varint,8,opt,name=poll_count,json=pollCount" json:"poll_count,omitempty"` + TaskDefName string `protobuf:"bytes,9,opt,name=task_def_name,json=taskDefName" json:"task_def_name,omitempty"` + ScheduledTime int64 `protobuf:"varint,10,opt,name=scheduled_time,json=scheduledTime" json:"scheduled_time,omitempty"` + StartTime int64 `protobuf:"varint,11,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,12,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + UpdateTime int64 `protobuf:"varint,13,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds" json:"start_delay_in_seconds,omitempty"` + RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId" json:"retried_task_id,omitempty"` + Retried bool `protobuf:"varint,16,opt,name=retried" json:"retried,omitempty"` + CallbackFromWorker bool `protobuf:"varint,17,opt,name=callback_from_worker,json=callbackFromWorker" json:"callback_from_worker,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,18,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` + WorkflowInstanceId string `protobuf:"bytes,19,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` + WorkflowType string `protobuf:"bytes,20,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` + TaskId string `protobuf:"bytes,21,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,22,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,23,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,24,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,25,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + WorkflowTask *WorkflowTask `protobuf:"bytes,26,opt,name=workflow_task,json=workflowTask" json:"workflow_task,omitempty"` + Domain string `protobuf:"bytes,27,opt,name=domain" json:"domain,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Task) Reset() { *m = Task{} } +func (m *Task) String() string { return proto.CompactTextString(m) } +func (*Task) ProtoMessage() {} +func (*Task) Descriptor() ([]byte, []int) { + return fileDescriptor_task_c6cd754250bb9501, []int{0} +} +func (m *Task) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Task.Unmarshal(m, b) +} +func (m *Task) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Task.Marshal(b, m, deterministic) +} +func (dst *Task) XXX_Merge(src proto.Message) { + xxx_messageInfo_Task.Merge(dst, src) +} +func (m *Task) XXX_Size() int { + return xxx_messageInfo_Task.Size(m) +} +func (m *Task) XXX_DiscardUnknown() { + xxx_messageInfo_Task.DiscardUnknown(m) +} + +var xxx_messageInfo_Task proto.InternalMessageInfo + +func (m *Task) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *Task) GetStatus() Task_Status { + if m != nil { + return m.Status + } + return Task_IN_PROGRESS +} + +func (m 
*Task) GetInputData() map[string]*_struct.Value { + if m != nil { + return m.InputData + } + return nil +} + +func (m *Task) GetReferenceTaskName() string { + if m != nil { + return m.ReferenceTaskName + } + return "" +} + +func (m *Task) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *Task) GetSeq() int32 { + if m != nil { + return m.Seq + } + return 0 +} + +func (m *Task) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *Task) GetPollCount() int32 { + if m != nil { + return m.PollCount + } + return 0 +} + +func (m *Task) GetTaskDefName() string { + if m != nil { + return m.TaskDefName + } + return "" +} + +func (m *Task) GetScheduledTime() int64 { + if m != nil { + return m.ScheduledTime + } + return 0 +} + +func (m *Task) GetStartTime() int64 { + if m != nil { + return m.StartTime + } + return 0 +} + +func (m *Task) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +func (m *Task) GetUpdateTime() int64 { + if m != nil { + return m.UpdateTime + } + return 0 +} + +func (m *Task) GetStartDelayInSeconds() int32 { + if m != nil { + return m.StartDelayInSeconds + } + return 0 +} + +func (m *Task) GetRetriedTaskId() string { + if m != nil { + return m.RetriedTaskId + } + return "" +} + +func (m *Task) GetRetried() bool { + if m != nil { + return m.Retried + } + return false +} + +func (m *Task) GetCallbackFromWorker() bool { + if m != nil { + return m.CallbackFromWorker + } + return false +} + +func (m *Task) GetResponseTimeoutSeconds() int32 { + if m != nil { + return m.ResponseTimeoutSeconds + } + return 0 +} + +func (m *Task) GetWorkflowInstanceId() string { + if m != nil { + return m.WorkflowInstanceId + } + return "" +} + +func (m *Task) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *Task) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *Task) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *Task) GetCallbackAfterSeconds() int64 { + if m != nil { + return m.CallbackAfterSeconds + } + return 0 +} + +func (m *Task) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *Task) GetOutputData() map[string]*_struct.Value { + if m != nil { + return m.OutputData + } + return nil +} + +func (m *Task) GetWorkflowTask() *WorkflowTask { + if m != nil { + return m.WorkflowTask + } + return nil +} + +func (m *Task) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func init() { + proto.RegisterType((*Task)(nil), "com.netflix.conductor.proto.Task") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Task.InputDataEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Task.OutputDataEntry") + proto.RegisterEnum("com.netflix.conductor.proto.Task_Status", Task_Status_name, Task_Status_value) +} + +func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_c6cd754250bb9501) } + +var fileDescriptor_task_c6cd754250bb9501 = []byte{ + // 899 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x6f, 0xdb, 0x36, + 0x14, 0x9d, 0xf2, 0xe1, 0x8f, 0xab, 0xd8, 0x56, 0x99, 0xc4, 0x61, 0x9c, 0x0d, 0x35, 0x32, 0x74, + 0xf0, 0x80, 0x41, 0xce, 0xd2, 0x61, 0x28, 0xfa, 0xb4, 0xd4, 0x52, 0x56, 0x61, 0xad, 0x6d, 0xc8, + 0xce, 0x82, 0xed, 0x45, 
0xa0, 0x25, 0xda, 0x15, 0x2c, 0x89, 0x1e, 0x45, 0xad, 0xf3, 0x8f, 0xd9, + 0x3f, 0xdc, 0x8f, 0x18, 0x48, 0x5a, 0xaa, 0xb7, 0x87, 0xf6, 0xa5, 0x6f, 0xe2, 0x39, 0xf7, 0x5e, + 0x9f, 0x7b, 0xc8, 0x7b, 0x0d, 0x56, 0xca, 0x22, 0x9a, 0x0c, 0x05, 0xc9, 0xd7, 0xf6, 0x86, 0x33, + 0xc1, 0xd0, 0x55, 0xc8, 0x52, 0x3b, 0xa3, 0x62, 0x99, 0xc4, 0x7f, 0xd9, 0x21, 0xcb, 0xa2, 0x22, + 0x14, 0x8c, 0x6b, 0xb2, 0x87, 0x75, 0xf8, 0x7b, 0xc6, 0xd7, 0xcb, 0x84, 0xbd, 0xff, 0x90, 0xd6, + 0xfb, 0x72, 0xc5, 0xd8, 0x2a, 0xa1, 0x43, 0x75, 0x5a, 0x14, 0xcb, 0x61, 0x2e, 0x78, 0x11, 0x0a, + 0xcd, 0x5e, 0xff, 0x63, 0xc2, 0xd1, 0x9c, 0xe4, 0x6b, 0x74, 0x05, 0x4d, 0x99, 0x14, 0x88, 0xed, + 0x86, 0x62, 0xa3, 0x6f, 0x0c, 0x9a, 0x7e, 0x43, 0x02, 0xf3, 0xed, 0x86, 0xa2, 0x9f, 0xa0, 0x96, + 0x0b, 0x22, 0x8a, 0x1c, 0x1f, 0xf4, 0x8d, 0x41, 0xfb, 0x76, 0x60, 0x7f, 0x44, 0x8b, 0x2d, 0xeb, + 0xd9, 0x33, 0x15, 0xef, 0xef, 0xf2, 0xd0, 0x04, 0x20, 0xce, 0x36, 0x85, 0x08, 0x22, 0x22, 0x08, + 0x3e, 0xec, 0x1f, 0x0e, 0xcc, 0xdb, 0x9b, 0x4f, 0x57, 0xf1, 0x64, 0x8e, 0x43, 0x04, 0x71, 0x33, + 0xc1, 0xb7, 0x7e, 0x33, 0x2e, 0xcf, 0xc8, 0x86, 0x53, 0x4e, 0x97, 0x94, 0xd3, 0x2c, 0xa4, 0x81, + 0x52, 0x9e, 0x91, 0x94, 0xe2, 0x23, 0xa5, 0xfc, 0x49, 0x45, 0xc9, 0x2a, 0x63, 0x92, 0x52, 0xf4, + 0x14, 0x4c, 0x4e, 0x05, 0xdf, 0x06, 0x21, 0x2b, 0x32, 0x81, 0x8f, 0xfb, 0xc6, 0xe0, 0xd8, 0x07, + 0x05, 0x8d, 0x24, 0x82, 0x2c, 0x38, 0xcc, 0xe9, 0x1f, 0xb8, 0xa6, 0x08, 0xf9, 0x89, 0x9e, 0x41, + 0x3b, 0x64, 0x9c, 0xd3, 0x84, 0x88, 0x98, 0x65, 0x41, 0x1c, 0xe1, 0xba, 0xaa, 0xde, 0xda, 0x43, + 0xbd, 0x08, 0x7d, 0x05, 0xb0, 0x61, 0x49, 0xb2, 0x2b, 0xdc, 0x50, 0xf9, 0x4d, 0x89, 0xe8, 0xba, + 0xd7, 0xd0, 0x52, 0xf2, 0x22, 0xba, 0xd4, 0x12, 0x9b, 0xaa, 0x88, 0x29, 0x41, 0x87, 0x2e, 0x95, + 0xb8, 0x67, 0xd0, 0xce, 0xc3, 0x77, 0x34, 0x2a, 0x12, 0x1a, 0x05, 0x22, 0x4e, 0x29, 0x86, 0xbe, + 0x31, 0x38, 0xf4, 0x5b, 0x15, 0x3a, 0x8f, 0x53, 0x2a, 0x7f, 0x29, 0x17, 0x84, 0x0b, 0x1d, 0x62, + 0xaa, 0x90, 0xa6, 0x42, 0x14, 0x7d, 0x09, 0x0d, 0x9a, 0xed, 0xf2, 0x4f, 0x14, 0x59, 0xa7, 0x99, + 0xce, 0x7c, 0x0a, 0x66, 0xb1, 0x89, 0x88, 0xa0, 0x9a, 0x6d, 0x29, 0x16, 0x34, 0xa4, 0x02, 0x9e, + 0x43, 0x57, 0x97, 0x8e, 0x68, 0x42, 0xb6, 0x41, 0x9c, 0x05, 0x39, 0x95, 0x37, 0x92, 0xe3, 0xb6, + 0x6a, 0xe8, 0x54, 0xb1, 0x8e, 0x24, 0xbd, 0x6c, 0xa6, 0x29, 0xf4, 0x0d, 0x74, 0xa4, 0x81, 0xb1, + 0x14, 0x2d, 0x5b, 0x8c, 0x23, 0xdc, 0xd1, 0x0e, 0xed, 0x60, 0xe9, 0xbe, 0x17, 0x21, 0x0c, 0xf5, + 0x1d, 0x80, 0xad, 0xbe, 0x31, 0x68, 0xf8, 0xe5, 0x11, 0xdd, 0xc0, 0x59, 0x48, 0x92, 0x64, 0x41, + 0xc2, 0x75, 0xb0, 0xe4, 0x2c, 0x0d, 0xe4, 0x03, 0xa6, 0x1c, 0x3f, 0x51, 0x61, 0xa8, 0xe4, 0xee, + 0x39, 0x4b, 0x1f, 0x15, 0x83, 0x5e, 0x00, 0xe6, 0x34, 0xdf, 0xb0, 0x2c, 0xd7, 0xbd, 0xb0, 0x42, + 0x54, 0x52, 0x91, 0x92, 0xda, 0x2d, 0xf9, 0xb9, 0xa6, 0x4b, 0xb5, 0x37, 0x70, 0x56, 0x8e, 0x47, + 0x10, 0x67, 0xb9, 0x20, 0xf2, 0xe5, 0xc4, 0x11, 0x3e, 0x55, 0x92, 0x51, 0xc9, 0x79, 0x3b, 0xca, + 0x8b, 0xd0, 0xd7, 0xd0, 0xaa, 0x32, 0xd4, 0x5c, 0x9c, 0xa9, 0xd0, 0x93, 0x12, 0x54, 0xb3, 0x71, + 0x01, 0xf5, 0xb2, 0xf9, 0x73, 0x45, 0xd7, 0x84, 0xee, 0xfa, 0x47, 0xb8, 0xe0, 0x94, 0xe4, 0x2c, + 0x0b, 0x96, 0x8c, 0x07, 0x71, 0x16, 0xb2, 0x74, 0x93, 0x50, 0xf9, 0x68, 0x70, 0x57, 0x05, 0x9e, + 0x6b, 0xfa, 0x9e, 0x71, 0x6f, 0x8f, 0x44, 0x3f, 0x40, 0xb7, 0xf2, 0x84, 0x2c, 0x05, 0xe5, 0x55, + 0x7f, 0x17, 0xea, 0xda, 0x2a, 0xc7, 0xee, 0x24, 0x59, 0x76, 0x77, 0x05, 0x4d, 0xed, 0x9d, 0x14, + 0x82, 0xf5, 0xfc, 0x6a, 0xc0, 0x8b, 0x90, 0x0f, 0x26, 0x2b, 0x44, 0x35, 0x7e, 0x97, 0x6a, 0xfc, + 0xbe, 0xff, 0xf4, 0xf8, 0x4d, 0x54, 0xd2, 0x87, 
0xf9, 0x03, 0x56, 0x01, 0x68, 0xbc, 0x6f, 0x0e, + 0xc9, 0xd7, 0xb8, 0xd7, 0x37, 0x06, 0xe6, 0xed, 0xb7, 0x1f, 0xad, 0xfa, 0x58, 0x3a, 0x47, 0xf2, + 0xf5, 0x9e, 0x8f, 0x72, 0x01, 0x75, 0xa1, 0x16, 0xb1, 0x94, 0xc4, 0x19, 0xbe, 0xd2, 0x36, 0xea, + 0x53, 0x6f, 0x0e, 0xed, 0xff, 0x6e, 0x01, 0x39, 0xa9, 0x6b, 0xba, 0xdd, 0x2d, 0x29, 0xf9, 0x89, + 0xbe, 0x83, 0xe3, 0x3f, 0x49, 0x52, 0x50, 0xb5, 0x9e, 0xcc, 0xdb, 0xae, 0xad, 0x77, 0x9e, 0x5d, + 0xee, 0x3c, 0xfb, 0x57, 0xc9, 0xfa, 0x3a, 0xe8, 0xe5, 0xc1, 0x0b, 0xa3, 0xf7, 0x00, 0x9d, 0xff, + 0x35, 0xf7, 0x39, 0xca, 0x5e, 0xff, 0x6d, 0x40, 0x4d, 0x6f, 0x3e, 0xd4, 0x01, 0xd3, 0x1b, 0x07, + 0x53, 0x7f, 0xf2, 0xb3, 0xef, 0xce, 0x66, 0xd6, 0x17, 0xe8, 0x04, 0x1a, 0xa3, 0xbb, 0xf1, 0xc8, + 0x7d, 0xe3, 0x3a, 0x96, 0x81, 0x00, 0x6a, 0xf7, 0x77, 0x9e, 0xfc, 0x3e, 0x40, 0x2d, 0x68, 0x8e, + 0x26, 0x6f, 0xa7, 0x6f, 0xdc, 0xb9, 0xeb, 0x58, 0x87, 0xe8, 0x12, 0xce, 0xab, 0x63, 0xf0, 0xe8, + 0xcd, 0x5f, 0x07, 0xae, 0xef, 0x4f, 0xfc, 0x99, 0x75, 0x24, 0x23, 0x67, 0xa3, 0xd7, 0xae, 0xf3, + 0x20, 0x13, 0x8f, 0xe5, 0x71, 0xee, 0xbd, 0x75, 0x9d, 0x60, 0xf2, 0x30, 0xb7, 0x6a, 0xe8, 0x14, + 0x3a, 0xbe, 0x7b, 0xe7, 0xfc, 0x16, 0xdc, 0x4f, 0xfc, 0xc0, 0x77, 0xfd, 0x87, 0xb1, 0x55, 0x47, + 0x26, 0xd4, 0x67, 0xbf, 0x78, 0xd3, 0xa9, 0xeb, 0x58, 0x8d, 0x57, 0xce, 0xab, 0x9a, 0x34, 0x7b, + 0xba, 0xf8, 0xfd, 0xe5, 0x2a, 0x16, 0xef, 0x8a, 0x85, 0xbc, 0xad, 0xe1, 0xee, 0xb6, 0x86, 0xd5, + 0x6d, 0x0d, 0xc3, 0x24, 0xa6, 0x99, 0x18, 0xae, 0xd8, 0x8a, 0x6f, 0xc2, 0x3d, 0x5c, 0xfd, 0xcb, + 0x2c, 0x6a, 0xca, 0x80, 0xe7, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xe0, 0x94, 0x89, 0xa4, + 0x06, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/taskdef.pb.go b/client/gogrpc/conductor/model/taskdef.pb.go new file mode 100644 index 0000000000..6ac3ddbd1c --- /dev/null +++ b/client/gogrpc/conductor/model/taskdef.pb.go @@ -0,0 +1,244 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/taskdef.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskDef_RetryLogic int32 + +const ( + TaskDef_FIXED TaskDef_RetryLogic = 0 + TaskDef_EXPONENTIAL_BACKOFF TaskDef_RetryLogic = 1 +) + +var TaskDef_RetryLogic_name = map[int32]string{ + 0: "FIXED", + 1: "EXPONENTIAL_BACKOFF", +} +var TaskDef_RetryLogic_value = map[string]int32{ + "FIXED": 0, + "EXPONENTIAL_BACKOFF": 1, +} + +func (x TaskDef_RetryLogic) String() string { + return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) +} +func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskdef_eb59d906db8eb198, []int{0, 0} +} + +type TaskDef_TimeoutPolicy int32 + +const ( + TaskDef_RETRY TaskDef_TimeoutPolicy = 0 + TaskDef_TIME_OUT_WF TaskDef_TimeoutPolicy = 1 + TaskDef_ALERT_ONLY TaskDef_TimeoutPolicy = 2 +) + +var TaskDef_TimeoutPolicy_name = map[int32]string{ + 0: "RETRY", + 1: "TIME_OUT_WF", + 2: "ALERT_ONLY", +} +var TaskDef_TimeoutPolicy_value = map[string]int32{ + "RETRY": 0, + "TIME_OUT_WF": 1, + "ALERT_ONLY": 2, +} + +func (x TaskDef_TimeoutPolicy) String() string { + return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) +} +func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskdef_eb59d906db8eb198, []int{0, 1} +} + +type TaskDef struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + RetryCount int32 `protobuf:"varint,3,opt,name=retry_count,json=retryCount" json:"retry_count,omitempty"` + TimeoutSeconds int64 `protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds" json:"timeout_seconds,omitempty"` + InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys" json:"input_keys,omitempty"` + OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys" json:"output_keys,omitempty"` + TimeoutPolicy TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,enum=com.netflix.conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` + RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,enum=com.netflix.conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` + RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds" json:"retry_delay_seconds,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` + ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit" json:"concurrent_exec_limit,omitempty"` + InputTemplate map[string]*_struct.Value `protobuf:"bytes,12,rep,name=input_template,json=inputTemplate" json:"input_template,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskDef) Reset() { *m = TaskDef{} } +func (m *TaskDef) String() string { return proto.CompactTextString(m) } +func (*TaskDef) ProtoMessage() {} +func (*TaskDef) Descriptor() ([]byte, []int) { + return fileDescriptor_taskdef_eb59d906db8eb198, []int{0} +} +func (m *TaskDef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskDef.Unmarshal(m, b) +} +func (m *TaskDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskDef.Marshal(b, 
m, deterministic) +} +func (dst *TaskDef) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskDef.Merge(dst, src) +} +func (m *TaskDef) XXX_Size() int { + return xxx_messageInfo_TaskDef.Size(m) +} +func (m *TaskDef) XXX_DiscardUnknown() { + xxx_messageInfo_TaskDef.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskDef proto.InternalMessageInfo + +func (m *TaskDef) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *TaskDef) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *TaskDef) GetRetryCount() int32 { + if m != nil { + return m.RetryCount + } + return 0 +} + +func (m *TaskDef) GetTimeoutSeconds() int64 { + if m != nil { + return m.TimeoutSeconds + } + return 0 +} + +func (m *TaskDef) GetInputKeys() []string { + if m != nil { + return m.InputKeys + } + return nil +} + +func (m *TaskDef) GetOutputKeys() []string { + if m != nil { + return m.OutputKeys + } + return nil +} + +func (m *TaskDef) GetTimeoutPolicy() TaskDef_TimeoutPolicy { + if m != nil { + return m.TimeoutPolicy + } + return TaskDef_RETRY +} + +func (m *TaskDef) GetRetryLogic() TaskDef_RetryLogic { + if m != nil { + return m.RetryLogic + } + return TaskDef_FIXED +} + +func (m *TaskDef) GetRetryDelaySeconds() int32 { + if m != nil { + return m.RetryDelaySeconds + } + return 0 +} + +func (m *TaskDef) GetResponseTimeoutSeconds() int32 { + if m != nil { + return m.ResponseTimeoutSeconds + } + return 0 +} + +func (m *TaskDef) GetConcurrentExecLimit() int32 { + if m != nil { + return m.ConcurrentExecLimit + } + return 0 +} + +func (m *TaskDef) GetInputTemplate() map[string]*_struct.Value { + if m != nil { + return m.InputTemplate + } + return nil +} + +func init() { + proto.RegisterType((*TaskDef)(nil), "com.netflix.conductor.proto.TaskDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.TaskDef.InputTemplateEntry") + proto.RegisterEnum("com.netflix.conductor.proto.TaskDef_RetryLogic", TaskDef_RetryLogic_name, TaskDef_RetryLogic_value) + proto.RegisterEnum("com.netflix.conductor.proto.TaskDef_TimeoutPolicy", TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) +} + +func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_eb59d906db8eb198) } + +var fileDescriptor_taskdef_eb59d906db8eb198 = []byte{ + // 566 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x4f, 0x6f, 0xd3, 0x40, + 0x10, 0xc5, 0xeb, 0xa6, 0x69, 0xc9, 0x84, 0xa6, 0x66, 0x23, 0x8a, 0x55, 0x40, 0x58, 0xbd, 0xe0, + 0x03, 0xb2, 0x51, 0x38, 0x50, 0x95, 0x53, 0xff, 0x38, 0x28, 0x6a, 0x68, 0x22, 0x63, 0xa0, 0xe5, + 0x80, 0xe5, 0x6c, 0x26, 0x66, 0x15, 0xdb, 0x6b, 0xd9, 0x6b, 0x54, 0x7f, 0x38, 0xbe, 0x1b, 0xda, + 0xb5, 0xd3, 0xa6, 0x20, 0xa1, 0xde, 0x76, 0xdf, 0x9b, 0x37, 0xc9, 0xfc, 0x76, 0x0c, 0xfd, 0x84, + 0xcf, 0x31, 0x76, 0x44, 0x58, 0x2c, 0xe7, 0xb8, 0xb0, 0xb3, 0x9c, 0x0b, 0x4e, 0x9e, 0x53, 0x9e, + 0xd8, 0x29, 0x8a, 0x45, 0xcc, 0x6e, 0x6c, 0xca, 0xd3, 0x79, 0x49, 0x05, 0xcf, 0x6b, 0xf3, 0xe0, + 0x45, 0xc4, 0x79, 0x14, 0xa3, 0xa3, 0x6e, 0xb3, 0x72, 0xe1, 0x14, 0x22, 0x2f, 0xa9, 0xa8, 0xdd, + 0xc3, 0xdf, 0xdb, 0xb0, 0xe3, 0x87, 0xc5, 0xf2, 0x1c, 0x17, 0x84, 0xc0, 0x56, 0x1a, 0x26, 0x68, + 0x68, 0xa6, 0x66, 0x75, 0x3c, 0x75, 0x26, 0x26, 0x74, 0xe7, 0x58, 0xd0, 0x9c, 0x65, 0x82, 0xf1, + 0xd4, 0xd8, 0x54, 0xd6, 0xba, 0x44, 0x5e, 0x41, 0x37, 0x47, 0x91, 0x57, 0x01, 0xe5, 0x65, 0x2a, + 0x8c, 0x96, 0xa9, 0x59, 0x6d, 0x0f, 0x94, 0x74, 0x26, 0x15, 0xf2, 0x1a, 0xf6, 0x04, 0x4b, 
0x90, + 0x97, 0x22, 0x28, 0x50, 0xfe, 0xbb, 0xc2, 0xd8, 0x32, 0x35, 0xab, 0xe5, 0xf5, 0x1a, 0xf9, 0x73, + 0xad, 0x92, 0x97, 0x00, 0x2c, 0xcd, 0x4a, 0x11, 0x2c, 0xb1, 0x2a, 0x8c, 0xb6, 0xd9, 0xb2, 0x3a, + 0x5e, 0x47, 0x29, 0x17, 0x58, 0x15, 0xf2, 0x87, 0x78, 0x29, 0x6e, 0xfd, 0x6d, 0xe5, 0x43, 0x2d, + 0xa9, 0x82, 0x6b, 0x58, 0x75, 0x0c, 0x32, 0x1e, 0x33, 0x5a, 0x19, 0x3b, 0xa6, 0x66, 0xf5, 0x06, + 0x03, 0xfb, 0x3f, 0x7c, 0xec, 0x66, 0x7a, 0xdb, 0xaf, 0xa3, 0x53, 0x95, 0xf4, 0x76, 0xc5, 0xfa, + 0x95, 0x4c, 0x57, 0x43, 0xc6, 0x3c, 0x62, 0xd4, 0x78, 0xa4, 0xfa, 0x3a, 0x0f, 0xea, 0xeb, 0xc9, + 0xdc, 0x58, 0xc6, 0x1a, 0x2a, 0xea, 0x4c, 0x6c, 0xe8, 0xd7, 0x1d, 0xe7, 0x18, 0x87, 0xd5, 0x2d, + 0x99, 0x8e, 0xc2, 0xf7, 0x44, 0x59, 0xe7, 0xd2, 0x59, 0xc1, 0x39, 0x02, 0x23, 0xc7, 0x22, 0xe3, + 0x69, 0x81, 0xc1, 0xdf, 0x38, 0x41, 0x85, 0xf6, 0x57, 0xbe, 0x7f, 0x1f, 0xeb, 0x00, 0x9e, 0x52, + 0x9e, 0xd2, 0x32, 0xcf, 0x31, 0x15, 0x01, 0xde, 0x20, 0x0d, 0x62, 0x96, 0x30, 0x61, 0x74, 0x55, + 0xac, 0x7f, 0x67, 0xba, 0x37, 0x48, 0xc7, 0xd2, 0x22, 0x3f, 0xa0, 0x57, 0x3f, 0x85, 0xc0, 0x24, + 0x8b, 0x43, 0x81, 0xc6, 0x63, 0xb3, 0x65, 0x75, 0x07, 0xef, 0x1f, 0x34, 0xf2, 0x48, 0x46, 0xfd, + 0x26, 0xe9, 0xa6, 0x22, 0xaf, 0xbc, 0x5d, 0xb6, 0xae, 0x1d, 0x5c, 0x01, 0xf9, 0xb7, 0x88, 0xe8, + 0xd0, 0x5a, 0x62, 0xd5, 0xec, 0x9f, 0x3c, 0x92, 0x37, 0xd0, 0xfe, 0x15, 0xc6, 0x25, 0xaa, 0xc5, + 0xeb, 0x0e, 0xf6, 0xed, 0x7a, 0x99, 0xed, 0xd5, 0x32, 0xdb, 0x5f, 0xa5, 0xeb, 0xd5, 0x45, 0xc7, + 0x9b, 0x47, 0xda, 0xe1, 0x5b, 0x80, 0x3b, 0xe2, 0xa4, 0x03, 0xed, 0xe1, 0xe8, 0xca, 0x3d, 0xd7, + 0x37, 0xc8, 0x33, 0xe8, 0xbb, 0x57, 0xd3, 0xc9, 0xa5, 0x7b, 0xe9, 0x8f, 0x4e, 0xc6, 0xc1, 0xe9, + 0xc9, 0xd9, 0xc5, 0x64, 0x38, 0xd4, 0xb5, 0xc3, 0x0f, 0xb0, 0x7b, 0xef, 0xed, 0x65, 0xc8, 0x73, + 0x7d, 0xef, 0x5a, 0xdf, 0x20, 0x7b, 0xd0, 0xf5, 0x47, 0x9f, 0xdc, 0x60, 0xf2, 0xc5, 0x0f, 0xbe, + 0x0d, 0x75, 0x8d, 0xf4, 0x00, 0x4e, 0xc6, 0xae, 0xe7, 0x07, 0x93, 0xcb, 0xf1, 0xb5, 0xbe, 0x79, + 0xfa, 0xf1, 0xb4, 0xd3, 0x4c, 0x3d, 0x9d, 0x7d, 0x3f, 0x8e, 0x98, 0xf8, 0x59, 0xce, 0x24, 0x23, + 0xa7, 0x61, 0xe4, 0xdc, 0x32, 0x72, 0x68, 0xcc, 0x30, 0x15, 0x4e, 0xc4, 0xa3, 0x3c, 0xa3, 0x6b, + 0xba, 0xfa, 0xa2, 0x67, 0xdb, 0x6a, 0xa4, 0x77, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xb9, + 0x8e, 0x83, 0xe1, 0x03, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/taskexeclog.pb.go b/client/gogrpc/conductor/model/taskexeclog.pb.go new file mode 100644 index 0000000000..2761ddd081 --- /dev/null +++ b/client/gogrpc/conductor/model/taskexeclog.pb.go @@ -0,0 +1,98 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/taskexeclog.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskExecLog struct { + Log string `protobuf:"bytes,1,opt,name=log" json:"log,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + CreatedTime int64 `protobuf:"varint,3,opt,name=created_time,json=createdTime" json:"created_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskExecLog) Reset() { *m = TaskExecLog{} } +func (m *TaskExecLog) String() string { return proto.CompactTextString(m) } +func (*TaskExecLog) ProtoMessage() {} +func (*TaskExecLog) Descriptor() ([]byte, []int) { + return fileDescriptor_taskexeclog_fcbee8d85ec58fef, []int{0} +} +func (m *TaskExecLog) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskExecLog.Unmarshal(m, b) +} +func (m *TaskExecLog) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskExecLog.Marshal(b, m, deterministic) +} +func (dst *TaskExecLog) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskExecLog.Merge(dst, src) +} +func (m *TaskExecLog) XXX_Size() int { + return xxx_messageInfo_TaskExecLog.Size(m) +} +func (m *TaskExecLog) XXX_DiscardUnknown() { + xxx_messageInfo_TaskExecLog.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskExecLog proto.InternalMessageInfo + +func (m *TaskExecLog) GetLog() string { + if m != nil { + return m.Log + } + return "" +} + +func (m *TaskExecLog) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *TaskExecLog) GetCreatedTime() int64 { + if m != nil { + return m.CreatedTime + } + return 0 +} + +func init() { + proto.RegisterType((*TaskExecLog)(nil), "com.netflix.conductor.proto.TaskExecLog") +} + +func init() { + proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_fcbee8d85ec58fef) +} + +var fileDescriptor_taskexeclog_fcbee8d85ec58fef = []byte{ + // 201 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0x31, 0x4b, 0xc5, 0x30, + 0x10, 0x80, 0xa9, 0x85, 0x27, 0xe6, 0x29, 0x48, 0x96, 0x57, 0x70, 0x79, 0x3a, 0x75, 0x4a, 0x06, + 0x37, 0xc7, 0x82, 0x83, 0xe8, 0x20, 0xa5, 0x93, 0x0e, 0xa5, 0xbd, 0x9c, 0x31, 0x34, 0xe9, 0x95, + 0xf4, 0x0a, 0xfd, 0xf9, 0xd2, 0x5a, 0x1e, 0xdd, 0xee, 0xbe, 0x83, 0x8f, 0xef, 0xc4, 0x29, 0x90, + 0x41, 0xaf, 0xb9, 0x19, 0x3b, 0x9c, 0x11, 0x3c, 0x59, 0x35, 0x44, 0x62, 0x92, 0x0f, 0x40, 0x41, + 0xf5, 0xc8, 0x3f, 0xde, 0xcd, 0x0a, 0xa8, 0x37, 0x13, 0x30, 0xc5, 0xff, 0xe3, 0xd3, 0xb7, 0x38, + 0x56, 0xcd, 0xd8, 0xbd, 0xce, 0x08, 0x1f, 0x64, 0xe5, 0xbd, 0x48, 0x3d, 0xd9, 0x2c, 0x39, 0x27, + 0xf9, 0x4d, 0xb9, 0x8c, 0xf2, 0x24, 0xae, 0x17, 0x65, 0xed, 0x4c, 0x76, 0xb5, 0xd2, 0xc3, 0xb2, + 0xbe, 0x19, 0xf9, 0x28, 0x6e, 0x21, 0x62, 0xc3, 0x68, 0x6a, 0x76, 0x01, 0xb3, 0xf4, 0x9c, 0xe4, + 0x69, 0x79, 0xdc, 0x58, 0xe5, 0x02, 0x16, 0xef, 0xc5, 0xdd, 0x4e, 0xfe, 0xd9, 0x7e, 0xbd, 0x58, + 0xc7, 0xbf, 0x53, 0xab, 0x80, 0x82, 0xde, 0x8a, 0xf4, 0xa5, 0x48, 0x83, 0x77, 0xd8, 0xb3, 0xb6, + 0x64, 0xe3, 0x00, 0x3b, 0xbe, 0x3e, 0xd6, 0x1e, 0xd6, 0xe0, 0xe7, 0xbf, 0x00, 0x00, 0x00, 0xff, + 0xff, 0xf0, 0xe3, 0x45, 0x74, 0xe8, 0x00, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/taskresult.pb.go b/client/gogrpc/conductor/model/taskresult.pb.go new file mode 100644 index 0000000000..c71164ce53 --- /dev/null +++ b/client/gogrpc/conductor/model/taskresult.pb.go @@ -0,0 +1,176 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
+// source: model/taskresult.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskResult_Status int32 + +const ( + TaskResult_IN_PROGRESS TaskResult_Status = 0 + TaskResult_FAILED TaskResult_Status = 1 + TaskResult_COMPLETED TaskResult_Status = 2 + TaskResult_SCHEDULED TaskResult_Status = 3 +) + +var TaskResult_Status_name = map[int32]string{ + 0: "IN_PROGRESS", + 1: "FAILED", + 2: "COMPLETED", + 3: "SCHEDULED", +} +var TaskResult_Status_value = map[string]int32{ + "IN_PROGRESS": 0, + "FAILED": 1, + "COMPLETED": 2, + "SCHEDULED": 3, +} + +func (x TaskResult_Status) String() string { + return proto.EnumName(TaskResult_Status_name, int32(x)) +} +func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_taskresult_d94b981b2ba0d45d, []int{0, 0} +} + +type TaskResult struct { + WorkflowInstanceId string `protobuf:"bytes,1,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + Status TaskResult_Status `protobuf:"varint,6,opt,name=status,enum=com.netflix.conductor.proto.TaskResult_Status" json:"status,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskResult) Reset() { *m = TaskResult{} } +func (m *TaskResult) String() string { return proto.CompactTextString(m) } +func (*TaskResult) ProtoMessage() {} +func (*TaskResult) Descriptor() ([]byte, []int) { + return fileDescriptor_taskresult_d94b981b2ba0d45d, []int{0} +} +func (m *TaskResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskResult.Unmarshal(m, b) +} +func (m *TaskResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskResult.Marshal(b, m, deterministic) +} +func (dst *TaskResult) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskResult.Merge(dst, src) +} +func (m *TaskResult) XXX_Size() int { + return xxx_messageInfo_TaskResult.Size(m) +} +func (m *TaskResult) XXX_DiscardUnknown() { + xxx_messageInfo_TaskResult.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskResult proto.InternalMessageInfo + +func (m *TaskResult) GetWorkflowInstanceId() string { + if m != nil { + return m.WorkflowInstanceId + } 
+ return "" +} + +func (m *TaskResult) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *TaskResult) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *TaskResult) GetCallbackAfterSeconds() int64 { + if m != nil { + return m.CallbackAfterSeconds + } + return 0 +} + +func (m *TaskResult) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *TaskResult) GetStatus() TaskResult_Status { + if m != nil { + return m.Status + } + return TaskResult_IN_PROGRESS +} + +func (m *TaskResult) GetOutputData() map[string]*_struct.Value { + if m != nil { + return m.OutputData + } + return nil +} + +func init() { + proto.RegisterType((*TaskResult)(nil), "com.netflix.conductor.proto.TaskResult") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.TaskResult.OutputDataEntry") + proto.RegisterEnum("com.netflix.conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) +} + +func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_d94b981b2ba0d45d) } + +var fileDescriptor_taskresult_d94b981b2ba0d45d = []byte{ + // 455 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6f, 0xd3, 0x30, + 0x14, 0xc6, 0xc9, 0xba, 0x65, 0xf4, 0x15, 0x58, 0x65, 0x8d, 0x2e, 0xda, 0x38, 0x54, 0x3b, 0xf5, + 0x80, 0x1c, 0x54, 0x10, 0xa0, 0xdd, 0xb6, 0x36, 0x85, 0xa0, 0x41, 0xab, 0x64, 0x43, 0x88, 0x4b, + 0xe4, 0x38, 0x4e, 0x88, 0xe2, 0xc6, 0x95, 0xed, 0x30, 0xf6, 0xdf, 0xf0, 0xa7, 0x22, 0x3b, 0xe9, + 0x36, 0x71, 0x40, 0xdc, 0xf2, 0xde, 0xef, 0xbd, 0xa7, 0xef, 0xfb, 0x1c, 0x18, 0xad, 0x45, 0xc6, + 0xb8, 0xaf, 0x89, 0xaa, 0x24, 0x53, 0x0d, 0xd7, 0x78, 0x23, 0x85, 0x16, 0xe8, 0x84, 0x8a, 0x35, + 0xae, 0x99, 0xce, 0x79, 0xf9, 0x0b, 0x53, 0x51, 0x67, 0x0d, 0xd5, 0x42, 0xb6, 0xf0, 0xf8, 0x45, + 0x21, 0x44, 0xc1, 0x99, 0x6f, 0xab, 0xb4, 0xc9, 0x7d, 0xa5, 0x65, 0x43, 0xbb, 0xd5, 0xd3, 0xdf, + 0xbb, 0x00, 0x57, 0x44, 0x55, 0x91, 0xbd, 0x87, 0x5e, 0xc1, 0xe1, 0x8d, 0x90, 0x55, 0xce, 0xc5, + 0x4d, 0x52, 0xd6, 0x4a, 0x93, 0x9a, 0xb2, 0xa4, 0xcc, 0x3c, 0x67, 0xec, 0x4c, 0xfa, 0x11, 0xda, + 0xb2, 0xb0, 0x43, 0x61, 0x86, 0x8e, 0x60, 0xdf, 0xe8, 0x31, 0x43, 0x3b, 0x76, 0xc8, 0x35, 0x65, + 0x98, 0xa1, 0xb7, 0x70, 0x24, 0x19, 0x51, 0xa2, 0x4e, 0x72, 0x21, 0x93, 0xb2, 0xa6, 0x62, 0xbd, + 0xe1, 0x4c, 0x97, 0xa2, 0xf6, 0x7a, 0x76, 0xf0, 0x79, 0x8b, 0x17, 0x42, 0x86, 0x0f, 0x20, 0x7a, + 0x03, 0x23, 0x4a, 0x38, 0x4f, 0x09, 0xad, 0x12, 0x92, 0x6b, 0x26, 0x13, 0xc5, 0x8c, 0x27, 0xe5, + 0xed, 0x8e, 0x9d, 0x49, 0x2f, 0x3a, 0xdc, 0xd2, 0x73, 0x03, 0xe3, 0x96, 0xa1, 0x13, 0xe8, 0x1b, + 0x71, 0x4c, 0x1a, 0x21, 0x7b, 0xf6, 0xfe, 0xe3, 0xb6, 0x11, 0x66, 0x68, 0x01, 0xae, 0xd2, 0x44, + 0x37, 0xca, 0x73, 0xc7, 0xce, 0xe4, 0xd9, 0x14, 0xe3, 0x7f, 0x04, 0x86, 0xef, 0xe3, 0xc0, 0xb1, + 0xdd, 0x8a, 0xba, 0x6d, 0xf4, 0x0d, 0x06, 0xa2, 0xd1, 0x9b, 0x46, 0x27, 0x19, 0xd1, 0xc4, 0xdb, + 0x1f, 0xf7, 0x26, 0x83, 0xe9, 0xbb, 0xff, 0x3d, 0xb6, 0xb4, 0xab, 0x73, 0xa2, 0x49, 0x50, 0x6b, + 0x79, 0x1b, 0x81, 0xb8, 0x6b, 0x1c, 0x5f, 0xc3, 0xc1, 0x5f, 0x18, 0x0d, 0xa1, 0x57, 0xb1, 0xdb, + 0x2e, 0x79, 0xf3, 0x89, 0x5e, 0xc2, 0xde, 0x4f, 0xc2, 0x1b, 0x66, 0x83, 0x1e, 0x4c, 0x47, 0xb8, + 0x7d, 0x59, 0xbc, 0x7d, 0x59, 0xfc, 0xd5, 0xd0, 0xa8, 0x1d, 0x3a, 0xdb, 0x79, 0xef, 0x9c, 0xce, + 0xc0, 0x6d, 0x2d, 0xa0, 0x03, 0x18, 0x84, 0x5f, 0x92, 0x55, 0xb4, 0xfc, 0x10, 0x05, 0x71, 0x3c, + 0x7c, 0x84, 0x00, 
0xdc, 0xc5, 0x79, 0x78, 0x19, 0xcc, 0x87, 0x0e, 0x7a, 0x0a, 0xfd, 0xd9, 0xf2, + 0xf3, 0xea, 0x32, 0xb8, 0x0a, 0xe6, 0xc3, 0x1d, 0x53, 0xc6, 0xb3, 0x8f, 0xc1, 0xfc, 0xda, 0xd0, + 0xde, 0xc5, 0xa7, 0x8b, 0x27, 0xf7, 0x2e, 0x56, 0xe9, 0xf7, 0xb3, 0xa2, 0xd4, 0x3f, 0x9a, 0xd4, + 0xd8, 0xf6, 0x3b, 0xdb, 0xfe, 0x9d, 0x6d, 0x9f, 0xf2, 0x92, 0xd5, 0xda, 0x2f, 0x44, 0x21, 0x37, + 0xf4, 0x41, 0xdf, 0xfe, 0xba, 0xa9, 0x6b, 0xb5, 0xbe, 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0xbe, + 0xcf, 0xb8, 0x26, 0xca, 0x02, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/tasksummary.pb.go b/client/gogrpc/conductor/model/tasksummary.pb.go new file mode 100644 index 0000000000..5bad0055bd --- /dev/null +++ b/client/gogrpc/conductor/model/tasksummary.pb.go @@ -0,0 +1,217 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/tasksummary.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type TaskSummary struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + WorkflowType string `protobuf:"bytes,2,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + ScheduledTime string `protobuf:"bytes,4,opt,name=scheduled_time,json=scheduledTime" json:"scheduled_time,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + Status Task_Status `protobuf:"varint,8,opt,name=status,enum=com.netflix.conductor.proto.Task_Status" json:"status,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 `protobuf:"varint,10,opt,name=execution_time,json=executionTime" json:"execution_time,omitempty"` + QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime" json:"queue_wait_time,omitempty"` + TaskDefName string `protobuf:"bytes,12,opt,name=task_def_name,json=taskDefName" json:"task_def_name,omitempty"` + TaskType string `protobuf:"bytes,13,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + Input string `protobuf:"bytes,14,opt,name=input" json:"input,omitempty"` + Output string `protobuf:"bytes,15,opt,name=output" json:"output,omitempty"` + TaskId string `protobuf:"bytes,16,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TaskSummary) Reset() { *m = TaskSummary{} } +func (m *TaskSummary) String() string { return proto.CompactTextString(m) } +func (*TaskSummary) ProtoMessage() {} 
+func (*TaskSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_tasksummary_ab79a64782e5b735, []int{0} +} +func (m *TaskSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TaskSummary.Unmarshal(m, b) +} +func (m *TaskSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TaskSummary.Marshal(b, m, deterministic) +} +func (dst *TaskSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_TaskSummary.Merge(dst, src) +} +func (m *TaskSummary) XXX_Size() int { + return xxx_messageInfo_TaskSummary.Size(m) +} +func (m *TaskSummary) XXX_DiscardUnknown() { + xxx_messageInfo_TaskSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_TaskSummary proto.InternalMessageInfo + +func (m *TaskSummary) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *TaskSummary) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *TaskSummary) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *TaskSummary) GetScheduledTime() string { + if m != nil { + return m.ScheduledTime + } + return "" +} + +func (m *TaskSummary) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *TaskSummary) GetUpdateTime() string { + if m != nil { + return m.UpdateTime + } + return "" +} + +func (m *TaskSummary) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +func (m *TaskSummary) GetStatus() Task_Status { + if m != nil { + return m.Status + } + return Task_IN_PROGRESS +} + +func (m *TaskSummary) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *TaskSummary) GetExecutionTime() int64 { + if m != nil { + return m.ExecutionTime + } + return 0 +} + +func (m *TaskSummary) GetQueueWaitTime() int64 { + if m != nil { + return m.QueueWaitTime + } + return 0 +} + +func (m *TaskSummary) GetTaskDefName() string { + if m != nil { + return m.TaskDefName + } + return "" +} + +func (m *TaskSummary) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *TaskSummary) GetInput() string { + if m != nil { + return m.Input + } + return "" +} + +func (m *TaskSummary) GetOutput() string { + if m != nil { + return m.Output + } + return "" +} + +func (m *TaskSummary) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func init() { + proto.RegisterType((*TaskSummary)(nil), "com.netflix.conductor.proto.TaskSummary") +} + +func init() { + proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab79a64782e5b735) +} + +var fileDescriptor_tasksummary_ab79a64782e5b735 = []byte{ + // 446 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x4f, 0x6b, 0x1b, 0x31, + 0x10, 0xc5, 0x71, 0x93, 0xf8, 0xcf, 0x38, 0xeb, 0x84, 0xa5, 0xad, 0xb7, 0x0d, 0x25, 0x26, 0xa5, + 0xc5, 0xa7, 0x5d, 0x68, 0xa1, 0x87, 0x9e, 0x4a, 0x28, 0x05, 0x53, 0x28, 0xc5, 0x31, 0x14, 0x7a, + 0x59, 0x64, 0x69, 0xd6, 0x11, 0x5e, 0x49, 0x5b, 0xad, 0x84, 0xe3, 0x0f, 0xd9, 0xef, 0x54, 0x34, + 0xda, 0xb8, 0x3e, 0xe5, 0xa8, 0xdf, 0x7b, 0x9a, 0x19, 0xbd, 0x11, 0x4c, 0x95, 0x11, 0x58, 0x17, + 0x8e, 0xb5, 0xdb, 0xd6, 0x2b, 0xc5, 0xec, 0x3e, 0x6f, 0xac, 0x71, 0x26, 0xbd, 0xe2, 0x46, 0xe5, + 0x1a, 0x5d, 0x55, 0xcb, 0x87, 0x9c, 0x1b, 0x2d, 0x3c, 0x77, 0xc6, 0x46, 0xf1, 0xf5, 0xe5, 0xff, + 0x5b, 0x91, 0xdc, 0xfc, 0x3d, 0x85, 0xf1, 0x8a, 0xb5, 0xdb, 0xbb, 0x58, 
0x24, 0xbd, 0x86, 0xf1, + 0xce, 0xd8, 0x6d, 0x55, 0x9b, 0x5d, 0x29, 0x45, 0xd6, 0x9b, 0xf5, 0xe6, 0xa3, 0x25, 0x3c, 0xa2, + 0x85, 0x48, 0xdf, 0x42, 0x72, 0x30, 0xb8, 0x7d, 0x83, 0xd9, 0x33, 0xb2, 0x9c, 0x3f, 0xc2, 0xd5, + 0xbe, 0xc1, 0xf4, 0x1d, 0x4c, 0xb8, 0xb1, 0x16, 0x6b, 0xe6, 0xa4, 0xd1, 0xa1, 0xd0, 0x09, 0xb9, + 0x92, 0x23, 0xba, 0x10, 0xc1, 0xd6, 0xf2, 0x7b, 0x14, 0xbe, 0x46, 0x51, 0x3a, 0xa9, 0x30, 0x3b, + 0x8d, 0xb6, 0x03, 0x5d, 0x49, 0x85, 0xe9, 0x1b, 0x80, 0xd6, 0x31, 0xeb, 0xa2, 0xe5, 0x8c, 0x2c, + 0x23, 0x22, 0x24, 0x5f, 0xc3, 0xd8, 0x37, 0x82, 0x39, 0x8c, 0x7a, 0x3f, 0x8e, 0x1c, 0x11, 0x19, + 0x5e, 0xc1, 0x10, 0x75, 0xd7, 0x60, 0x40, 0xea, 0x00, 0x75, 0x2c, 0xfd, 0x05, 0xfa, 0xad, 0x63, + 0xce, 0xb7, 0xd9, 0x70, 0xd6, 0x9b, 0x4f, 0x3e, 0xcc, 0xf3, 0x27, 0xe2, 0xcb, 0x43, 0x50, 0xf9, + 0x1d, 0xf9, 0x97, 0xdd, 0xbd, 0xf4, 0x13, 0x4c, 0x2d, 0xb2, 0xd6, 0xe8, 0xb2, 0x32, 0xb6, 0x94, + 0x9a, 0x1b, 0xd5, 0xd4, 0x18, 0x1e, 0x98, 0x8d, 0xa8, 0xd7, 0x8b, 0x28, 0x7f, 0x33, 0x76, 0x71, + 0x24, 0x86, 0xb7, 0xe3, 0x03, 0x72, 0x4f, 0x01, 0xd1, 0x68, 0x30, 0xeb, 0xcd, 0x4f, 0x96, 0xc9, + 0x81, 0xd2, 0x80, 0xef, 0xe1, 0xe2, 0x8f, 0x47, 0x8f, 0xe5, 0x8e, 0xc9, 0x2e, 0x80, 0x71, 0xf4, + 0x11, 0xfe, 0xc5, 0x64, 0x0c, 0xe1, 0x06, 0x92, 0xb0, 0xd5, 0x52, 0x60, 0x55, 0x6a, 0xa6, 0x30, + 0x3b, 0xa7, 0xe6, 0xe3, 0x00, 0xbf, 0x62, 0xf5, 0x83, 0x29, 0x4c, 0xaf, 0x60, 0x44, 0x1e, 0x5a, + 0x5b, 0x42, 0xfa, 0x30, 0x00, 0x5a, 0xd9, 0x73, 0x38, 0x93, 0xba, 0xf1, 0x2e, 0x9b, 0x90, 0x10, + 0x0f, 0xe9, 0x4b, 0xe8, 0x1b, 0xef, 0x02, 0xbe, 0x20, 0xdc, 0x9d, 0xd2, 0x29, 0x0c, 0xa8, 0x94, + 0x14, 0xd9, 0x65, 0x14, 0xc2, 0x71, 0x21, 0x6e, 0xbf, 0xdf, 0x26, 0x47, 0xdf, 0xe9, 0xe7, 0xfa, + 0xf7, 0xe7, 0x8d, 0x74, 0xf7, 0x7e, 0x1d, 0x72, 0x2d, 0xba, 0x5c, 0x8b, 0x43, 0xae, 0x05, 0xaf, + 0x25, 0x6a, 0x57, 0x6c, 0xcc, 0xc6, 0x36, 0xfc, 0x88, 0xd3, 0x3f, 0x5d, 0xf7, 0x29, 0xf6, 0x8f, + 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x7f, 0x0a, 0x96, 0xed, 0x02, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/workflow.pb.go b/client/gogrpc/conductor/model/workflow.pb.go new file mode 100644 index 0000000000..764fc0b108 --- /dev/null +++ b/client/gogrpc/conductor/model/workflow.pb.go @@ -0,0 +1,278 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflow.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Workflow_WorkflowStatus int32 + +const ( + Workflow_RUNNING Workflow_WorkflowStatus = 0 + Workflow_COMPLETED Workflow_WorkflowStatus = 1 + Workflow_FAILED Workflow_WorkflowStatus = 2 + Workflow_TIMED_OUT Workflow_WorkflowStatus = 3 + Workflow_TERMINATED Workflow_WorkflowStatus = 4 + Workflow_PAUSED Workflow_WorkflowStatus = 5 +) + +var Workflow_WorkflowStatus_name = map[int32]string{ + 0: "RUNNING", + 1: "COMPLETED", + 2: "FAILED", + 3: "TIMED_OUT", + 4: "TERMINATED", + 5: "PAUSED", +} +var Workflow_WorkflowStatus_value = map[string]int32{ + "RUNNING": 0, + "COMPLETED": 1, + "FAILED": 2, + "TIMED_OUT": 3, + "TERMINATED": 4, + "PAUSED": 5, +} + +func (x Workflow_WorkflowStatus) String() string { + return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) +} +func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflow_544173e0c7375d2a, []int{0, 0} +} + +type Workflow struct { + Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,enum=com.netflix.conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId" json:"parent_workflow_id,omitempty"` + ParentWorkflowTaskId string `protobuf:"bytes,5,opt,name=parent_workflow_task_id,json=parentWorkflowTaskId" json:"parent_workflow_task_id,omitempty"` + Tasks []*Task `protobuf:"bytes,6,rep,name=tasks" json:"tasks,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,8,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Output map[string]*_struct.Value `protobuf:"bytes,9,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + WorkflowType string `protobuf:"bytes,10,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,11,opt,name=version" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,12,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + ReRunFromWorkflowId string `protobuf:"bytes,13,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId" json:"re_run_from_workflow_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,14,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + SchemaVersion int32 `protobuf:"varint,15,opt,name=schema_version,json=schemaVersion" json:"schema_version,omitempty"` + Event string `protobuf:"bytes,16,opt,name=event" json:"event,omitempty"` + TaskToDomain map[string]string `protobuf:"bytes,17,rep,name=task_to_domain,json=taskToDomain" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + FailedReferenceTaskNames []string `protobuf:"bytes,18,rep,name=failed_reference_task_names,json=failedReferenceTaskNames" json:"failed_reference_task_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Workflow) Reset() { *m = Workflow{} } +func (m *Workflow) String() string { return proto.CompactTextString(m) } +func (*Workflow) ProtoMessage() {} +func (*Workflow) 
Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_544173e0c7375d2a, []int{0} +} +func (m *Workflow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Workflow.Unmarshal(m, b) +} +func (m *Workflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Workflow.Marshal(b, m, deterministic) +} +func (dst *Workflow) XXX_Merge(src proto.Message) { + xxx_messageInfo_Workflow.Merge(dst, src) +} +func (m *Workflow) XXX_Size() int { + return xxx_messageInfo_Workflow.Size(m) +} +func (m *Workflow) XXX_DiscardUnknown() { + xxx_messageInfo_Workflow.DiscardUnknown(m) +} + +var xxx_messageInfo_Workflow proto.InternalMessageInfo + +func (m *Workflow) GetStatus() Workflow_WorkflowStatus { + if m != nil { + return m.Status + } + return Workflow_RUNNING +} + +func (m *Workflow) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +func (m *Workflow) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *Workflow) GetParentWorkflowId() string { + if m != nil { + return m.ParentWorkflowId + } + return "" +} + +func (m *Workflow) GetParentWorkflowTaskId() string { + if m != nil { + return m.ParentWorkflowTaskId + } + return "" +} + +func (m *Workflow) GetTasks() []*Task { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *Workflow) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *Workflow) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func (m *Workflow) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *Workflow) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *Workflow) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *Workflow) GetReRunFromWorkflowId() string { + if m != nil { + return m.ReRunFromWorkflowId + } + return "" +} + +func (m *Workflow) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *Workflow) GetSchemaVersion() int32 { + if m != nil { + return m.SchemaVersion + } + return 0 +} + +func (m *Workflow) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *Workflow) GetTaskToDomain() map[string]string { + if m != nil { + return m.TaskToDomain + } + return nil +} + +func (m *Workflow) GetFailedReferenceTaskNames() []string { + if m != nil { + return m.FailedReferenceTaskNames + } + return nil +} + +func init() { + proto.RegisterType((*Workflow)(nil), "com.netflix.conductor.proto.Workflow") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Workflow.InputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Workflow.OutputEntry") + proto.RegisterMapType((map[string]string)(nil), "com.netflix.conductor.proto.Workflow.TaskToDomainEntry") + proto.RegisterEnum("com.netflix.conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) +} + +func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_544173e0c7375d2a) } + +var fileDescriptor_workflow_544173e0c7375d2a = []byte{ + // 688 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, + 0x18, 0x77, 0x8c, 0x0d, 0xf6, 0x8c, 0xcd, 0x72, 0x4e, 0xa9, 0x60, 0xe2, 0xc4, 0x98, 0xec, 
0x05, + 0xe9, 0x14, 0x51, 0x0c, 0x89, 0x31, 0xe0, 0x36, 0x6d, 0x02, 0x63, 0x96, 0x02, 0x89, 0x89, 0x69, + 0xba, 0xf6, 0x3a, 0x9a, 0xb5, 0x77, 0xcd, 0xf5, 0x0a, 0xee, 0xd3, 0xf9, 0xd5, 0xcc, 0xdd, 0xad, + 0x50, 0xd4, 0x10, 0x5e, 0xf8, 0xae, 0xf7, 0xfc, 0xfe, 0xf4, 0xee, 0xf7, 0xdc, 0x73, 0xd0, 0x8a, + 0xa9, 0x8f, 0xa3, 0xee, 0x15, 0x65, 0xd3, 0x20, 0xa2, 0x57, 0x46, 0xc2, 0x28, 0xa7, 0x68, 0xc3, + 0xa3, 0xb1, 0x41, 0x30, 0x0f, 0xa2, 0xf0, 0xa7, 0xe1, 0x51, 0xe2, 0x67, 0x1e, 0xa7, 0x4c, 0x81, + 0xeb, 0x9a, 0x92, 0x70, 0x37, 0x9d, 0xce, 0x2b, 0xcf, 0x26, 0x94, 0x4e, 0x22, 0xdc, 0x95, 0xab, + 0x71, 0x16, 0x74, 0x53, 0xce, 0x32, 0x8f, 0x2b, 0x74, 0xf3, 0x57, 0x0d, 0x96, 0xcf, 0xe7, 0xfe, + 0xe8, 0x10, 0xaa, 0x29, 0x77, 0x79, 0x96, 0xea, 0xa5, 0x76, 0xa9, 0xd3, 0xdc, 0xde, 0x31, 0xee, + 0xf8, 0x95, 0x91, 0xcb, 0xae, 0x3f, 0x4e, 0xa4, 0xd6, 0x9a, 0x7b, 0xa0, 0xa7, 0xb0, 0x8c, 0x89, + 0xef, 0xf0, 0x30, 0xc6, 0xfa, 0x42, 0xbb, 0xd4, 0x29, 0x5b, 0x4b, 0x98, 0xf8, 0x76, 0x18, 0x63, + 0xf4, 0x1c, 0xea, 0xf9, 0xa1, 0x9c, 0xd0, 0xd7, 0xcb, 0xed, 0x52, 0xa7, 0x66, 0x41, 0x5e, 0x32, + 0x7d, 0xb4, 0x05, 0x28, 0x71, 0x19, 0x26, 0xdc, 0x29, 0xf2, 0x16, 0x25, 0x4f, 0x53, 0xc8, 0xf9, + 0x0d, 0xfb, 0x1d, 0xac, 0xfd, 0xc9, 0x16, 0x01, 0x08, 0x49, 0x45, 0x4a, 0x5a, 0xb7, 0x25, 0xb6, + 0x9b, 0x4e, 0x4d, 0x1f, 0xed, 0x42, 0x45, 0xd0, 0x52, 0xbd, 0xda, 0x2e, 0x77, 0xea, 0xdb, 0x2f, + 0xee, 0x3c, 0xad, 0xd0, 0x58, 0x8a, 0x8f, 0x06, 0x50, 0x09, 0x49, 0x92, 0x71, 0x7d, 0x59, 0x0a, + 0x5f, 0xdf, 0x2f, 0x26, 0x53, 0x48, 0xfa, 0x84, 0xb3, 0x99, 0xa5, 0xe4, 0xc8, 0x84, 0x2a, 0xcd, + 0xb8, 0x30, 0xaa, 0x49, 0xa3, 0x37, 0xf7, 0x33, 0x3a, 0x96, 0x1a, 0xe5, 0x34, 0x37, 0x40, 0x2f, + 0xa1, 0x71, 0x73, 0xf6, 0x59, 0x82, 0x75, 0x90, 0x07, 0x5f, 0xc9, 0x8b, 0xf6, 0x2c, 0xc1, 0x48, + 0x87, 0xa5, 0x4b, 0xcc, 0xd2, 0x90, 0x12, 0xbd, 0xde, 0x2e, 0x75, 0x2a, 0x56, 0xbe, 0x44, 0xaf, + 0xa0, 0xe9, 0x51, 0xc6, 0x70, 0xe4, 0xf2, 0x90, 0x12, 0x11, 0xdc, 0x8a, 0xd4, 0x37, 0x0a, 0x55, + 0xd3, 0x47, 0x3b, 0xb0, 0xc6, 0xb0, 0xc3, 0x32, 0xe2, 0x04, 0x8c, 0xc6, 0xb7, 0x7a, 0xd3, 0x90, + 0xfc, 0x47, 0x0c, 0x5b, 0x19, 0x19, 0x30, 0x1a, 0x17, 0xda, 0xf3, 0x5e, 0xa8, 0xdc, 0x94, 0x12, + 0x27, 0xa0, 0xcc, 0x09, 0x89, 0x47, 0xe3, 0x24, 0xc2, 0xc2, 0x52, 0x6f, 0x4a, 0xd5, 0x63, 0x05, + 0x0f, 0x28, 0x33, 0x0b, 0xa0, 0xd8, 0x54, 0xea, 0x5d, 0xe0, 0xd8, 0x75, 0xf2, 0x5d, 0x3f, 0x94, + 0xbb, 0x6e, 0xa8, 0xea, 0xd9, 0x7c, 0xef, 0x2d, 0xa8, 0xe0, 0x4b, 0x4c, 0xb8, 0xae, 0x49, 0x33, + 0xb5, 0x40, 0x3f, 0xa0, 0x29, 0xef, 0x00, 0xa7, 0x8e, 0x4f, 0x63, 0x37, 0x24, 0xfa, 0xaa, 0xcc, + 0x78, 0xf7, 0x7e, 0x19, 0x8b, 0x76, 0xdb, 0xb4, 0x27, 0x95, 0x2a, 0xe9, 0x15, 0x5e, 0x28, 0xa1, + 0x8f, 0xb0, 0x11, 0xb8, 0x61, 0x84, 0x7d, 0x87, 0xe1, 0x00, 0x33, 0x4c, 0x3c, 0xac, 0xee, 0x1c, + 0x71, 0x63, 0x9c, 0xea, 0xa8, 0x5d, 0xee, 0xd4, 0x2c, 0x5d, 0x51, 0xac, 0x9c, 0x21, 0x4c, 0x87, + 0x02, 0x5f, 0x1f, 0x01, 0xdc, 0x5c, 0x07, 0xa4, 0x41, 0x79, 0x8a, 0x67, 0x72, 0xe8, 0x6a, 0x96, + 0xf8, 0x44, 0x5b, 0x50, 0xb9, 0x74, 0xa3, 0x4c, 0x0d, 0x4e, 0x7d, 0xfb, 0x89, 0xa1, 0x86, 0xd8, + 0xc8, 0x87, 0xd8, 0x38, 0x13, 0xa8, 0xa5, 0x48, 0x7b, 0x0b, 0x1f, 0x4a, 0xeb, 0xdf, 0xa0, 0x5e, + 0xb8, 0x17, 0xff, 0xc5, 0xf2, 0x13, 0xac, 0xfe, 0x15, 0xc3, 0x3f, 0x8c, 0x5b, 0x45, 0xe3, 0x5a, + 0xc1, 0x60, 0xd3, 0x83, 0xe6, 0xed, 0xb7, 0x01, 0xd5, 0x61, 0xc9, 0x3a, 0x1d, 0x0e, 0xcd, 0xe1, + 0x17, 0xed, 0x01, 0x6a, 0x40, 0xed, 0xf3, 0xf1, 0xd1, 0xe8, 0xb0, 0x6f, 0xf7, 0x7b, 0x5a, 0x09, + 0x01, 0x54, 0x07, 0xfb, 0xe6, 0x61, 0xbf, 0xa7, 0x2d, 0x08, 0xc8, 0x36, 0x8f, 0xfa, 0x3d, 0xe7, + 0xf8, 0xd4, 0xd6, 
0xca, 0xa8, 0x09, 0x60, 0xf7, 0xad, 0x23, 0x73, 0xb8, 0x2f, 0xa8, 0x8b, 0x82, + 0x3a, 0xda, 0x3f, 0x3d, 0xe9, 0xf7, 0xb4, 0xca, 0xc1, 0xd7, 0x03, 0xc8, 0x7f, 0x32, 0x1a, 0x7f, + 0xdf, 0x9b, 0x84, 0xfc, 0x22, 0x1b, 0x8b, 0x26, 0x77, 0xe7, 0x4d, 0xee, 0x5e, 0x37, 0xb9, 0xeb, + 0x45, 0x21, 0x26, 0xbc, 0x3b, 0xa1, 0x13, 0x96, 0x78, 0x85, 0xba, 0x7c, 0x34, 0xc7, 0x55, 0x19, + 0xc5, 0xdb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x79, 0x5d, 0x7e, 0x77, 0x05, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/workflowdef.pb.go b/client/gogrpc/conductor/model/workflowdef.pb.go new file mode 100644 index 0000000000..5256c694fd --- /dev/null +++ b/client/gogrpc/conductor/model/workflowdef.pb.go @@ -0,0 +1,152 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflowdef.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowDef struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` + Version int32 `protobuf:"varint,3,opt,name=version" json:"version,omitempty"` + Tasks []*WorkflowTask `protobuf:"bytes,4,rep,name=tasks" json:"tasks,omitempty"` + InputParameters []string `protobuf:"bytes,5,rep,name=input_parameters,json=inputParameters" json:"input_parameters,omitempty"` + OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow" json:"failure_workflow,omitempty"` + SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion" json:"schema_version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowDef) Reset() { *m = WorkflowDef{} } +func (m *WorkflowDef) String() string { return proto.CompactTextString(m) } +func (*WorkflowDef) ProtoMessage() {} +func (*WorkflowDef) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowdef_bf5b9fbf7e32cdd5, []int{0} +} +func (m *WorkflowDef) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowDef.Unmarshal(m, b) +} +func (m *WorkflowDef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowDef.Marshal(b, m, deterministic) +} +func (dst *WorkflowDef) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowDef.Merge(dst, src) +} +func (m *WorkflowDef) XXX_Size() int { + return xxx_messageInfo_WorkflowDef.Size(m) +} +func (m *WorkflowDef) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowDef.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowDef proto.InternalMessageInfo + +func (m *WorkflowDef) GetName() string { + if m != nil { + return m.Name 
+ } + return "" +} + +func (m *WorkflowDef) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *WorkflowDef) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowDef) GetTasks() []*WorkflowTask { + if m != nil { + return m.Tasks + } + return nil +} + +func (m *WorkflowDef) GetInputParameters() []string { + if m != nil { + return m.InputParameters + } + return nil +} + +func (m *WorkflowDef) GetOutputParameters() map[string]*_struct.Value { + if m != nil { + return m.OutputParameters + } + return nil +} + +func (m *WorkflowDef) GetFailureWorkflow() string { + if m != nil { + return m.FailureWorkflow + } + return "" +} + +func (m *WorkflowDef) GetSchemaVersion() int32 { + if m != nil { + return m.SchemaVersion + } + return 0 +} + +func init() { + proto.RegisterType((*WorkflowDef)(nil), "com.netflix.conductor.proto.WorkflowDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.WorkflowDef.OutputParametersEntry") +} + +func init() { + proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_bf5b9fbf7e32cdd5) +} + +var fileDescriptor_workflowdef_bf5b9fbf7e32cdd5 = []byte{ + // 388 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x4b, 0xab, 0xd3, 0x40, + 0x14, 0xc7, 0xc9, 0xcd, 0xcd, 0xad, 0x9d, 0x50, 0x5b, 0x07, 0xd4, 0x50, 0x5d, 0x04, 0x41, 0x48, + 0x41, 0x26, 0x50, 0x37, 0xd2, 0x85, 0x42, 0xa9, 0x2b, 0x17, 0x96, 0x20, 0x15, 0x74, 0x51, 0x26, + 0x93, 0x93, 0x34, 0xe4, 0x31, 0x61, 0x1e, 0xad, 0xfd, 0xc0, 0x7e, 0x0f, 0xc9, 0x24, 0xa9, 0xb5, + 0x88, 0xb8, 0x9b, 0xf9, 0xfd, 0xe7, 0xbc, 0xfe, 0x67, 0xd0, 0xf3, 0x8a, 0x27, 0x50, 0x86, 0x27, + 0x2e, 0x8a, 0xb4, 0xe4, 0xa7, 0x04, 0x52, 0xd2, 0x08, 0xae, 0x38, 0x7e, 0xc1, 0x78, 0x45, 0x6a, + 0x50, 0x69, 0x99, 0xff, 0x20, 0x8c, 0xd7, 0x89, 0x66, 0x8a, 0x8b, 0x4e, 0x9c, 0x7b, 0x7f, 0x46, + 0x29, 0x2a, 0x8b, 0x5e, 0x79, 0x99, 0x71, 0x9e, 0x95, 0x10, 0x9a, 0x5b, 0xac, 0xd3, 0x50, 0x2a, + 0xa1, 0x99, 0xea, 0xd4, 0x57, 0x3f, 0x6d, 0xe4, 0x7e, 0xed, 0x83, 0x36, 0x90, 0x62, 0x8c, 0xee, + 0x6b, 0x5a, 0x81, 0x67, 0xf9, 0x56, 0x30, 0x8e, 0xcc, 0x19, 0xfb, 0xc8, 0x4d, 0x40, 0x32, 0x91, + 0x37, 0x2a, 0xe7, 0xb5, 0x77, 0x67, 0xa4, 0x6b, 0x84, 0x3d, 0x34, 0x3a, 0x82, 0x90, 0xad, 0x6a, + 0xfb, 0x56, 0xe0, 0x44, 0xc3, 0x15, 0x7f, 0x40, 0x4e, 0xdb, 0x8b, 0xf4, 0xee, 0x7d, 0x3b, 0x70, + 0x97, 0x0b, 0xf2, 0x8f, 0x21, 0xc8, 0xd0, 0xc8, 0x17, 0x2a, 0x8b, 0xa8, 0x8b, 0xc3, 0x0b, 0x34, + 0xcb, 0xeb, 0x46, 0xab, 0x7d, 0x43, 0x05, 0xad, 0x40, 0x81, 0x90, 0x9e, 0xe3, 0xdb, 0xc1, 0x38, + 0x9a, 0x1a, 0xbe, 0xbd, 0x60, 0x5c, 0xa0, 0x27, 0x5c, 0xab, 0x9b, 0xb7, 0x0f, 0xa6, 0xee, 0xfb, + 0xff, 0xaa, 0xbb, 0x81, 0x94, 0x7c, 0x36, 0x19, 0x7e, 0x67, 0xfd, 0x58, 0x2b, 0x71, 0x8e, 0x66, + 0xfc, 0x06, 0xb7, 0x7d, 0xa5, 0x34, 0x2f, 0xb5, 0x80, 0xfd, 0x60, 0xba, 0x37, 0x32, 0xce, 0x4c, + 0x7b, 0x3e, 0x64, 0xc5, 0xaf, 0xd1, 0x63, 0xc9, 0x0e, 0x50, 0xd1, 0xfd, 0x60, 0xd2, 0x23, 0x63, + 0xd2, 0xa4, 0xa3, 0xbb, 0x0e, 0xce, 0xbf, 0xa3, 0xa7, 0x7f, 0x2d, 0x8e, 0x67, 0xc8, 0x2e, 0xe0, + 0xdc, 0xaf, 0xa4, 0x3d, 0xe2, 0x37, 0xc8, 0x39, 0xd2, 0x52, 0x83, 0xd9, 0x85, 0xbb, 0x7c, 0x46, + 0xba, 0x1d, 0x93, 0x61, 0xc7, 0x64, 0xd7, 0xaa, 0x51, 0xf7, 0x68, 0x75, 0xf7, 0xce, 0x5a, 0x7f, + 0x5a, 0x4f, 0xae, 0xa6, 0xdc, 0xc6, 0xdf, 0x56, 0x59, 0xae, 0x0e, 0x3a, 0x6e, 0x7d, 0x09, 0x7b, + 0x5f, 0xc2, 0x8b, 0x2f, 0x21, 0x2b, 0x73, 0xa8, 0x55, 0x98, 0xf1, 0x4c, 0x34, 0xec, 0x8a, 0x9b, + 0x5f, 0x16, 0x3f, 0x98, 
0x3a, 0x6f, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x9d, 0x18, 0xb5, + 0xab, 0x02, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/workflowsummary.pb.go b/client/gogrpc/conductor/model/workflowsummary.pb.go new file mode 100644 index 0000000000..85d5936c8e --- /dev/null +++ b/client/gogrpc/conductor/model/workflowsummary.pb.go @@ -0,0 +1,200 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflowsummary.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowSummary struct { + WorkflowType string `protobuf:"bytes,1,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + CorrelationId string `protobuf:"bytes,4,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,enum=com.netflix.conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + Input string `protobuf:"bytes,9,opt,name=input" json:"input,omitempty"` + Output string `protobuf:"bytes,10,opt,name=output" json:"output,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 `protobuf:"varint,12,opt,name=execution_time,json=executionTime" json:"execution_time,omitempty"` + Event string `protobuf:"bytes,13,opt,name=event" json:"event,omitempty"` + FailedReferenceTaskNames string `protobuf:"bytes,14,opt,name=failed_reference_task_names,json=failedReferenceTaskNames" json:"failed_reference_task_names,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowSummary) Reset() { *m = WorkflowSummary{} } +func (m *WorkflowSummary) String() string { return proto.CompactTextString(m) } +func (*WorkflowSummary) ProtoMessage() {} +func (*WorkflowSummary) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowsummary_235407b7a0ddc9c7, []int{0} +} +func (m *WorkflowSummary) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowSummary.Unmarshal(m, b) +} +func (m *WorkflowSummary) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowSummary.Marshal(b, m, deterministic) +} +func (dst *WorkflowSummary) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowSummary.Merge(dst, src) +} +func (m *WorkflowSummary) XXX_Size() int { + return 
xxx_messageInfo_WorkflowSummary.Size(m) +} +func (m *WorkflowSummary) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowSummary.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowSummary proto.InternalMessageInfo + +func (m *WorkflowSummary) GetWorkflowType() string { + if m != nil { + return m.WorkflowType + } + return "" +} + +func (m *WorkflowSummary) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *WorkflowSummary) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *WorkflowSummary) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *WorkflowSummary) GetStartTime() string { + if m != nil { + return m.StartTime + } + return "" +} + +func (m *WorkflowSummary) GetUpdateTime() string { + if m != nil { + return m.UpdateTime + } + return "" +} + +func (m *WorkflowSummary) GetEndTime() string { + if m != nil { + return m.EndTime + } + return "" +} + +func (m *WorkflowSummary) GetStatus() Workflow_WorkflowStatus { + if m != nil { + return m.Status + } + return Workflow_RUNNING +} + +func (m *WorkflowSummary) GetInput() string { + if m != nil { + return m.Input + } + return "" +} + +func (m *WorkflowSummary) GetOutput() string { + if m != nil { + return m.Output + } + return "" +} + +func (m *WorkflowSummary) GetReasonForIncompletion() string { + if m != nil { + return m.ReasonForIncompletion + } + return "" +} + +func (m *WorkflowSummary) GetExecutionTime() int64 { + if m != nil { + return m.ExecutionTime + } + return 0 +} + +func (m *WorkflowSummary) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *WorkflowSummary) GetFailedReferenceTaskNames() string { + if m != nil { + return m.FailedReferenceTaskNames + } + return "" +} + +func init() { + proto.RegisterType((*WorkflowSummary)(nil), "com.netflix.conductor.proto.WorkflowSummary") +} + +func init() { + proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_235407b7a0ddc9c7) +} + +var fileDescriptor_workflowsummary_235407b7a0ddc9c7 = []byte{ + // 428 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x8b, 0xd3, 0x40, + 0x10, 0xc6, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x8a, 0x4b, 0xd5, 0xd5, 0x22, 0x16, 0x45, 0xe8, + 0x53, 0x02, 0x2a, 0x3e, 0x08, 0xbe, 0xdc, 0x83, 0x50, 0x10, 0x95, 0x5c, 0x41, 0xf0, 0x25, 0x6c, + 0x77, 0x27, 0x75, 0x69, 0xb2, 0x1b, 0x76, 0x27, 0x77, 0xd7, 0xbf, 0xc7, 0x7f, 0x54, 0x32, 0xdb, + 0x94, 0xea, 0x83, 0x6f, 0x99, 0xdf, 0xf7, 0xcd, 0x4e, 0xe6, 0x63, 0xd8, 0xbc, 0x76, 0x1a, 0xaa, + 0xec, 0xce, 0xf9, 0x5d, 0x59, 0xb9, 0xbb, 0xd0, 0xd6, 0xb5, 0xf4, 0xfb, 0xb4, 0xf1, 0x0e, 0x1d, + 0x9f, 0x2b, 0x57, 0xa7, 0x16, 0xb0, 0xac, 0xcc, 0x7d, 0xaa, 0x9c, 0xd5, 0xad, 0x42, 0xe7, 0xa3, + 0xf8, 0x7c, 0xf6, 0x77, 0x67, 0xa4, 0xaf, 0x7e, 0x9f, 0xb3, 0x87, 0x3f, 0x0e, 0xe8, 0x26, 0x3e, + 0xc6, 0x5f, 0xb3, 0x49, 0xef, 0x2a, 0x70, 0xdf, 0x80, 0x48, 0x16, 0xc9, 0x72, 0x9c, 0x5f, 0xf5, + 0x70, 0xbd, 0x6f, 0x80, 0x0b, 0x36, 0xba, 0x05, 0x1f, 0x8c, 0xb3, 0xe2, 0xc1, 0x22, 0x59, 0x0e, + 0xf2, 0xbe, 0xe4, 0x2f, 0xd9, 0xe5, 0xb1, 0xdd, 0x68, 0x71, 0x46, 0xcd, 0xac, 0x47, 0x2b, 0xcd, + 0xdf, 0xb0, 0xa9, 0x72, 0xde, 0x43, 0x25, 0xd1, 0x38, 0xdb, 0x79, 0xce, 0xc9, 0x33, 0x39, 0xa1, + 0x2b, 0xcd, 0x5f, 0x30, 0x16, 0x50, 0x7a, 0x2c, 0xd0, 0xd4, 0x20, 0x06, 0x64, 0x19, 0x13, 0x59, + 0x9b, 0x1a, 0xba, 0x31, 0x6d, 0xa3, 0x25, 0x42, 0xd4, 0x87, 0x71, 0x4c, 0x44, 0x64, 0x78, 0xc6, + 0x2e, 0xc0, 0xea, 0xa8, 
0x8e, 0x48, 0x1d, 0x81, 0xd5, 0x24, 0x7d, 0x61, 0xc3, 0x80, 0x12, 0xdb, + 0x20, 0x2e, 0x16, 0xc9, 0x72, 0xfa, 0xf6, 0x7d, 0xfa, 0x9f, 0xe4, 0xd2, 0x3e, 0x9f, 0xe3, 0xc7, + 0x0d, 0xf5, 0xe6, 0x87, 0x37, 0xf8, 0x8c, 0x0d, 0x8c, 0x6d, 0x5a, 0x14, 0x63, 0x9a, 0x12, 0x0b, + 0xfe, 0x84, 0x0d, 0x5d, 0x8b, 0x1d, 0x66, 0x84, 0x0f, 0x15, 0xff, 0xc0, 0x9e, 0x7a, 0x90, 0xc1, + 0xd9, 0xa2, 0x74, 0xbe, 0x30, 0x56, 0xb9, 0xba, 0xa9, 0xa0, 0xdb, 0x59, 0x5c, 0x92, 0xf1, 0x71, + 0x94, 0x3f, 0x3b, 0xbf, 0x3a, 0x11, 0xbb, 0xd4, 0xe0, 0x1e, 0x54, 0x4b, 0x99, 0xd1, 0x52, 0x57, + 0x8b, 0x64, 0x79, 0x96, 0x4f, 0x8e, 0x94, 0x56, 0x9b, 0xb1, 0x01, 0xdc, 0x82, 0x45, 0x31, 0x89, + 0x3f, 0x43, 0x05, 0xff, 0xc4, 0xe6, 0xa5, 0x34, 0x15, 0xe8, 0xc2, 0x43, 0x09, 0x1e, 0xac, 0x82, + 0x02, 0x65, 0xd8, 0x15, 0x56, 0xd6, 0x10, 0xc4, 0x94, 0xbc, 0x22, 0x5a, 0xf2, 0xde, 0xb1, 0x96, + 0x61, 0xf7, 0xb5, 0xd3, 0xaf, 0xbf, 0x5d, 0x3f, 0xfa, 0xe7, 0x48, 0xbe, 0x6f, 0x7e, 0x7e, 0xdc, + 0x1a, 0xfc, 0xd5, 0x6e, 0xba, 0xe8, 0xb2, 0x43, 0x74, 0xd9, 0x31, 0xba, 0x4c, 0x55, 0x06, 0x2c, + 0x66, 0x5b, 0xb7, 0xf5, 0x8d, 0x3a, 0xe1, 0x74, 0x85, 0x9b, 0x21, 0x25, 0xfb, 0xee, 0x4f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x83, 0xba, 0xab, 0xd5, 0xcf, 0x02, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/workflowtask.pb.go b/client/gogrpc/conductor/model/workflowtask.pb.go new file mode 100644 index 0000000000..87b06ccd3b --- /dev/null +++ b/client/gogrpc/conductor/model/workflowtask.pb.go @@ -0,0 +1,344 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/workflowtask.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type WorkflowTask_Type int32 + +const ( + WorkflowTask_SIMPLE WorkflowTask_Type = 0 + WorkflowTask_DYNAMIC WorkflowTask_Type = 1 + WorkflowTask_FORK_JOIN WorkflowTask_Type = 2 + WorkflowTask_FORK_JOIN_DYNAMIC WorkflowTask_Type = 3 + WorkflowTask_DECISION WorkflowTask_Type = 4 + WorkflowTask_JOIN WorkflowTask_Type = 5 + WorkflowTask_SUB_WORKFLOW WorkflowTask_Type = 6 + WorkflowTask_EVENT WorkflowTask_Type = 7 + WorkflowTask_WAIT WorkflowTask_Type = 8 + WorkflowTask_USER_DEFINED WorkflowTask_Type = 9 +) + +var WorkflowTask_Type_name = map[int32]string{ + 0: "SIMPLE", + 1: "DYNAMIC", + 2: "FORK_JOIN", + 3: "FORK_JOIN_DYNAMIC", + 4: "DECISION", + 5: "JOIN", + 6: "SUB_WORKFLOW", + 7: "EVENT", + 8: "WAIT", + 9: "USER_DEFINED", +} +var WorkflowTask_Type_value = map[string]int32{ + "SIMPLE": 0, + "DYNAMIC": 1, + "FORK_JOIN": 2, + "FORK_JOIN_DYNAMIC": 3, + "DECISION": 4, + "JOIN": 5, + "SUB_WORKFLOW": 6, + "EVENT": 7, + "WAIT": 8, + "USER_DEFINED": 9, +} + +func (x WorkflowTask_Type) String() string { + return proto.EnumName(WorkflowTask_Type_name, int32(x)) +} +func (WorkflowTask_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_workflowtask_5168db1c2690070f, []int{0, 0} +} + +type WorkflowTask struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` + InputParameters map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input_parameters,json=inputParameters" json:"input_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` + DynamicTaskNameParam string `protobuf:"bytes,6,opt,name=dynamic_task_name_param,json=dynamicTaskNameParam" json:"dynamic_task_name_param,omitempty"` + CaseValueParam string `protobuf:"bytes,7,opt,name=case_value_param,json=caseValueParam" json:"case_value_param,omitempty"` + CaseExpression string `protobuf:"bytes,8,opt,name=case_expression,json=caseExpression" json:"case_expression,omitempty"` + DecisionCases map[string]*WorkflowTask_WorkflowTaskList `protobuf:"bytes,9,rep,name=decision_cases,json=decisionCases" json:"decision_cases,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + DynamicForkTasksParam string `protobuf:"bytes,10,opt,name=dynamic_fork_tasks_param,json=dynamicForkTasksParam" json:"dynamic_fork_tasks_param,omitempty"` + DynamicForkTasksInputParamName string `protobuf:"bytes,11,opt,name=dynamic_fork_tasks_input_param_name,json=dynamicForkTasksInputParamName" json:"dynamic_fork_tasks_input_param_name,omitempty"` + DefaultCase []*WorkflowTask `protobuf:"bytes,12,rep,name=default_case,json=defaultCase" json:"default_case,omitempty"` + ForkTasks []*WorkflowTask_WorkflowTaskList `protobuf:"bytes,13,rep,name=fork_tasks,json=forkTasks" json:"fork_tasks,omitempty"` + StartDelay int32 `protobuf:"varint,14,opt,name=start_delay,json=startDelay" json:"start_delay,omitempty"` + SubWorkflowParam *SubWorkflowParams `protobuf:"bytes,15,opt,name=sub_workflow_param,json=subWorkflowParam" json:"sub_workflow_param,omitempty"` + JoinOn []string `protobuf:"bytes,16,rep,name=join_on,json=joinOn" json:"join_on,omitempty"` + Sink string `protobuf:"bytes,17,opt,name=sink" json:"sink,omitempty"` + 
Optional bool `protobuf:"varint,18,opt,name=optional" json:"optional,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } +func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } +func (*WorkflowTask) ProtoMessage() {} +func (*WorkflowTask) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowtask_5168db1c2690070f, []int{0} +} +func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTask.Unmarshal(m, b) +} +func (m *WorkflowTask) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTask.Marshal(b, m, deterministic) +} +func (dst *WorkflowTask) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTask.Merge(dst, src) +} +func (m *WorkflowTask) XXX_Size() int { + return xxx_messageInfo_WorkflowTask.Size(m) +} +func (m *WorkflowTask) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTask.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTask proto.InternalMessageInfo + +func (m *WorkflowTask) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *WorkflowTask) GetTaskReferenceName() string { + if m != nil { + return m.TaskReferenceName + } + return "" +} + +func (m *WorkflowTask) GetDescription() string { + if m != nil { + return m.Description + } + return "" +} + +func (m *WorkflowTask) GetInputParameters() map[string]*_struct.Value { + if m != nil { + return m.InputParameters + } + return nil +} + +func (m *WorkflowTask) GetType() string { + if m != nil { + return m.Type + } + return "" +} + +func (m *WorkflowTask) GetDynamicTaskNameParam() string { + if m != nil { + return m.DynamicTaskNameParam + } + return "" +} + +func (m *WorkflowTask) GetCaseValueParam() string { + if m != nil { + return m.CaseValueParam + } + return "" +} + +func (m *WorkflowTask) GetCaseExpression() string { + if m != nil { + return m.CaseExpression + } + return "" +} + +func (m *WorkflowTask) GetDecisionCases() map[string]*WorkflowTask_WorkflowTaskList { + if m != nil { + return m.DecisionCases + } + return nil +} + +func (m *WorkflowTask) GetDynamicForkTasksParam() string { + if m != nil { + return m.DynamicForkTasksParam + } + return "" +} + +func (m *WorkflowTask) GetDynamicForkTasksInputParamName() string { + if m != nil { + return m.DynamicForkTasksInputParamName + } + return "" +} + +func (m *WorkflowTask) GetDefaultCase() []*WorkflowTask { + if m != nil { + return m.DefaultCase + } + return nil +} + +func (m *WorkflowTask) GetForkTasks() []*WorkflowTask_WorkflowTaskList { + if m != nil { + return m.ForkTasks + } + return nil +} + +func (m *WorkflowTask) GetStartDelay() int32 { + if m != nil { + return m.StartDelay + } + return 0 +} + +func (m *WorkflowTask) GetSubWorkflowParam() *SubWorkflowParams { + if m != nil { + return m.SubWorkflowParam + } + return nil +} + +func (m *WorkflowTask) GetJoinOn() []string { + if m != nil { + return m.JoinOn + } + return nil +} + +func (m *WorkflowTask) GetSink() string { + if m != nil { + return m.Sink + } + return "" +} + +func (m *WorkflowTask) GetOptional() bool { + if m != nil { + return m.Optional + } + return false +} + +type WorkflowTask_WorkflowTaskList struct { + Tasks []*WorkflowTask `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowTask_WorkflowTaskList) 
Reset() { *m = WorkflowTask_WorkflowTaskList{} } +func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } +func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} +func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { + return fileDescriptor_workflowtask_5168db1c2690070f, []int{0, 0} +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Marshal(b, m, deterministic) +} +func (dst *WorkflowTask_WorkflowTaskList) XXX_Merge(src proto.Message) { + xxx_messageInfo_WorkflowTask_WorkflowTaskList.Merge(dst, src) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_Size() int { + return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Size(m) +} +func (m *WorkflowTask_WorkflowTaskList) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowTask_WorkflowTaskList.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowTask_WorkflowTaskList proto.InternalMessageInfo + +func (m *WorkflowTask_WorkflowTaskList) GetTasks() []*WorkflowTask { + if m != nil { + return m.Tasks + } + return nil +} + +func init() { + proto.RegisterType((*WorkflowTask)(nil), "com.netflix.conductor.proto.WorkflowTask") + proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "com.netflix.conductor.proto.WorkflowTask.DecisionCasesEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.WorkflowTask.InputParametersEntry") + proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "com.netflix.conductor.proto.WorkflowTask.WorkflowTaskList") + proto.RegisterEnum("com.netflix.conductor.proto.WorkflowTask_Type", WorkflowTask_Type_name, WorkflowTask_Type_value) +} + +func init() { + proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_5168db1c2690070f) +} + +var fileDescriptor_workflowtask_5168db1c2690070f = []byte{ + // 774 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xef, 0x6b, 0xdb, 0x3a, + 0x14, 0x7d, 0x6e, 0x7e, 0x5f, 0xa7, 0xa9, 0xab, 0xd7, 0xbe, 0x9a, 0xbc, 0xf7, 0xb6, 0xd0, 0x7d, + 0x58, 0x06, 0xc3, 0x81, 0x8e, 0xb1, 0x51, 0xc6, 0x46, 0xdb, 0x38, 0xe0, 0x35, 0x4d, 0x82, 0x93, + 0x36, 0xb4, 0x0c, 0x8c, 0x63, 0x2b, 0x99, 0x17, 0xc7, 0x0a, 0x96, 0xbc, 0x36, 0xb0, 0xbf, 0x63, + 0xdf, 0xf7, 0x9f, 0x0e, 0xc9, 0x76, 0x92, 0x76, 0xa5, 0x74, 0xec, 0x9b, 0x74, 0xef, 0xb9, 0xd7, + 0xe7, 0x1c, 0x5d, 0xc9, 0xa0, 0xce, 0x88, 0x8b, 0xfd, 0xc6, 0x35, 0x09, 0xa7, 0x63, 0x9f, 0x5c, + 0x33, 0x9b, 0x4e, 0xb5, 0x79, 0x48, 0x18, 0x41, 0xff, 0x3a, 0x64, 0xa6, 0x05, 0x98, 0x8d, 0x7d, + 0xef, 0x46, 0x73, 0x48, 0xe0, 0x46, 0x0e, 0x23, 0x61, 0x9c, 0xac, 0xfe, 0x1f, 0x97, 0xd1, 0x68, + 0x94, 0x56, 0xce, 0xed, 0xd0, 0x9e, 0xd1, 0x24, 0xfd, 0xdf, 0x84, 0x90, 0x89, 0x8f, 0x1b, 0x62, + 0x37, 0x8a, 0xc6, 0x0d, 0xca, 0xc2, 0xc8, 0x61, 0x71, 0x76, 0xff, 0x87, 0x0c, 0xe5, 0x61, 0x52, + 0x36, 0xb0, 0xe9, 0x14, 0x21, 0xc8, 0x06, 0xf6, 0x0c, 0xab, 0x52, 0x4d, 0xaa, 0x97, 0x4c, 0xb1, + 0x46, 0x1a, 0xfc, 0xcd, 0xc9, 0x58, 0x21, 0x1e, 0xe3, 0x10, 0x07, 0x0e, 0xb6, 0x04, 0x64, 0x43, + 0x40, 0xb6, 0x79, 0xca, 0x4c, 0x33, 0x1d, 0x8e, 0xaf, 0x81, 0xec, 0x62, 0xea, 0x84, 0xde, 0x9c, + 0x79, 0x24, 0x50, 0x33, 0x02, 0xb7, 0x1e, 0x42, 0x1e, 0x28, 0x5e, 0x30, 0x8f, 0x98, 0x25, 0xa8, + 0x62, 0x86, 0x43, 0xaa, 0x66, 0x6b, 0x99, 0xba, 0x7c, 0xf0, 0x5e, 
0x7b, 0x40, 0xab, 0xb6, 0x4e, + 0x55, 0x33, 0x78, 0x87, 0xde, 0xb2, 0x81, 0x1e, 0xb0, 0x70, 0x61, 0x6e, 0x79, 0xb7, 0xa3, 0x5c, + 0x10, 0x5b, 0xcc, 0xb1, 0x9a, 0x8b, 0x05, 0xf1, 0x35, 0x7a, 0x0d, 0x7b, 0xee, 0x22, 0xb0, 0x67, + 0x9e, 0x63, 0x09, 0x61, 0x5c, 0x4e, 0x4c, 0x45, 0xcd, 0x0b, 0xd8, 0x4e, 0x92, 0xe6, 0xdf, 0xe1, + 0x92, 0x44, 0x3f, 0x54, 0x07, 0xc5, 0xb1, 0x29, 0xb6, 0xbe, 0xda, 0x7e, 0x94, 0xe2, 0x0b, 0x02, + 0x5f, 0xe1, 0xf1, 0x0b, 0x1e, 0x8e, 0x91, 0xcf, 0x61, 0x4b, 0x20, 0xf1, 0xcd, 0x3c, 0xc4, 0x94, + 0x72, 0x17, 0x8a, 0x2b, 0xa0, 0xbe, 0x8c, 0x22, 0x07, 0x2a, 0x2e, 0x76, 0x3c, 0xbe, 0xb6, 0x78, + 0x8a, 0xaa, 0x25, 0x61, 0xc3, 0xbb, 0xc7, 0xdb, 0xd0, 0x4c, 0xea, 0x4f, 0x78, 0x79, 0x6c, 0xc2, + 0xa6, 0xbb, 0x1e, 0x43, 0x6f, 0x40, 0x4d, 0xe5, 0x8e, 0x49, 0x38, 0x15, 0x9a, 0x69, 0xc2, 0x1f, + 0x04, 0xad, 0xdd, 0x24, 0xdf, 0x22, 0xe1, 0x94, 0x37, 0xa5, 0xb1, 0x8c, 0x53, 0x78, 0x76, 0x4f, + 0xe1, 0xda, 0xc9, 0xc5, 0x83, 0x20, 0x8b, 0x1e, 0x4f, 0xee, 0xf6, 0x58, 0x9d, 0x8f, 0x98, 0x8a, + 0x36, 0x94, 0x5d, 0x3c, 0xb6, 0x23, 0x9f, 0x09, 0xa5, 0x6a, 0x59, 0x08, 0x7d, 0xf1, 0x68, 0xa1, + 0x7c, 0x82, 0x44, 0x39, 0x17, 0x85, 0x2e, 0x01, 0x56, 0x94, 0xd4, 0x4d, 0xd1, 0xeb, 0xf0, 0xf1, + 0xa6, 0xad, 0x6f, 0xda, 0x1e, 0x65, 0x66, 0x69, 0x9c, 0xd2, 0x46, 0x4f, 0x41, 0xa6, 0xcc, 0x0e, + 0x99, 0xe5, 0x62, 0xdf, 0x5e, 0xa8, 0x95, 0x9a, 0x54, 0xcf, 0x99, 0x20, 0x42, 0x4d, 0x1e, 0x41, + 0x9f, 0x00, 0xd1, 0x68, 0x64, 0xa5, 0xd7, 0x2d, 0x71, 0x72, 0xab, 0x26, 0xd5, 0xe5, 0x03, 0xed, + 0x41, 0x0e, 0xfd, 0x68, 0x94, 0x7e, 0x59, 0x18, 0x43, 0x4d, 0x85, 0xde, 0x09, 0xa1, 0x3d, 0x28, + 0x7c, 0x21, 0x5e, 0x60, 0x91, 0x40, 0x55, 0x6a, 0x99, 0x7a, 0xc9, 0xcc, 0xf3, 0x6d, 0x37, 0xe0, + 0x93, 0x4c, 0xbd, 0x60, 0xaa, 0x6e, 0xc7, 0x93, 0xcc, 0xd7, 0xa8, 0x0a, 0x45, 0x22, 0xae, 0x94, + 0xed, 0xab, 0xa8, 0x26, 0xd5, 0x8b, 0xe6, 0x72, 0x5f, 0xed, 0x83, 0x72, 0x57, 0x26, 0xfa, 0x00, + 0xb9, 0xd8, 0x31, 0xe9, 0x77, 0xdd, 0x8f, 0xeb, 0xaa, 0x57, 0xb0, 0x73, 0xdf, 0xbd, 0x43, 0x0a, + 0x64, 0xa6, 0x78, 0x91, 0x3c, 0x1b, 0x7c, 0x89, 0x5e, 0x42, 0x4e, 0x5c, 0x14, 0xf1, 0x4e, 0xc8, + 0x07, 0xff, 0x68, 0xf1, 0x43, 0xa4, 0xa5, 0x0f, 0x91, 0x26, 0xee, 0x8b, 0x19, 0x83, 0x0e, 0x37, + 0xde, 0x4a, 0xd5, 0x6f, 0x80, 0x7e, 0x1d, 0xe6, 0x7b, 0x3a, 0xf7, 0x6e, 0x77, 0xfe, 0x93, 0x63, + 0x5f, 0x7d, 0x7d, 0xff, 0xbb, 0x04, 0xd9, 0x01, 0x7f, 0x1d, 0x00, 0xf2, 0x7d, 0xe3, 0xac, 0xd7, + 0xd6, 0x95, 0xbf, 0x90, 0x0c, 0x85, 0xe6, 0x65, 0xe7, 0xe8, 0xcc, 0x38, 0x51, 0x24, 0xb4, 0x09, + 0xa5, 0x56, 0xd7, 0x3c, 0xb5, 0x3e, 0x76, 0x8d, 0x8e, 0xb2, 0x81, 0x76, 0x61, 0x7b, 0xb9, 0xb5, + 0x52, 0x54, 0x06, 0x95, 0xa1, 0xd8, 0xd4, 0x4f, 0x8c, 0xbe, 0xd1, 0xed, 0x28, 0x59, 0x54, 0x84, + 0xac, 0x80, 0xe7, 0x90, 0x02, 0xe5, 0xfe, 0xf9, 0xb1, 0x35, 0xec, 0x9a, 0xa7, 0xad, 0x76, 0x77, + 0xa8, 0xe4, 0x51, 0x09, 0x72, 0xfa, 0x85, 0xde, 0x19, 0x28, 0x05, 0x0e, 0x1b, 0x1e, 0x19, 0x03, + 0xa5, 0xc8, 0x61, 0xe7, 0x7d, 0xdd, 0xb4, 0x9a, 0x7a, 0xcb, 0xe8, 0xe8, 0x4d, 0xa5, 0x74, 0xdc, + 0x3e, 0xae, 0xac, 0xf3, 0xee, 0x8d, 0xae, 0x0e, 0x27, 0x1e, 0xfb, 0x1c, 0x8d, 0xb8, 0xee, 0x46, + 0xa2, 0xbb, 0xb1, 0xd4, 0xdd, 0x70, 0x7c, 0x0f, 0x07, 0xac, 0x31, 0x21, 0x93, 0x70, 0xee, 0xac, + 0xc5, 0xc5, 0x7f, 0x62, 0x94, 0x17, 0xb6, 0xbc, 0xfa, 0x19, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x9c, + 0x80, 0xcd, 0x6e, 0x06, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/worker.go b/client/gogrpc/conductor/worker.go new file mode 100644 index 0000000000..25b234f912 --- /dev/null +++ b/client/gogrpc/conductor/worker.go @@ -0,0 +1,151 @@ +package conductor + +import ( + "context" + 
"fmt" + "os" + "runtime" + "time" + + pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + "github.com/netflix/conductor/client/gogrpc/conductor/model" +) + +type Executor interface { + Execute(*model.Task) (*model.TaskResult, error) + ConnectionError(error) error +} + +type Worker struct { + TaskType string + Identifier string + Concurrency int + Executor Executor + Client TasksClient + + tasks chan *model.Task + results chan *model.TaskResult + shutdown chan struct{} +} + +func (worker *Worker) Run() error { + if worker.Identifier == "" { + return fmt.Errorf("conductor: missing field 'Identifier'") + } + if worker.Executor == nil { + return fmt.Errorf("conductor: missing field 'Executor'") + } + if worker.Client == nil { + return fmt.Errorf("conductor: missing field 'Client'") + } + if worker.Identifier == "" { + hostname, err := os.Hostname() + if err != nil { + return err + } + worker.Identifier = fmt.Sprintf("%s (conductor-go)", hostname) + } + if worker.Concurrency == 0 { + worker.Concurrency = runtime.GOMAXPROCS(0) + } + + worker.tasks = make(chan *model.Task, worker.Concurrency) + worker.results = make(chan *model.TaskResult, worker.Concurrency) + worker.shutdown = make(chan struct{}) + + for i := 0; i < worker.Concurrency; i++ { + go worker.thread() + } + + for { + err := worker.run() + if err != nil { + err = worker.Executor.ConnectionError(err) + if err != nil { + worker.Shutdown() + return err + } + } + } +} + +func (worker *Worker) Shutdown() { + close(worker.tasks) + close(worker.shutdown) + worker.Client.Shutdown() +} + +func (worker *Worker) getRequest(pending int) *pb.PollRequest { + return &pb.PollRequest{ + TaskType: worker.TaskType, + WorkerId: worker.Identifier, + TaskCount: int32(pending), + } +} + +func (worker *Worker) thread() { + for task := range worker.tasks { + result, err := worker.Executor.Execute(task) + if err == nil { + // TODO: what if the task failed? 
+			worker.results <- result
+		}
+	}
+}
+
+func (worker *Worker) run() error {
+	stream, err := worker.Client.Tasks().PollStream(context.Background())
+	if err != nil {
+		return err
+	}
+	defer stream.CloseSend()
+
+	errc := make(chan error)
+	go func() {
+		for {
+			task, err := stream.Recv()
+			if err != nil {
+				errc <- err
+				return
+			}
+			worker.tasks <- task
+		}
+	}()
+
+	pending := worker.Concurrency
+
+	for {
+		// Request as many tasks as we have free executor slots before
+		// blocking in the select below; doing this in a 'default' branch
+		// would turn the select into a busy loop.
+		if pending > 0 {
+			err := stream.Send(worker.getRequest(pending))
+			if err != nil {
+				return err
+			}
+			pending = 0
+		}
+
+		timeout := time.NewTimer(1 * time.Second)
+
+		select {
+		case result := <-worker.results:
+			_, err := worker.Client.Tasks().UpdateTask(context.Background(), result)
+			if err != nil {
+				return err
+			}
+			// A finished task frees one executor slot, so we can poll
+			// for one more task on the next iteration.
+			pending++
+
+		case err := <-errc:
+			return err
+
+		case <-worker.shutdown:
+			return nil
+
+		case <-timeout.C:
+			// Keep-alive: an empty request tells the server we are still
+			// polling even when no slots are free.
+			err := stream.Send(worker.getRequest(0))
+			if err != nil {
+				return err
+			}
+		}
+		timeout.Stop()
+	}
+}
diff --git a/client/gogrpc/conductor/worker_test.go b/client/gogrpc/conductor/worker_test.go
new file mode 100644
index 0000000000..7bd830f669
--- /dev/null
+++ b/client/gogrpc/conductor/worker_test.go
@@ -0,0 +1,88 @@
+package conductor
+
+import (
+	"context"
+	"fmt"
+	"testing"
+
+	"github.com/golang/protobuf/ptypes/empty"
+	pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc"
+	"github.com/netflix/conductor/client/gogrpc/conductor/model"
+	"google.golang.org/grpc"
+)
+
+type fakePollStream struct {
+	grpc.ClientStream
+	service *fakeTaskService
+	open    bool
+}
+
+func (stream *fakePollStream) Send(req *pb.PollRequest) error {
+	stream.service.pollRequest(req)
+	return nil
+}
+
+func (stream *fakePollStream) Recv() (*model.Task, error) {
+	select {
+	case task := <-stream.service.pending:
+		return task, nil
+	default:
+		return nil, nil
+	}
+}
+
+type fakeTaskService struct {
+	pending chan *model.Task
+}
+
+func (s *fakeTaskService) pollRequest(req *pb.PollRequest) {
+	for i := 0; i < int(req.GetTaskCount()); i++ {
+		s.pending <- &model.Task{
+			TaskType: req.GetTaskType(),
+			Status:   model.Task_SCHEDULED,
+		}
+	}
+}
+
+var ErrNotImplemented = fmt.Errorf("API call not implemented")
+
+func (s *fakeTaskService) Poll(ctx context.Context, in *pb.PollRequest, opts ...grpc.CallOption) (*model.Task, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) PollStream(ctx context.Context, opts ...grpc.CallOption) (pb.TaskService_PollStreamClient, error) {
+	return &fakePollStream{
+		ClientStream: nil,
+		service:      s,
+		open:         true,
+	}, nil
+}
+func (s *fakeTaskService) GetTasksInProgress(ctx context.Context, in *pb.TasksInProgressRequest, opts ...grpc.CallOption) (*pb.TasksInProgressResponse, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) GetPendingTaskForWorkflow(ctx context.Context, in *pb.PendingTaskRequest, opts ...grpc.CallOption) (*model.Task, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*pb.TaskUpdateResponse, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) AckTask(ctx context.Context, in *pb.AckTaskRequest, opts ...grpc.CallOption) (*pb.AckTaskResponse, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) AddLog(ctx context.Context, in *pb.AddLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) {
+	return nil, ErrNotImplemented
+}
+func (s *fakeTaskService) GetLogs(ctx context.Context, in *pb.TaskId, opts ...grpc.CallOption) (*pb.GetLogsResponse, error) {
+	return nil, ErrNotImplemented
+}
+
+type fakeTaskClient struct {
+	tasks *fakeTaskService
+}
+
+func (c *fakeTaskClient) Tasks() pb.TaskServiceClient {
+	return c.tasks
+}
+
+func TestWorkerInterface(t *testing.T) {
+
+}

From 1e13bce8cefec8ee021840c18f0f6c43e734e5bc Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 28 May 2018 17:56:53 +0200
Subject: [PATCH 012/163] client/gogrpc: Simpler GRPC client

This updated GRPC client uses the unary Poll APIs from different
goroutines to offer reasonable throughput and less complexity than the
old streaming APIs. Although this implementation is not optimal, its
interfaces are more-or-less final and its behavior is quite resilient,
particularly when shutting down the Worker gracefully. All the publicly
exposed interfaces and their behaviors have been documented.
---
 client/gogrpc/Gopkg.lock                      |  20 +-
 client/gogrpc/Gopkg.toml                      |   4 +
 client/gogrpc/conductor/client.go             |  14 ++
 .../gogrpc/conductor/grpc/task_service.pb.go  | 195 ++++++++++++------
 .../conductor/model/eventexecution.pb.go      |   8 +-
 client/gogrpc/conductor/model/task.pb.go      |   8 +-
 client/gogrpc/conductor/model/taskdef.pb.go   |  10 +-
 .../gogrpc/conductor/model/taskresult.pb.go   |   8 +-
 client/gogrpc/conductor/model/workflow.pb.go  |   8 +-
 .../gogrpc/conductor/model/workflowtask.pb.go |  10 +-
 client/gogrpc/conductor/worker.go             | 193 +++++++++--------
 client/gogrpc/conductor/worker_test.go        | 153 +++++++++++---
 .../grpc/server/TaskServiceImpl.java          |  22 +-
 grpc/src/main/proto/grpc/task_service.proto   |  11 +-
 14 files changed, 444 insertions(+), 220 deletions(-)

diff --git a/client/gogrpc/Gopkg.lock b/client/gogrpc/Gopkg.lock
index bd5e41573d..fad0e8c642 100644
--- a/client/gogrpc/Gopkg.lock
+++ b/client/gogrpc/Gopkg.lock
@@ -1,6 +1,12 @@
 # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+[[projects]]
+  name = "github.com/davecgh/go-spew"
+  packages = ["spew"]
+  revision = "346938d642f2ec3594ed81d874461961cd0faa76"
+  version = "v1.1.0"
+
 [[projects]]
   name = "github.com/golang/protobuf"
   packages = [
@@ -15,6 +21,18 @@
   revision = "b4deda0973fb4c70b50d226b1af49f3da59f5265"
   version = "v1.1.0"
 
+[[projects]]
+  name = "github.com/pmezard/go-difflib"
+  packages = ["difflib"]
+  revision = "792786c7400a136282c1664665ae0a8db921c6c2"
+  version = "v1.0.0"
+
+[[projects]]
+  name = "github.com/stretchr/testify"
+  packages = ["assert"]
+  revision = "12b6f73e6084dad08a7c6e575284b177ecafbc71"
+  version = "v1.2.1"
+
 [[projects]]
   branch = "master"
   name = "golang.org/x/net"
@@ -90,6 +108,6 @@
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "e200db17a1db72f216cd0112ddedf0980f58c213db03301286996bc4cb694904"
+  inputs-digest = "688b3887547a40128cd0161bf9a82edbfb79e87c4a5966cc12dd06cb01f4fa52"
   solver-name = "gps-cdcl"
   solver-version = 1
diff --git a/client/gogrpc/Gopkg.toml b/client/gogrpc/Gopkg.toml
index a0c8a0fad4..e40cb23029 100644
--- a/client/gogrpc/Gopkg.toml
+++ b/client/gogrpc/Gopkg.toml
@@ -41,3 +41,7 @@
   go-tests = true
   unused-packages = true
   non-go = true
+
+[[constraint]]
+  name = "github.com/stretchr/testify"
+  version = "1.2.1"
diff --git a/client/gogrpc/conductor/client.go b/client/gogrpc/conductor/client.go
index 6c4880c989..65a4f51c42 100644
--- a/client/gogrpc/conductor/client.go
+++ b/client/gogrpc/conductor/client.go
@@ -5,21 +5,29 @@ import (
 	grpc "google.golang.org/grpc"
 )
 
+// TasksClient is a Conductor client that exposes the Conductor
+// Tasks API.
 type TasksClient interface {
 	Tasks() pb.TaskServiceClient
 	Shutdown()
 }
 
+// MetadataClient is a Conductor client that exposes the Conductor
+// Metadata API.
 type MetadataClient interface {
 	Metadata() pb.MetadataServiceClient
 	Shutdown()
 }
 
+// WorkflowsClient is a Conductor client that exposes the Conductor
+// Workflows API.
 type WorkflowsClient interface {
 	Workflows() pb.WorkflowServiceClient
 	Shutdown()
 }
 
+// Client encapsulates a GRPC connection to a Conductor server and
+// the different services it exposes.
 type Client struct {
 	conn      *grpc.ClientConn
 	tasks     pb.TaskServiceClient
@@ -27,6 +35,8 @@ type Client struct {
 	workflows pb.WorkflowServiceClient
 }
 
+// NewClient returns a new Client with a GRPC connection to the given address,
+// and any optional grpc.DialOption settings.
 func NewClient(address string, options ...grpc.DialOption) (*Client, error) {
 	conn, err := grpc.Dial(address, options...)
 	if err != nil {
@@ -35,10 +45,12 @@ func NewClient(address string, options ...grpc.DialOption) (*Client, error) {
 	return &Client{conn: conn}, nil
 }
 
+// Shutdown closes the underlying GRPC connection for this client.
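+//
+// A typical client lifecycle looks roughly like this (a sketch only, not
+// part of this patch; it assumes a Conductor GRPC server reachable on
+// localhost:8090):
+//
+//	client, err := conductor.NewClient("localhost:8090", grpc.WithInsecure())
+//	if err != nil {
+//		// handle the dial error
+//	}
+//	defer client.Shutdown()
+//	tasks := client.Tasks() // a pb.TaskServiceClient, ready for RPC calls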
func (client *Client) Shutdown() { client.conn.Close() } +// Tasks returns the Tasks service for this client func (client *Client) Tasks() pb.TaskServiceClient { if client.tasks == nil { client.tasks = pb.NewTaskServiceClient(client.conn) @@ -46,6 +58,7 @@ func (client *Client) Tasks() pb.TaskServiceClient { return client.tasks } +// Metadata returns the Metadata service for this client func (client *Client) Metadata() pb.MetadataServiceClient { if client.metadata == nil { client.metadata = pb.NewMetadataServiceClient(client.conn) @@ -53,6 +66,7 @@ func (client *Client) Metadata() pb.MetadataServiceClient { return client.metadata } +// Workflows returns the workflows service for this client func (client *Client) Workflows() pb.WorkflowServiceClient { if client.workflows == nil { client.workflows = pb.NewWorkflowServiceClient(client.conn) diff --git a/client/gogrpc/conductor/grpc/task_service.pb.go b/client/gogrpc/conductor/grpc/task_service.pb.go index a175b4c49b..68103b314f 100644 --- a/client/gogrpc/conductor/grpc/task_service.pb.go +++ b/client/gogrpc/conductor/grpc/task_service.pb.go @@ -29,7 +29,6 @@ type PollRequest struct { TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` - TaskCount int32 `protobuf:"varint,4,opt,name=task_count,json=taskCount" json:"task_count,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -39,7 +38,7 @@ func (m *PollRequest) Reset() { *m = PollRequest{} } func (m *PollRequest) String() string { return proto.CompactTextString(m) } func (*PollRequest) ProtoMessage() {} func (*PollRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{0} + return fileDescriptor_task_service_d215da243254e00c, []int{0} } func (m *PollRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PollRequest.Unmarshal(m, b) @@ -80,13 +79,76 @@ func (m *PollRequest) GetDomain() string { return "" } -func (m *PollRequest) GetTaskCount() int32 { +type StreamingPollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` + Capacity int32 `protobuf:"varint,4,opt,name=capacity" json:"capacity,omitempty"` + Completed []*model.TaskResult `protobuf:"bytes,5,rep,name=completed" json:"completed,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StreamingPollRequest) Reset() { *m = StreamingPollRequest{} } +func (m *StreamingPollRequest) String() string { return proto.CompactTextString(m) } +func (*StreamingPollRequest) ProtoMessage() {} +func (*StreamingPollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_d215da243254e00c, []int{1} +} +func (m *StreamingPollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StreamingPollRequest.Unmarshal(m, b) +} +func (m *StreamingPollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StreamingPollRequest.Marshal(b, m, deterministic) +} +func (dst *StreamingPollRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_StreamingPollRequest.Merge(dst, src) +} +func (m *StreamingPollRequest) XXX_Size() int { + return xxx_messageInfo_StreamingPollRequest.Size(m) +} +func (m *StreamingPollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_StreamingPollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_StreamingPollRequest proto.InternalMessageInfo + +func (m *StreamingPollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *StreamingPollRequest) GetWorkerId() string { if m != nil { - return m.TaskCount + return m.WorkerId + } + return "" +} + +func (m *StreamingPollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *StreamingPollRequest) GetCapacity() int32 { + if m != nil { + return m.Capacity } return 0 } +func (m *StreamingPollRequest) GetCompleted() []*model.TaskResult { + if m != nil { + return m.Completed + } + return nil +} + type TasksInProgressRequest struct { TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey" json:"start_key,omitempty"` @@ -100,7 +162,7 @@ func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } func (*TasksInProgressRequest) ProtoMessage() {} func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{1} + return fileDescriptor_task_service_d215da243254e00c, []int{2} } func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) @@ -152,7 +214,7 @@ func (m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } func (*TasksInProgressResponse) ProtoMessage() {} func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{2} + return fileDescriptor_task_service_d215da243254e00c, []int{3} } func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) @@ -191,7 +253,7 @@ func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } func (*PendingTaskRequest) ProtoMessage() {} func (*PendingTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{3} + return fileDescriptor_task_service_d215da243254e00c, []int{4} } func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) @@ -236,7 +298,7 @@ func (m *TaskUpdateResponse) Reset() { *m = TaskUpdateResponse{} } func (m *TaskUpdateResponse) String() string { return proto.CompactTextString(m) } func (*TaskUpdateResponse) ProtoMessage() {} func (*TaskUpdateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{4} + return fileDescriptor_task_service_d215da243254e00c, []int{5} } func (m *TaskUpdateResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskUpdateResponse.Unmarshal(m, b) @@ -275,7 +337,7 @@ func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) } func (*AckTaskRequest) ProtoMessage() {} func (*AckTaskRequest) Descriptor() ([]byte, 
[]int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{5} + return fileDescriptor_task_service_d215da243254e00c, []int{6} } func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) @@ -320,7 +382,7 @@ func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } func (*AckTaskResponse) ProtoMessage() {} func (*AckTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{6} + return fileDescriptor_task_service_d215da243254e00c, []int{7} } func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) @@ -359,7 +421,7 @@ func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } func (*AddLogRequest) ProtoMessage() {} func (*AddLogRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{7} + return fileDescriptor_task_service_d215da243254e00c, []int{8} } func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) @@ -404,7 +466,7 @@ func (m *TaskId) Reset() { *m = TaskId{} } func (m *TaskId) String() string { return proto.CompactTextString(m) } func (*TaskId) ProtoMessage() {} func (*TaskId) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{8} + return fileDescriptor_task_service_d215da243254e00c, []int{9} } func (m *TaskId) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskId.Unmarshal(m, b) @@ -442,7 +504,7 @@ func (m *GetLogsResponse) Reset() { *m = GetLogsResponse{} } func (m *GetLogsResponse) String() string { return proto.CompactTextString(m) } func (*GetLogsResponse) ProtoMessage() {} func (*GetLogsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_cfa360f15b9a9291, []int{9} + return fileDescriptor_task_service_d215da243254e00c, []int{10} } func (m *GetLogsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetLogsResponse.Unmarshal(m, b) @@ -471,6 +533,7 @@ func (m *GetLogsResponse) GetLogs() []*model.TaskExecLog { func init() { proto.RegisterType((*PollRequest)(nil), "com.netflix.conductor.grpc.PollRequest") + proto.RegisterType((*StreamingPollRequest)(nil), "com.netflix.conductor.grpc.StreamingPollRequest") proto.RegisterType((*TasksInProgressRequest)(nil), "com.netflix.conductor.grpc.TasksInProgressRequest") proto.RegisterType((*TasksInProgressResponse)(nil), "com.netflix.conductor.grpc.TasksInProgressResponse") proto.RegisterType((*PendingTaskRequest)(nil), "com.netflix.conductor.grpc.PendingTaskRequest") @@ -531,7 +594,7 @@ func (c *taskServiceClient) PollStream(ctx context.Context, opts ...grpc.CallOpt } type TaskService_PollStreamClient interface { - Send(*PollRequest) error + Send(*StreamingPollRequest) error Recv() (*model.Task, error) grpc.ClientStream } @@ -540,7 +603,7 @@ type taskServicePollStreamClient struct { grpc.ClientStream } -func (x *taskServicePollStreamClient) Send(m *PollRequest) error { +func (x *taskServicePollStreamClient) Send(m *StreamingPollRequest) error { return x.ClientStream.SendMsg(m) } @@ -646,7 +709,7 @@ func _TaskService_PollStream_Handler(srv interface{}, stream grpc.ServerStream) type TaskService_PollStreamServer interface { Send(*model.Task) error - Recv() (*PollRequest, error) + Recv() (*StreamingPollRequest, error) grpc.ServerStream } @@ -658,8 
+721,8 @@ func (x *taskServicePollStreamServer) Send(m *model.Task) error { return x.ServerStream.SendMsg(m) } -func (x *taskServicePollStreamServer) Recv() (*PollRequest, error) { - m := new(PollRequest) +func (x *taskServicePollStreamServer) Recv() (*StreamingPollRequest, error) { + m := new(StreamingPollRequest) if err := x.ServerStream.RecvMsg(m); err != nil { return nil, err } @@ -819,51 +882,53 @@ var _TaskService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_cfa360f15b9a9291) -} - -var fileDescriptor_task_service_cfa360f15b9a9291 = []byte{ - // 664 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0xdd, 0x4f, 0x13, 0x4f, - 0x14, 0x4d, 0x7f, 0x85, 0x02, 0xb7, 0xe1, 0x07, 0x99, 0x98, 0xb6, 0x2e, 0x31, 0xc2, 0xfa, 0x40, - 0xd5, 0x38, 0x6b, 0xe0, 0xc1, 0x68, 0x7c, 0x01, 0x03, 0xa4, 0x42, 0xb4, 0x59, 0x30, 0x46, 0x62, - 0xd2, 0x6c, 0x77, 0x6f, 0x97, 0x4d, 0x77, 0x77, 0xd6, 0x99, 0xa9, 0xd0, 0x07, 0xfd, 0x63, 0xfc, - 0x4b, 0xcd, 0x7c, 0xf4, 0x43, 0x3e, 0x5a, 0x4c, 0x7c, 0xdb, 0x39, 0x77, 0xee, 0xb9, 0xe7, 0xce, - 0xdc, 0x33, 0x0b, 0xf5, 0x98, 0x17, 0xa1, 0x27, 0x03, 0xd1, 0xef, 0x08, 0xe4, 0xdf, 0x93, 0x10, - 0x69, 0xc1, 0x99, 0x64, 0xc4, 0x09, 0x59, 0x46, 0x73, 0x94, 0xbd, 0x34, 0xb9, 0xa2, 0x21, 0xcb, - 0xa3, 0x41, 0x28, 0x19, 0xa7, 0x6a, 0xbb, 0xb3, 0x11, 0x33, 0x16, 0xa7, 0xe8, 0xe9, 0x9d, 0xdd, - 0x41, 0xcf, 0xc3, 0xac, 0x90, 0x43, 0x93, 0xe8, 0xd4, 0x33, 0x16, 0x61, 0xaa, 0x29, 0xf1, 0x0a, - 0xc3, 0x94, 0xc5, 0x36, 0x50, 0x9b, 0x04, 0x38, 0x8a, 0x41, 0x2a, 0x2d, 0xbe, 0x3e, 0xc1, 0x0d, - 0xe2, 0xfe, 0x84, 0x6a, 0x9b, 0xa5, 0xa9, 0x8f, 0xdf, 0x06, 0x28, 0x24, 0xd9, 0x80, 0x15, 0x2d, - 0x50, 0x0e, 0x0b, 0x6c, 0x94, 0x36, 0x4b, 0xcd, 0x15, 0x7f, 0x59, 0x01, 0x67, 0xc3, 0x02, 0x55, - 0xf0, 0x92, 0xf1, 0x3e, 0xf2, 0x4e, 0x12, 0x35, 0xfe, 0x33, 0x41, 0x03, 0xb4, 0x22, 0x52, 0x83, - 0x4a, 0xc4, 0xb2, 0x20, 0xc9, 0x1b, 0x65, 0x1d, 0xb1, 0x2b, 0xf2, 0x08, 0x40, 0x33, 0x86, 0x6c, - 0x90, 0xcb, 0xc6, 0xc2, 0x66, 0xa9, 0xb9, 0xe8, 0xeb, 0x1a, 0xef, 0x14, 0xe0, 0x5e, 0x40, 0xed, - 0x2c, 0x10, 0x7d, 0xd1, 0xca, 0xdb, 0x9c, 0xc5, 0x1c, 0x85, 0xb8, 0xaf, 0x14, 0x21, 0x03, 0x2e, - 0x3b, 0x7d, 0x1c, 0x8e, 0xa4, 0x68, 0xe0, 0x18, 0x87, 0xe4, 0x01, 0x2c, 0x9a, 0x6a, 0x65, 0x5d, - 0xcd, 0x2c, 0x5c, 0x1f, 0xea, 0x37, 0x2a, 0x89, 0x82, 0xe5, 0x02, 0xc9, 0x2b, 0x58, 0x54, 0xcc, - 0xa2, 0x51, 0xda, 0x2c, 0x37, 0xab, 0x3b, 0x5b, 0xf4, 0xf6, 0x0b, 0xd1, 0x27, 0x46, 0x15, 0x89, - 0x6f, 0xf6, 0xbb, 0x5f, 0x80, 0xb4, 0x31, 0x8f, 0x92, 0x3c, 0xd6, 0xa8, 0x55, 0xfe, 0x18, 0xaa, - 0xea, 0x58, 0x7a, 0x29, 0xbb, 0x54, 0x27, 0x65, 0xb4, 0xc3, 0x08, 0x6a, 0x45, 0xc4, 0x85, 0x55, - 0xdd, 0x1a, 0xc7, 0x5e, 0x27, 0x0f, 0x32, 0xb4, 0x1d, 0x54, 0xa5, 0x26, 0xe9, 0x7d, 0x08, 0x32, - 0x74, 0x5f, 0x00, 0x51, 0x9c, 0x9f, 0x8a, 0x28, 0x90, 0x38, 0x56, 0x5a, 0x87, 0x25, 0x9d, 0x39, - 0xa6, 0xad, 0xa8, 0x65, 0x2b, 0x72, 0x0f, 0xe1, 0xff, 0xbd, 0xb0, 0x3f, 0xad, 0xe2, 0xae, 0xad, - 0x33, 0xaf, 0xd1, 0x7d, 0x02, 0x6b, 0x63, 0x1e, 0x5b, 0x73, 0x1d, 0xca, 0x41, 0xd8, 0xd7, 0x24, - 0xcb, 0xbe, 0xfa, 0x74, 0xdf, 0xc0, 0xea, 0x5e, 0x14, 0x9d, 0xb0, 0x78, 0x6e, 0xad, 0x75, 0x28, - 0xa7, 0x2c, 0xb6, 0x55, 0xd4, 0xa7, 0xbb, 0x05, 0x95, 0x33, 0x13, 0xbb, 0xb3, 0x97, 0x8f, 0xb0, - 0x76, 0x84, 0xf2, 0x84, 0xc5, 0x93, 0x1b, 0x7a, 0x0b, 0x0b, 0x29, 0x8b, 0x47, 0x17, 0xd4, 0x9c, - 0x7b, 0x41, 0x07, 0x57, 0x18, 0x2a, 0x7d, 0x3a, 0x6b, 0xe7, 0x57, 0x05, 0xaa, 0x0a, 0x3d, 0x35, - 0xb6, 0x23, 0x3e, 0x2c, 0xa8, 
0xa1, 0x27, 0xdb, 0xf4, 0x6e, 0xe7, 0xd1, 0x29, 0x5b, 0x38, 0xf3, - 0x27, 0x82, 0x7c, 0x05, 0x50, 0x19, 0xa7, 0x92, 0x63, 0x90, 0xfd, 0x4b, 0xe6, 0x66, 0xe9, 0x65, - 0x89, 0xfc, 0x00, 0x72, 0x84, 0xf2, 0xda, 0xfc, 0x92, 0x9d, 0x59, 0x55, 0x6e, 0xb7, 0x95, 0xb3, - 0xfb, 0x57, 0x39, 0xf6, 0xf8, 0x73, 0x78, 0x78, 0x84, 0x72, 0x6a, 0xd4, 0x0f, 0x19, 0xff, 0x6c, - 0xe7, 0x99, 0xd0, 0x99, 0xbd, 0xde, 0xb0, 0xc7, 0x7d, 0x0e, 0x13, 0x01, 0xcc, 0xe0, 0xeb, 0xd5, - 0xf6, 0x7c, 0x3f, 0xea, 0x47, 0xce, 0xa1, 0xf3, 0x7a, 0xbb, 0xe6, 0xa6, 0x2e, 0x2c, 0xd9, 0x61, - 0x27, 0xcf, 0x66, 0xa5, 0xfe, 0xe9, 0x2c, 0xe7, 0xf9, 0xbd, 0xf6, 0xda, 0x1a, 0xc7, 0x50, 0x31, - 0x5e, 0x21, 0x4f, 0x67, 0xa6, 0x4d, 0xfb, 0xc9, 0xa9, 0x51, 0xf3, 0xec, 0xd3, 0xd1, 0xb3, 0x4f, - 0x0f, 0xd4, 0xb3, 0x4f, 0xce, 0x61, 0xc9, 0x3a, 0x83, 0xb8, 0xf3, 0x7a, 0x6d, 0x45, 0xb3, 0x85, - 0x5e, 0xb3, 0xd8, 0xfe, 0xfb, 0xfd, 0xd5, 0x29, 0x8f, 0xb4, 0xbb, 0xe7, 0xaf, 0xe3, 0x44, 0x5e, - 0x0c, 0xba, 0x8a, 0xc3, 0xb3, 0x1c, 0xde, 0x98, 0xc3, 0x0b, 0xd3, 0x04, 0x73, 0xe9, 0xc5, 0x4c, - 0xff, 0xd5, 0x26, 0xb8, 0x5a, 0x76, 0x2b, 0x5a, 0xf7, 0xee, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, - 0x4b, 0x89, 0xdb, 0xbb, 0xf3, 0x06, 0x00, 0x00, + proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_d215da243254e00c) +} + +var fileDescriptor_task_service_d215da243254e00c = []byte{ + // 701 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4f, 0x4f, 0xdb, 0x4a, + 0x10, 0x97, 0x5f, 0x48, 0x80, 0x89, 0x78, 0xa0, 0x15, 0x4a, 0xf2, 0xcc, 0xe1, 0x81, 0xdf, 0xe1, + 0xa5, 0xad, 0x6a, 0x23, 0x38, 0x54, 0xad, 0x7a, 0x01, 0x09, 0x50, 0x0a, 0x6a, 0x23, 0x43, 0x55, + 0x95, 0x4b, 0xe4, 0xac, 0x27, 0x8b, 0x15, 0xdb, 0xeb, 0xae, 0x37, 0x85, 0x1c, 0xfa, 0x89, 0xfa, + 0x51, 0xfa, 0xa5, 0xaa, 0xdd, 0x75, 0xfe, 0x94, 0x3f, 0x4e, 0x7a, 0xe8, 0xcd, 0x33, 0xb3, 0xf3, + 0x9b, 0xf9, 0xcd, 0xce, 0xcf, 0x0b, 0x4d, 0x26, 0x32, 0xea, 0xc9, 0x20, 0x1f, 0xf6, 0x72, 0x14, + 0x5f, 0x23, 0x8a, 0x6e, 0x26, 0xb8, 0xe4, 0xc4, 0xa6, 0x3c, 0x71, 0x53, 0x94, 0x83, 0x38, 0xba, + 0x73, 0x29, 0x4f, 0xc3, 0x11, 0x95, 0x5c, 0xb8, 0xea, 0xb8, 0xbd, 0xc3, 0x38, 0x67, 0x31, 0x7a, + 0xfa, 0x64, 0x7f, 0x34, 0xf0, 0x30, 0xc9, 0xe4, 0xd8, 0x24, 0xda, 0xcd, 0x84, 0x87, 0x18, 0x6b, + 0x48, 0xbc, 0x43, 0x1a, 0x73, 0x56, 0x04, 0x1a, 0xb3, 0x80, 0xc0, 0x7c, 0x14, 0xcb, 0xc2, 0xbf, + 0x35, 0xf3, 0x1b, 0x8f, 0xd3, 0x83, 0x7a, 0x97, 0xc7, 0xb1, 0x8f, 0x5f, 0x46, 0x98, 0x4b, 0xb2, + 0x03, 0xeb, 0xba, 0x41, 0x39, 0xce, 0xb0, 0x65, 0xed, 0x5a, 0xed, 0x75, 0x7f, 0x4d, 0x39, 0xae, + 0xc6, 0x19, 0xaa, 0xe0, 0x2d, 0x17, 0x43, 0x14, 0xbd, 0x28, 0x6c, 0xfd, 0x65, 0x82, 0xc6, 0xd1, + 0x09, 0x49, 0x03, 0x6a, 0x21, 0x4f, 0x82, 0x28, 0x6d, 0x55, 0x74, 0xa4, 0xb0, 0x9c, 0x1f, 0x16, + 0x6c, 0x5f, 0x4a, 0x81, 0x41, 0x12, 0xa5, 0xec, 0x8f, 0x96, 0x22, 0x36, 0xac, 0xd1, 0x20, 0x0b, + 0x68, 0x24, 0xc7, 0xad, 0x95, 0x5d, 0xab, 0x5d, 0xf5, 0xa7, 0x36, 0x39, 0x81, 0x75, 0xca, 0x93, + 0x2c, 0x46, 0x89, 0x61, 0xab, 0xba, 0x5b, 0x69, 0xd7, 0x0f, 0xfe, 0x77, 0x1f, 0x9f, 0xbb, 0x1e, + 0x8c, 0x7b, 0x15, 0xe4, 0x43, 0x5f, 0xcf, 0xce, 0x9f, 0x65, 0x3a, 0x37, 0xd0, 0x50, 0x81, 0xbc, + 0x93, 0x76, 0x05, 0x67, 0x02, 0xf3, 0x7c, 0x59, 0x3a, 0xb9, 0x0c, 0x84, 0xec, 0x0d, 0x71, 0x3c, + 0xa1, 0xa3, 0x1d, 0xe7, 0x38, 0x26, 0xdb, 0x50, 0xa5, 0x7c, 0x94, 0x4a, 0xcd, 0xa6, 0xea, 0x1b, + 0xc3, 0xf1, 0xa1, 0xf9, 0xa0, 0x52, 0x9e, 0xf1, 0x34, 0x47, 0xf2, 0x0a, 0xaa, 0x0a, 0x39, 0x6f, + 0x59, 0x9a, 0xc7, 0xde, 0x62, 0x1e, 0xe6, 0xbc, 0xf3, 0x19, 0x48, 0x17, 0xd3, 0x30, 0x4a, 
0x99, + 0x61, 0x67, 0x3a, 0xff, 0x17, 0xea, 0x6a, 0xb4, 0x83, 0x98, 0xdf, 0xaa, 0x69, 0x9b, 0xde, 0x61, + 0xe2, 0xea, 0x84, 0xc4, 0x81, 0x0d, 0x4d, 0x4d, 0xe0, 0xa0, 0x97, 0x06, 0x09, 0x16, 0x0c, 0xea, + 0x52, 0x83, 0x0c, 0xde, 0x07, 0x09, 0x3a, 0x2f, 0x81, 0x28, 0xcc, 0x8f, 0x59, 0x18, 0x48, 0x9c, + 0x76, 0xda, 0x84, 0x55, 0x9d, 0x39, 0x85, 0xad, 0x29, 0xb3, 0x13, 0x3a, 0xa7, 0xf0, 0xf7, 0x11, + 0x1d, 0xce, 0x77, 0xf1, 0xd4, 0xd1, 0xd2, 0x55, 0x70, 0xfe, 0x83, 0xcd, 0x29, 0x4e, 0x51, 0x73, + 0x0b, 0x2a, 0x01, 0x1d, 0x6a, 0x90, 0x35, 0x5f, 0x7d, 0x3a, 0x6f, 0x60, 0xe3, 0x28, 0x0c, 0x2f, + 0x38, 0x5b, 0x58, 0x6b, 0x0b, 0x2a, 0x31, 0x67, 0x45, 0x15, 0xf5, 0xe9, 0xec, 0x41, 0xed, 0xca, + 0xc4, 0x9e, 0xe4, 0xf2, 0x01, 0x36, 0xcf, 0x50, 0x5e, 0x70, 0x36, 0xbb, 0xa1, 0xb7, 0xb0, 0x12, + 0x73, 0x36, 0xb9, 0xa0, 0xf6, 0xc2, 0x0b, 0x3a, 0xb9, 0x43, 0xaa, 0xfa, 0xd3, 0x59, 0x07, 0xdf, + 0x6b, 0x50, 0x57, 0xde, 0x4b, 0xf3, 0x97, 0x20, 0x3e, 0xac, 0x28, 0xe1, 0x90, 0xa7, 0x16, 0x56, + 0xfd, 0x28, 0xdc, 0x39, 0x69, 0xd9, 0x8b, 0x37, 0x82, 0x20, 0x80, 0xca, 0x30, 0xca, 0x24, 0xfb, + 0x65, 0xc8, 0x8f, 0xa9, 0x77, 0x89, 0x12, 0x6d, 0x6b, 0xdf, 0x22, 0xdf, 0x80, 0x9c, 0xa1, 0xbc, + 0xb7, 0xc8, 0xe4, 0xa0, 0xac, 0xdc, 0xe3, 0xfa, 0xb2, 0x0f, 0x7f, 0x2b, 0xa7, 0xb8, 0x87, 0x14, + 0xfe, 0x39, 0x43, 0x39, 0xb7, 0xf3, 0xa7, 0x5c, 0x7c, 0x2a, 0x16, 0x9b, 0xb8, 0xa5, 0xe3, 0x7c, + 0xa0, 0x93, 0x25, 0xa7, 0x6a, 0x14, 0xa0, 0xad, 0x65, 0x7f, 0x30, 0xb6, 0xbb, 0x88, 0xdb, 0x3d, + 0x59, 0xf5, 0x61, 0xb5, 0xd8, 0x7a, 0xf2, 0xbc, 0x2c, 0xf5, 0x57, 0x89, 0xd9, 0x2f, 0x96, 0x3a, + 0x5b, 0xd4, 0x38, 0x87, 0x9a, 0x11, 0x0d, 0x79, 0x56, 0x9a, 0x36, 0x2f, 0x2c, 0xbb, 0xe1, 0x9a, + 0xe7, 0xca, 0x9d, 0x3c, 0x57, 0xee, 0x89, 0x7a, 0xae, 0xc8, 0x35, 0xac, 0x16, 0x12, 0x21, 0xce, + 0x22, 0xae, 0x9d, 0xb0, 0xbc, 0xd1, 0x7b, 0x5a, 0x3b, 0x7e, 0x77, 0xbc, 0x31, 0x27, 0x96, 0x6e, + 0xff, 0xfa, 0x35, 0x8b, 0xe4, 0xcd, 0xa8, 0xaf, 0x30, 0xbc, 0x02, 0xc3, 0x9b, 0x62, 0x78, 0x34, + 0x8e, 0x30, 0x95, 0x1e, 0xe3, 0xfa, 0x35, 0x9e, 0xf9, 0x95, 0xd9, 0xaf, 0xe9, 0xbe, 0x0f, 0x7f, + 0x06, 0x00, 0x00, 0xff, 0xff, 0x57, 0x8d, 0x4b, 0x26, 0xab, 0x07, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/eventexecution.pb.go b/client/gogrpc/conductor/model/eventexecution.pb.go index 855847c123..71aa03f168 100644 --- a/client/gogrpc/conductor/model/eventexecution.pb.go +++ b/client/gogrpc/conductor/model/eventexecution.pb.go @@ -45,7 +45,7 @@ func (x EventExecution_Status) String() string { return proto.EnumName(EventExecution_Status_name, int32(x)) } func (EventExecution_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_e6ddf9d2dc999f89, []int{0, 0} + return fileDescriptor_eventexecution_493ae02b3cb3fbdc, []int{0, 0} } type EventExecution struct { @@ -65,7 +65,7 @@ func (m *EventExecution) Reset() { *m = EventExecution{} } func (m *EventExecution) String() string { return proto.CompactTextString(m) } func (*EventExecution) ProtoMessage() {} func (*EventExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_e6ddf9d2dc999f89, []int{0} + return fileDescriptor_eventexecution_493ae02b3cb3fbdc, []int{0} } func (m *EventExecution) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventExecution.Unmarshal(m, b) @@ -141,10 +141,10 @@ func init() { } func init() { - proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_e6ddf9d2dc999f89) + proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_493ae02b3cb3fbdc) } -var 
fileDescriptor_eventexecution_e6ddf9d2dc999f89 = []byte{ +var fileDescriptor_eventexecution_493ae02b3cb3fbdc = []byte{ // 393 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x41, 0x8f, 0x94, 0x30, 0x18, 0x15, 0xd8, 0x61, 0x9d, 0x8f, 0x38, 0x92, 0xc6, 0x98, 0x66, 0xd4, 0x84, 0xec, 0x89, 0x83, diff --git a/client/gogrpc/conductor/model/task.pb.go b/client/gogrpc/conductor/model/task.pb.go index 0dc6dd26bd..617631eddb 100644 --- a/client/gogrpc/conductor/model/task.pb.go +++ b/client/gogrpc/conductor/model/task.pb.go @@ -60,7 +60,7 @@ func (x Task_Status) String() string { return proto.EnumName(Task_Status_name, int32(x)) } func (Task_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_task_c6cd754250bb9501, []int{0, 0} + return fileDescriptor_task_64f7e1feb6072ed5, []int{0, 0} } type Task struct { @@ -100,7 +100,7 @@ func (m *Task) Reset() { *m = Task{} } func (m *Task) String() string { return proto.CompactTextString(m) } func (*Task) ProtoMessage() {} func (*Task) Descriptor() ([]byte, []int) { - return fileDescriptor_task_c6cd754250bb9501, []int{0} + return fileDescriptor_task_64f7e1feb6072ed5, []int{0} } func (m *Task) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Task.Unmarshal(m, b) @@ -316,9 +316,9 @@ func init() { proto.RegisterEnum("com.netflix.conductor.proto.Task_Status", Task_Status_name, Task_Status_value) } -func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_c6cd754250bb9501) } +func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_64f7e1feb6072ed5) } -var fileDescriptor_task_c6cd754250bb9501 = []byte{ +var fileDescriptor_task_64f7e1feb6072ed5 = []byte{ // 899 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x6f, 0xdb, 0x36, 0x14, 0x9d, 0xf2, 0xe1, 0x8f, 0xab, 0xd8, 0x56, 0x99, 0xc4, 0x61, 0x9c, 0x0d, 0x35, 0x32, 0x74, diff --git a/client/gogrpc/conductor/model/taskdef.pb.go b/client/gogrpc/conductor/model/taskdef.pb.go index 6ac3ddbd1c..073a3e7a16 100644 --- a/client/gogrpc/conductor/model/taskdef.pb.go +++ b/client/gogrpc/conductor/model/taskdef.pb.go @@ -39,7 +39,7 @@ func (x TaskDef_RetryLogic) String() string { return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) } func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_eb59d906db8eb198, []int{0, 0} + return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0, 0} } type TaskDef_TimeoutPolicy int32 @@ -65,7 +65,7 @@ func (x TaskDef_TimeoutPolicy) String() string { return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) } func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_eb59d906db8eb198, []int{0, 1} + return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0, 1} } type TaskDef struct { @@ -90,7 +90,7 @@ func (m *TaskDef) Reset() { *m = TaskDef{} } func (m *TaskDef) String() string { return proto.CompactTextString(m) } func (*TaskDef) ProtoMessage() {} func (*TaskDef) Descriptor() ([]byte, []int) { - return fileDescriptor_taskdef_eb59d906db8eb198, []int{0} + return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0} } func (m *TaskDef) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskDef.Unmarshal(m, b) @@ -201,9 +201,9 @@ func init() { proto.RegisterEnum("com.netflix.conductor.proto.TaskDef_TimeoutPolicy", TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) } -func init() { proto.RegisterFile("model/taskdef.proto", 
fileDescriptor_taskdef_eb59d906db8eb198) } +func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_9dd365e0d8e63269) } -var fileDescriptor_taskdef_eb59d906db8eb198 = []byte{ +var fileDescriptor_taskdef_9dd365e0d8e63269 = []byte{ // 566 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x4f, 0x6f, 0xd3, 0x40, 0x10, 0xc5, 0xeb, 0xa6, 0x69, 0xc9, 0x84, 0xa6, 0x66, 0x23, 0x8a, 0x55, 0x40, 0x58, 0xbd, 0xe0, diff --git a/client/gogrpc/conductor/model/taskresult.pb.go b/client/gogrpc/conductor/model/taskresult.pb.go index c71164ce53..c4f12c1682 100644 --- a/client/gogrpc/conductor/model/taskresult.pb.go +++ b/client/gogrpc/conductor/model/taskresult.pb.go @@ -45,7 +45,7 @@ func (x TaskResult_Status) String() string { return proto.EnumName(TaskResult_Status_name, int32(x)) } func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskresult_d94b981b2ba0d45d, []int{0, 0} + return fileDescriptor_taskresult_544d5e3612411ce1, []int{0, 0} } type TaskResult struct { @@ -65,7 +65,7 @@ func (m *TaskResult) Reset() { *m = TaskResult{} } func (m *TaskResult) String() string { return proto.CompactTextString(m) } func (*TaskResult) ProtoMessage() {} func (*TaskResult) Descriptor() ([]byte, []int) { - return fileDescriptor_taskresult_d94b981b2ba0d45d, []int{0} + return fileDescriptor_taskresult_544d5e3612411ce1, []int{0} } func (m *TaskResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskResult.Unmarshal(m, b) @@ -140,9 +140,9 @@ func init() { proto.RegisterEnum("com.netflix.conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) } -func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_d94b981b2ba0d45d) } +func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_544d5e3612411ce1) } -var fileDescriptor_taskresult_d94b981b2ba0d45d = []byte{ +var fileDescriptor_taskresult_544d5e3612411ce1 = []byte{ // 455 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6f, 0xd3, 0x30, 0x14, 0xc6, 0xc9, 0xba, 0x65, 0xf4, 0x15, 0x58, 0x65, 0x8d, 0x2e, 0xda, 0x38, 0x54, 0x3b, 0xf5, diff --git a/client/gogrpc/conductor/model/workflow.pb.go b/client/gogrpc/conductor/model/workflow.pb.go index 764fc0b108..c2e30ab7b2 100644 --- a/client/gogrpc/conductor/model/workflow.pb.go +++ b/client/gogrpc/conductor/model/workflow.pb.go @@ -51,7 +51,7 @@ func (x Workflow_WorkflowStatus) String() string { return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) } func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflow_544173e0c7375d2a, []int{0, 0} + return fileDescriptor_workflow_827faed39aedc44b, []int{0, 0} } type Workflow struct { @@ -81,7 +81,7 @@ func (m *Workflow) Reset() { *m = Workflow{} } func (m *Workflow) String() string { return proto.CompactTextString(m) } func (*Workflow) ProtoMessage() {} func (*Workflow) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_544173e0c7375d2a, []int{0} + return fileDescriptor_workflow_827faed39aedc44b, []int{0} } func (m *Workflow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Workflow.Unmarshal(m, b) @@ -228,9 +228,9 @@ func init() { proto.RegisterEnum("com.netflix.conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) } -func init() { proto.RegisterFile("model/workflow.proto", 
fileDescriptor_workflow_544173e0c7375d2a) } +func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_827faed39aedc44b) } -var fileDescriptor_workflow_544173e0c7375d2a = []byte{ +var fileDescriptor_workflow_827faed39aedc44b = []byte{ // 688 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, 0x18, 0x77, 0x8c, 0x0d, 0xf6, 0x8c, 0xcd, 0x72, 0x4e, 0xa9, 0x60, 0xe2, 0xc4, 0x98, 0xec, 0x05, diff --git a/client/gogrpc/conductor/model/workflowtask.pb.go b/client/gogrpc/conductor/model/workflowtask.pb.go index 87b06ccd3b..e46ce0a2b1 100644 --- a/client/gogrpc/conductor/model/workflowtask.pb.go +++ b/client/gogrpc/conductor/model/workflowtask.pb.go @@ -63,7 +63,7 @@ func (x WorkflowTask_Type) String() string { return proto.EnumName(WorkflowTask_Type_name, int32(x)) } func (WorkflowTask_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_5168db1c2690070f, []int{0, 0} + return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0, 0} } type WorkflowTask struct { @@ -94,7 +94,7 @@ func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } func (*WorkflowTask) ProtoMessage() {} func (*WorkflowTask) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_5168db1c2690070f, []int{0} + return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0} } func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowTask.Unmarshal(m, b) @@ -251,7 +251,7 @@ func (m *WorkflowTask_WorkflowTaskList) Reset() { *m = WorkflowTask_Work func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_5168db1c2690070f, []int{0, 0} + return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0, 0} } func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) @@ -287,10 +287,10 @@ func init() { } func init() { - proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_5168db1c2690070f) + proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_24620b75c66c94aa) } -var fileDescriptor_workflowtask_5168db1c2690070f = []byte{ +var fileDescriptor_workflowtask_24620b75c66c94aa = []byte{ // 774 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xef, 0x6b, 0xdb, 0x3a, 0x14, 0x7d, 0x6e, 0x7e, 0x5f, 0xa7, 0xa9, 0xab, 0xd7, 0xbe, 0x9a, 0xbc, 0xf7, 0xb6, 0xd0, 0x7d, diff --git a/client/gogrpc/conductor/worker.go b/client/gogrpc/conductor/worker.go index 25b234f912..ce9970a579 100644 --- a/client/gogrpc/conductor/worker.go +++ b/client/gogrpc/conductor/worker.go @@ -5,32 +5,84 @@ import ( "fmt" "os" "runtime" + "sync" + "sync/atomic" "time" pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc" "github.com/netflix/conductor/client/gogrpc/conductor/model" ) +// An Executor is a struct that executes the logic required to resolve +// a task. Each Worker instance uses an Executor to run the polled tasks. type Executor interface { - Execute(*model.Task) (*model.TaskResult, error) + // Execute attempt to resolve the given Task and returns a TaskResult + // with its output. 
The given Context carries a deadline which must be
+	// enforced by the implementation.
+	// This function will be called by the Worker for each incoming Task,
+	// and must be threadsafe as it can be called by several goroutines
+	// concurrently.
+	Execute(context.Context, *model.Task) (*model.TaskResult, error)
+
+	// ConnectionError is called by a Worker whenever there's an error with
+	// a GRPC connection. The GRPC error is passed in as its only argument.
+	// If this function returns nil, the Worker will continue retrying the
+	// connection; if it returns a non-nil error, the Worker will stop its
+	// execution and return the given error as the result of the Worker.Run
+	// function.
 	ConnectionError(error) error
 }
 
+// A Worker uses a TaskClient to poll the Conductor server for new tasks and
+// executes them using an Executor instance, returning the result of the task
+// to the upstream server.
+// The Worker struct must be created manually with the desired settings, and then
+// run with Worker.Run.
+// Client implementations usually run a single Worker per process, or one worker per Task Type
+// if a process needs to execute tasks of different types. The Concurrency
+// field allows the worker to execute tasks concurrently in several goroutines.
 type Worker struct {
-	TaskType   string
-	Identifier string
+	// TaskType is the identifier for the type of tasks that this worker can
+	// execute. This will be sent to Conductor when polling for new tasks.
+	TaskType string
+
+	// TaskTimeout is the total duration that a task will be executed for. This
+	// includes the time required to poll, execute and return the task's results.
+	// If not set, tasks will not time out.
+	TaskTimeout time.Duration
+
+	// Identifier is a unique identifier for this worker. If not set, it defaults
+	// to the local hostname.
+	Identifier string
+
+	// Concurrency is the number of goroutines that will poll for tasks and execute
+	// them concurrently. If not set, it defaults to GOMAXPROCS, a sensible default.
 	Concurrency int
 
-	Executor Executor
-	Client   TasksClient
-	tasks    chan *model.Task
-	results  chan *model.TaskResult
-	shutdown chan struct{}
+	// Executor is an instance of an Executor that will actually run the logic required
+	// for each task. See conductor.Executor.
+	Executor Executor
+
+	// Client is an instance of a conductor.Client that implements a Task service.
+	// See conductor.Client.
+	Client TasksClient
+
+	waitThreads  sync.WaitGroup
+	active       int32 // atomic
+	shutdown     chan struct{}
+	shutdownFlag sync.Once
+	result       error
 }
 
+// Run executes the main loop of the Worker, spawning several goroutines to poll and
+// resolve tasks from a Conductor server.
+// This is a blocking call that will not return until Worker.Shutdown is called from
+// another goroutine. When shutting down cleanly, this function returns nil; otherwise
+// an error is returned if there's been a problem with the GRPC connection and the Worker
+// cannot continue running.
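+//
+// An illustrative usage sketch (not part of this patch; "myExecutor" stands
+// in for any Executor implementation and "client" for a TasksClient):
+//
+//	worker := &conductor.Worker{
+//		TaskType:    "sample_task",
+//		Concurrency: 4,
+//		Executor:    myExecutor,
+//		Client:      client,
+//	}
+//	time.AfterFunc(time.Minute, worker.Shutdown) // some stop condition
+//	err := worker.Run()                          // blocks until Shutdown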
 func (worker *Worker) Run() error {
-	if worker.Identifier == "" {
-		return fmt.Errorf("conductor: missing field 'Identifier'")
+	if worker.TaskType == "" {
+		return fmt.Errorf("conductor: missing field 'TaskType'")
 	}
 	if worker.Executor == nil {
 		return fmt.Errorf("conductor: missing field 'Executor'")
@@ -49,103 +101,76 @@ func (worker *Worker) Run() error {
 		worker.Concurrency = runtime.GOMAXPROCS(0)
 	}
 
-	worker.tasks = make(chan *model.Task, worker.Concurrency)
-	worker.results = make(chan *model.TaskResult, worker.Concurrency)
+	worker.active = 0
+	worker.result = nil
 	worker.shutdown = make(chan struct{})
 
+	worker.waitThreads.Add(worker.Concurrency)
 	for i := 0; i < worker.Concurrency; i++ {
 		go worker.thread()
 	}
 
-	for {
-		err := worker.run()
-		if err != nil {
-			err = worker.Executor.ConnectionError(err)
-			if err != nil {
-				worker.Shutdown()
-				return err
-			}
-		}
-	}
+	worker.waitThreads.Wait()
+	worker.Client.Shutdown()
+	return worker.result
 }
 
+// Shutdown stops this worker gracefully. This function is thread-safe and may
+// be called from any goroutine. Only the first call to Shutdown will have an
+// effect: it signals the worker goroutines to stop, and Run returns once they
+// have all drained.
 func (worker *Worker) Shutdown() {
-	close(worker.tasks)
-	close(worker.shutdown)
-	worker.Client.Shutdown()
+	worker.shutdownOnce(nil)
 }
 
-func (worker *Worker) getRequest(pending int) *pb.PollRequest {
-	return &pb.PollRequest{
-		TaskType:  worker.TaskType,
-		WorkerId:  worker.Identifier,
-		TaskCount: int32(pending),
-	}
+// shutdownOnce records the shutdown cause and closes the shutdown channel
+// exactly once. It deliberately does not wait on worker.waitThreads or close
+// the client here: shutdownOnce is also reached from the worker goroutines
+// themselves (via onError), where waiting on them would deadlock. Run
+// performs the final wait and shuts the client down instead.
+func (worker *Worker) shutdownOnce(err error) {
+	worker.shutdownFlag.Do(func() {
+		worker.result = err
+		close(worker.shutdown)
+	})
 }
 
-func (worker *Worker) thread() {
-	for task := range worker.tasks {
-		result, err := worker.Executor.Execute(task)
-		if err == nil {
-			// TODO: what if the task failed?
-			worker.results <- result
-		}
+func (worker *Worker) onError(err error) {
+	userErr := worker.Executor.ConnectionError(err)
+	if userErr != nil {
+		worker.shutdownOnce(userErr)
 	}
 }
 
-func (worker *Worker) run() error {
-	stream, err := worker.Client.Tasks().PollStream(context.Background())
+func (worker *Worker) runTask(req *pb.PollRequest) error {
+	// A zero TaskTimeout means "no deadline": context.WithTimeout with a
+	// zero duration would expire the context immediately, so only attach a
+	// deadline when one has been configured.
+	ctx := context.Background()
+	if worker.TaskTimeout > 0 {
+		var cancel context.CancelFunc
+		ctx, cancel = context.WithTimeout(ctx, worker.TaskTimeout)
+		defer cancel()
+	}
+
+	task, err := worker.Client.Tasks().Poll(ctx, req)
 	if err != nil {
 		return err
 	}
-	defer stream.CloseSend()
-
-	errc := make(chan error)
-	go func() {
-		for {
-			task, err := stream.Recv()
-			if err != nil {
-				errc <- err
-				return
-			}
-			worker.tasks <- task
-		}
-	}()
-
-	pending := worker.Concurrency
-	for {
-		timeout := time.NewTimer(1 * time.Second)
+	result, err := worker.Executor.Execute(ctx, task)
+	// TODO: what if the task failed?
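+	// A sketch of one possible follow-up for the TODO above (illustrative
+	// only, not implemented in this patch): report execution failures back
+	// to Conductor rather than dropping them, e.g.
+	//
+	//	if err != nil {
+	//		result = &model.TaskResult{
+	//			TaskId: task.GetTaskId(),
+	//			Status: model.TaskResult_FAILED,
+	//		}
+	//	}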
+	if err == nil {
+		_, err := worker.Client.Tasks().UpdateTask(context.Background(), result)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
 
-		select {
-		case result := <-worker.results:
-			_, err := worker.Client.Tasks().UpdateTask(context.Background(), result)
-			if err != nil {
-				return err
-			}
-			pending--
-
-		case err := <-errc:
-			return err
-
-		case <-worker.shutdown:
-			return nil
-
-		case <-timeout.C:
-			err := stream.Send(worker.getRequest(0))
-			if err != nil {
-				return err
-			}
-
-		default:
-			if pending > 0 {
-				err := stream.Send(worker.getRequest(pending))
-				if err != nil {
-					return err
-				}
-				pending = 0
-			}
-		}
-		timeout.Stop()
-	}
+func (worker *Worker) thread() {
+	defer worker.waitThreads.Done()
+
+	pollRequest := &pb.PollRequest{
+		TaskType: worker.TaskType,
+		WorkerId: worker.Identifier,
+	}
+
+	// Poll until Shutdown closes the shutdown channel. Ranging over the
+	// channel would never run the loop body (nothing is ever sent on it),
+	// so use a non-blocking shutdown check on each iteration instead.
+	for {
+		select {
+		case <-worker.shutdown:
+			return
+		default:
+		}
+
+		atomic.AddInt32(&worker.active, 1)
+		err := worker.runTask(pollRequest)
+		if err != nil {
+			worker.onError(err)
+		}
+		atomic.AddInt32(&worker.active, -1)
+	}
 }
 
diff --git a/client/gogrpc/conductor/worker_test.go b/client/gogrpc/conductor/worker_test.go
index 7bd830f669..152197c2e9 100644
--- a/client/gogrpc/conductor/worker_test.go
+++ b/client/gogrpc/conductor/worker_test.go
@@ -2,59 +2,83 @@ package conductor
 
 import (
 	"context"
+	"flag"
 	"fmt"
+	"io"
+	"math/rand"
+	"sync"
 	"testing"
+	"time"
 
 	"github.com/golang/protobuf/ptypes/empty"
 	pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc"
 	"github.com/netflix/conductor/client/gogrpc/conductor/model"
 	"google.golang.org/grpc"
-)
 
-type fakePollStream struct {
-	grpc.ClientStream
-	service *fakeTaskService
-	open    bool
-}
+	"github.com/stretchr/testify/assert"
+)
 
-func (stream *fakePollStream) Send(req *pb.PollRequest) error {
-	stream.service.pollRequest(req)
-	return nil
-}
+var doTrace = flag.Bool("dotrace", false, "print tracing information")
 
-func (stream *fakePollStream) Recv() (*model.Task, error) {
-	select {
-	case task := <-stream.service.pending:
-		return task, nil
-	default:
-		return nil, nil
+func trace(format string, args ...interface{}) {
+	if *doTrace {
+		fmt.Printf(format, args...)
} } type fakeTaskService struct { - pending chan *model.Task + latency time.Duration + shutdown chan struct{} + mu sync.Mutex + completed map[string]bool + result error } -func (s *fakeTaskService) pollRequest(req *pb.PollRequest) { - for i := 0; i < int(req.GetTaskCount()); i++ { - s.pending <- &model.Task{ - TaskType: req.GetTaskType(), - Status: model.Task_SCHEDULED, - } - } +func randomTaskID() string { + return fmt.Sprintf("task-%08x", rand.Int63()) } var ErrNotImplemented = fmt.Errorf("API call not implemented") +func (s *fakeTaskService) newTask(req *pb.PollRequest) (*model.Task, error) { + id := randomTaskID() + + s.mu.Lock() + s.completed[id] = false + s.mu.Unlock() + + return &model.Task{ + TaskType: req.GetTaskType(), + Status: model.Task_SCHEDULED, + TaskId: id, + }, nil +} + +func (s *fakeTaskService) updateTask(res *model.TaskResult) (*pb.TaskUpdateResponse, error) { + id := res.GetTaskId() + + s.mu.Lock() + if _, found := s.completed[id]; !found { + panic("missing task: " + id) + } + s.completed[id] = true + s.mu.Unlock() + + return &pb.TaskUpdateResponse{ + TaskId: id, + }, nil +} + func (s *fakeTaskService) Poll(ctx context.Context, in *pb.PollRequest, opts ...grpc.CallOption) (*model.Task, error) { - return nil, ErrNotImplemented + select { + case <-time.After(s.latency): + return s.newTask(in) + case <-s.shutdown: + return nil, s.result + } } func (s *fakeTaskService) PollStream(ctx context.Context, opts ...grpc.CallOption) (pb.TaskService_PollStreamClient, error) { - return &fakePollStream{ - ClientStream: nil, - service: s, - open: true, - }, nil + return nil, ErrNotImplemented } func (s *fakeTaskService) GetTasksInProgress(ctx context.Context, in *pb.TasksInProgressRequest, opts ...grpc.CallOption) (*pb.TasksInProgressResponse, error) { return nil, ErrNotImplemented @@ -63,7 +87,12 @@ func (s *fakeTaskService) GetPendingTaskForWorkflow(ctx context.Context, in *pb. 
return nil, ErrNotImplemented } func (s *fakeTaskService) UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*pb.TaskUpdateResponse, error) { - return nil, ErrNotImplemented + select { + case <-time.After(s.latency): + return s.updateTask(in) + case <-s.shutdown: + return nil, s.result + } } func (s *fakeTaskService) AckTask(ctx context.Context, in *pb.AckTaskRequest, opts ...grpc.CallOption) (*pb.AckTaskResponse, error) { return nil, ErrNotImplemented @@ -83,6 +112,68 @@ func (c *fakeTaskClient) Tasks() pb.TaskServiceClient { return c.tasks } +func (c *fakeTaskClient) forceShutdown(err error) { + c.tasks.result = err + close(c.tasks.shutdown) +} + +func (c *fakeTaskClient) Shutdown() { + c.tasks.result = io.EOF + close(c.tasks.shutdown) +} + +func newFakeTaskClient(latency time.Duration) *fakeTaskClient { + return &fakeTaskClient{ + tasks: &fakeTaskService{ + shutdown: make(chan struct{}), + latency: latency, + }, + } +} + +type slowExecutor struct { + mu sync.Mutex + recv []*model.Task + delay time.Duration +} + +func (exe *slowExecutor) Execute(ctx context.Context, m *model.Task) (*model.TaskResult, error) { + exe.mu.Lock() + exe.recv = append(exe.recv, m) + exe.mu.Unlock() + + time.Sleep(exe.delay) + return &model.TaskResult{ + TaskId: m.GetTaskId(), + Status: model.TaskResult_COMPLETED, + }, nil +} + +func (exe *slowExecutor) ConnectionError(err error) error { + panic(err) +} + func TestWorkerInterface(t *testing.T) { + mock := newFakeTaskClient(200 * time.Millisecond) + exec := &slowExecutor{ + delay: 100 * time.Millisecond, + } + worker := &Worker{ + TaskType: "fake-task", + Concurrency: 4, + Executor: exec, + Client: mock, + } + + time.AfterFunc(1*time.Second, func() { + worker.Shutdown() + }) + + assert.NoError(t, worker.Run()) + + for id, completed := range mock.tasks.completed { + assert.Truef(t, completed, "task %s was not reported as completed", id) + } + assert.Equal(t, len(mock.tasks.completed), len(exec.recv)) } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java index 04f693bdad..9c41c85f98 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -52,25 +52,25 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp } @Override - public StreamObserver pollStream(StreamObserver observer) { + public StreamObserver pollStream(StreamObserver observer) { final ServerCallStreamObserver responseObserver = (ServerCallStreamObserver) observer; - return new StreamObserver() { - int pending = 0; - + return new StreamObserver() { @Override - public void onNext(TaskServicePb.PollRequest req) { - pending += req.getTaskCount(); - + public void onNext(TaskServicePb.StreamingPollRequest req) { try { - List tasks = taskService.poll( + for (TaskResultPb.TaskResult result : req.getCompletedList()) { + TaskResult task = ProtoMapper.fromProto(result); + taskService.updateTask(task); + } + + List newTasks = taskService.poll( req.getTaskType(), req.getWorkerId(), req.getDomain(), - pending, POLL_TIMEOUT_MS); + req.getCapacity(), POLL_TIMEOUT_MS); - for (Task task : tasks) { + for (Task task : newTasks) { responseObserver.onNext(ProtoMapper.toProto(task)); - pending--; } } catch (Exception e) { responseObserver.onError(e); diff --git a/grpc/src/main/proto/grpc/task_service.proto 
b/grpc/src/main/proto/grpc/task_service.proto
index 362b4c652d..901d5a4281 100644
--- a/grpc/src/main/proto/grpc/task_service.proto
+++ b/grpc/src/main/proto/grpc/task_service.proto
@@ -11,7 +11,7 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc";
 
 service TaskService {
     rpc Poll(PollRequest) returns (com.netflix.conductor.proto.Task);
-    rpc PollStream(stream PollRequest) returns (stream com.netflix.conductor.proto.Task);
+    rpc PollStream(stream StreamingPollRequest) returns (stream com.netflix.conductor.proto.Task);
     rpc GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse);
     rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (com.netflix.conductor.proto.Task);
     rpc UpdateTask(com.netflix.conductor.proto.TaskResult) returns (TaskUpdateResponse);
@@ -25,7 +25,14 @@ message PollRequest {
     string task_type = 1;
     string worker_id = 2;
     string domain = 3;
-    int32 task_count = 4;
+}
+
+message StreamingPollRequest {
+    string task_type = 1;
+    string worker_id = 2;
+    string domain = 3;
+    int32 capacity = 4;
+    repeated com.netflix.conductor.proto.TaskResult completed = 5;
 }
 
 message TasksInProgressRequest {

From 6cd5a314b8f0efbe70abe7a11c641d81d6873133 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Wed, 30 May 2018 17:59:10 +0200
Subject: [PATCH 013/163] grpc-server: Handle all GRPC errors with a helper

There is no standard way to actually report Java exceptions to downstream
clients, as GRPC is a multi-language framework that supports many different
languages, some of which don't have a concept of "exceptions".

Some documentation suggests modeling the possible error cases as ProtoBuf
messages and converting the Exception into a PB object that can be sent as
trailing metadata. This approach doesn't seem ideal for Conductor because
we're already reporting the few meaningful errors with their proper error
codes (e.g. NOTFOUND for RPC queries that target a missing entity). The rest
of the exceptions that may be thrown by the different DAOs are fundamentally
internal runtime exceptions.

Hence, the most straightforward way to map them to the clients is by
returning an INTERNAL error code and packing it with the message of the
exception. For debugging purposes, we're also extending the returned error
code with the full backtrace of the exception.

This handling is done in a newly introduced class, GRPCUtil, which in the
future could be extended to also log the exception locally.

GitHub Internal: GRPCUtil.onError should also report the exception to
Haystack, ideally through an abstracted interface that could be upstreamed.
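For reference, this is roughly how the mapping surfaces on a Java client
(an illustrative sketch, not part of this patch: the blocking stub and the
logging are placeholders, while Status and StatusRuntimeException are the
standard io.grpc types):

    try {
        stub.updateTask(taskResult);
    } catch (StatusRuntimeException e) {
        // INTERNAL errors carry the exception message, with the server-side
        // stack trace appended to the status description.
        if (e.getStatus().getCode() == Status.Code.INTERNAL) {
            System.err.println("server error: " + e.getStatus().getDescription());
        }
    }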
--- .../conductor/grpc/server/GRPCUtil.java | 26 +++++++++++++++++++ .../grpc/server/MetadataServiceImpl.java | 4 +-- .../grpc/server/TaskServiceImpl.java | 12 ++++----- .../grpc/server/WorkflowServiceImpl.java | 26 +++++++++---------- 4 files changed, 47 insertions(+), 21 deletions(-) create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java new file mode 100644 index 0000000000..c60f1c75b5 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java @@ -0,0 +1,26 @@ +package com.netflix.conductor.grpc.server; + +import io.grpc.Status; +import io.grpc.stub.StreamObserver; + +import java.io.PrintWriter; +import java.io.StringWriter; + +public class GRPCUtil { + private GRPCUtil() {} + + private static String stacktraceToString(Throwable e) { + StringWriter stringWriter = new StringWriter(); + PrintWriter printWriter = new PrintWriter(stringWriter); + e.printStackTrace(printWriter); + return stringWriter.toString(); +} + + public static void onError(StreamObserver response, Throwable t) { + response.onError(Status.INTERNAL + .withDescription(t.getMessage()) + .augmentDescription(stacktraceToString(t)) + .withCause(t) + .asException()); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index e9d35aac14..2c1ac6bbfc 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -30,7 +30,7 @@ public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver service.registerWorkflowDef(ProtoMapper.fromProto(req)); response.onCompleted(); } catch (Exception e) { - response.onError(e); + GRPCUtil.onError(response, e); } } @@ -45,7 +45,7 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream service.updateWorkflowDef(workflows); response.onCompleted(); } catch (Exception e) { - response.onError(e); + GRPCUtil.onError(response, e); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java index 9c41c85f98..7f536ecf17 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -47,7 +47,7 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp } response.onCompleted(); } catch (Exception e) { - response.onError(e); + GRPCUtil.onError(response, e); } } @@ -73,7 +73,7 @@ public void onNext(TaskServicePb.StreamingPollRequest req) { responseObserver.onNext(ProtoMapper.toProto(task)); } } catch (Exception e) { - responseObserver.onError(e); + GRPCUtil.onError(observer, e); } } @@ -105,7 +105,7 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO response.onNext(builder.build()); response.onCompleted(); } catch (Exception e) { - response.onError(e); + GRPCUtil.onError(response, e); } } @@ -116,7 +116,7 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre response.onNext(ProtoMapper.toProto(t)); response.onCompleted(); } catch (Exception e) { - 
response.onError(e);
+            GRPCUtil.onError(response, e);
         }
     }
 
@@ -133,7 +133,7 @@ public void updateTask(TaskResultPb.TaskResult req, StreamObserver
Date: Wed, 30 May 2018 18:11:30 +0200
Subject: [PATCH 014/163] grpc-server: Report search results

---
 .../grpc/server/WorkflowServiceImpl.java | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
index b4566d9363..807d4ba7d8 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
@@ -250,15 +250,20 @@ private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamOb
             return;
         }
 
-        SearchResult<WorkflowSummary> searchResult;
+        SearchResult<WorkflowSummary> search;
         if (searchByTask) {
-            searchResult = service.searchWorkflowByTasks(query, freeText, start, size, sort);
+            search = service.searchWorkflowByTasks(query, freeText, start, size, sort);
         } else {
-            searchResult = service.search(query, freeText, start, size, sort);
+            search = service.search(query, freeText, start, size, sort);
         }
 
-        // TODO
-        // response.onNext(ProtoMapper.toProto(searchResult));
+        response.onNext(
+                SearchPb.WorkflowSummarySearchResult.newBuilder()
+                        .setTotalHits(search.getTotalHits())
+                        .addAllResults(
+                                search.getResults().stream().map(ProtoMapper::toProto)::iterator
+                        ).build()
+        );
         response.onCompleted();
     }
 
From 36a35bc3927c85437164a277496834b614b6bdb3 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Wed, 30 May 2018 18:50:38 +0200
Subject: [PATCH 015/163] protogen: Remove dependency on Guava

Guava was only being used for case conversion; we can do the same thing
manually.
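As a quick sanity check, the hand-rolled conversion produces the same output
as Guava's LOWER_CAMEL to LOWER_UNDERSCORE conversion did for the field names
we generate (illustrative examples, not shipped in this patch):

    toUnderscoreCase("taskRefName")      // => "task_ref_name"
    toUnderscoreCase("expandInlineJson") // => "expand_inline_json"
    toUnderscoreCase("version")          // => "version"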
--- protogen/build.gradle | 1 - .../com/netflix/conductor/protogen/Enum.java | 2 +- .../com/netflix/conductor/protogen/Message.java | 16 ++++++++++++++-- .../com/netflix/conductor/protogen/ProtoGen.java | 1 + 4 files changed, 16 insertions(+), 4 deletions(-) diff --git a/protogen/build.gradle b/protogen/build.gradle index 3d5ad1829d..aa6df2376f 100644 --- a/protogen/build.gradle +++ b/protogen/build.gradle @@ -1,6 +1,5 @@ dependencies { compile project(':conductor-common') compile 'com.squareup:javapoet:1.11.1' - compile 'com.google.guava:guava:25.0-jre' compile 'com.github.jknack:handlebars:4.0.6' } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java index 2fe9448ca6..0808eaf0a4 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java @@ -61,7 +61,7 @@ protected EnumField(int index, java.lang.reflect.Field field) { @Override public String getProtoTypeDeclaration() { - return String.format("%s = %d", getName(), getProtoIndex()); + return String.format("%s = %d", getName().toUpperCase(), getProtoIndex()); } } } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java index dc27d17fe4..b11a312d55 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java @@ -1,6 +1,5 @@ package com.netflix.conductor.protogen; -import com.google.common.base.CaseFormat; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.protogen.types.AbstractType; @@ -11,6 +10,8 @@ import javax.lang.model.element.Modifier; import java.util.Set; +import java.util.regex.Matcher; +import java.util.regex.Pattern; public class Message extends Element { public Message(Class cls, MessageType parent) { @@ -96,11 +97,22 @@ public AbstractType getAbstractType() { return type; } + private static Pattern CAMEL_CASE_RE = Pattern.compile("(?<=[a-z])[A-Z]"); + private static String toUnderscoreCase(String input) { + Matcher m = CAMEL_CASE_RE.matcher(input); + StringBuffer sb = new StringBuffer(); + while (m.find()) { + m.appendReplacement(sb, "_" + m.group()); + } + m.appendTail(sb); + return sb.toString().toLowerCase(); + } + @Override public String getProtoTypeDeclaration() { return String.format("%s %s = %d", getAbstractType().getProtoType(), - CaseFormat.LOWER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, getName()), + toUnderscoreCase(getName()), getProtoIndex()); } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java index f6b50916ca..1e8ea694be 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java @@ -31,6 +31,7 @@ public static void main(String[] args) throws Exception { ProtoGen generator = new ProtoGen(); generator.process(com.netflix.conductor.common.metadata.events.EventExecution.class); + generator.process(com.netflix.conductor.common.metadata.events.EventHandler.class); generator.process(com.netflix.conductor.common.metadata.tasks.PollData.class); generator.process(com.netflix.conductor.common.metadata.tasks.Task.class); From 3689d08482b676358970eb27a6008d1ba62fdb14 Mon Sep 17 
00:00:00 2001 From: Vicent Marti Date: Wed, 30 May 2018 18:51:29 +0200 Subject: [PATCH 016/163] common: Fix naming in EventExecution The methods and enum in EventExecution were using underscore naming patterns instead of camelCase. Fix this to make the codebase and the generated Proto schemas more consistent. --- .../metadata/events/EventExecution.java | 2 +- .../common/metadata/events/EventHandler.java | 118 +++++++++++------- .../core/events/ActionProcessor.java | 14 +-- .../core/events/TestEventProcessor.java | 12 +- docs/docs/events/index.md | 18 +-- .../dao/mysql/MySQLMetadataDAOTest.java | 6 +- .../dao/dynomite/RedisMetadataDAOTest.java | 6 +- 7 files changed, 101 insertions(+), 75 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java index 390b44faa5..70f1a1c10d 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java @@ -54,7 +54,7 @@ public enum Status { @ProtoField(id = 6) private Status status; - // TODO: Proto + @ProtoField(id = 7) private Action.Type action; @ProtoField(id = 8) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java index 775e6cbfe2..cb60e0a4a4 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java @@ -18,6 +18,10 @@ */ package com.netflix.conductor.common.metadata.events; +import com.netflix.conductor.common.annotations.ProtoEnum; +import com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; + import java.util.HashMap; import java.util.LinkedList; import java.util.List; @@ -27,16 +31,22 @@ * @author Viren * Defines an event handler */ +@ProtoMessage public class EventHandler { + @ProtoField(id = 1) private String name; - + + @ProtoField(id = 2) private String event; - + + @ProtoField(id = 3) private String condition; + @ProtoField(id = 4) private List actions = new LinkedList<>(); - + + @ProtoField(id = 5) private boolean active; public EventHandler() { @@ -119,19 +129,26 @@ public void setActive(boolean active) { } + @ProtoMessage public static class Action { - - public enum Type { start_workflow, complete_task, fail_task } - + + @ProtoEnum + public enum Type { START_WORKFLOW, COMPLETE_TASK, FAIL_TASK } + + @ProtoField(id = 1) private Type action; - - private StartWorkflow start_workflow; - - private TaskDetails complete_task; - - private TaskDetails fail_task; - - private boolean expandInlineJSON; + + @ProtoField(id = 2) + private StartWorkflow startWorkflow; + + @ProtoField(id = 3) + private TaskDetails completeTask; + + @ProtoField(id = 4) + private TaskDetails failTask; + + @ProtoField(id = 5) + private boolean expandInlineJson; /** * @return the action @@ -149,73 +166,77 @@ public void setAction(Type action) { } /** - * @return the start_workflow + * @return the startWorkflow */ - public StartWorkflow getStart_workflow() { - return start_workflow; + public StartWorkflow getStartWorkflow() { + return startWorkflow; } /** - * @param start_workflow the start_workflow to set + * @param startWorkflow the startWorkflow to set * */ - public void 
setStart_workflow(StartWorkflow start_workflow) { - this.start_workflow = start_workflow; + public void setStartWorkflow(StartWorkflow startWorkflow) { + this.startWorkflow = startWorkflow; } /** - * @return the complete_task + * @return the completeTask */ - public TaskDetails getComplete_task() { - return complete_task; + public TaskDetails getCompleteTask() { + return completeTask; } /** - * @param complete_task the complete_task to set + * @param completeTask the completeTask to set * */ - public void setComplete_task(TaskDetails complete_task) { - this.complete_task = complete_task; + public void setCompleteTask(TaskDetails completeTask) { + this.completeTask = completeTask; } /** - * @return the fail_task + * @return the failTask */ - public TaskDetails getFail_task() { - return fail_task; + public TaskDetails getFailTask() { + return failTask; } /** - * @param fail_task the fail_task to set + * @param failTask the failTask to set * */ - public void setFail_task(TaskDetails fail_task) { - this.fail_task = fail_task; + public void setFailTask(TaskDetails failTask) { + this.failTask = failTask; } /** * - * @param expandInlineJSON when set to true, the in-lined JSON strings are expanded to a full json document + * @param expandInlineJson when set to true, the in-lined JSON strings are expanded to a full json document */ - public void setExpandInlineJSON(boolean expandInlineJSON) { - this.expandInlineJSON = expandInlineJSON; + public void setExpandInlineJson(boolean expandInlineJson) { + this.expandInlineJson = expandInlineJson; } /** * * @return true if the json strings within the payload should be expanded. */ - public boolean isExpandInlineJSON() { - return expandInlineJSON; + public boolean isExpandInlineJson() { + return expandInlineJson; } } - + + @ProtoMessage public static class TaskDetails { - + + @ProtoField(id = 1) private String workflowId; - + + @ProtoField(id = 2) private String taskRefName; - + + @ProtoField(id = 3) private Map output = new HashMap<>(); /** @@ -266,15 +287,20 @@ public void setOutput(Map output) { } - + + @ProtoMessage public static class StartWorkflow { - + + @ProtoField(id = 1) private String name; - + + @ProtoField(id = 2) private Integer version; - + + @ProtoField(id = 3) private String correlationId; - + + @ProtoField(id = 4) private Map input = new HashMap<>(); /** diff --git a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java index cd7d70214a..a70c5c550b 100644 --- a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java +++ b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java @@ -68,19 +68,19 @@ public Map execute(Action action, String payload, String event, logger.debug("Executing {}", action.getAction()); Object jsonObj = om.readValue(payload, Object.class); - if(action.isExpandInlineJSON()) { + if(action.isExpandInlineJson()) { jsonObj = expand(jsonObj); } switch (action.getAction()) { - case start_workflow: + case START_WORKFLOW: Map op = startWorkflow(action, jsonObj, event, messageId); return op; - case complete_task: - op = completeTask(action, jsonObj, action.getComplete_task(), Status.COMPLETED, event, messageId); + case COMPLETE_TASK: + op = completeTask(action, jsonObj, action.getCompleteTask(), Status.COMPLETED, event, messageId); return op; - case fail_task: - op = completeTask(action, jsonObj, action.getFail_task(), Status.FAILED, event, messageId); + case FAIL_TASK: + op = completeTask(action, 
jsonObj, action.getFailTask(), Status.FAILED, event, messageId); return op; default: break; @@ -125,7 +125,7 @@ private Map completeTask(Action action, Object payload, TaskDeta } private Map startWorkflow(Action action, Object payload, String event, String messageId) throws Exception { - StartWorkflow params = action.getStart_workflow(); + StartWorkflow params = action.getStartWorkflow(); Map op = new HashMap<>(); try { diff --git a/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java b/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java index fb7ef413d3..ea4dd10307 100644 --- a/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java +++ b/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java @@ -82,10 +82,10 @@ public void testEventProcessor() throws Exception { eh.setName(UUID.randomUUID().toString()); eh.setActive(false); Action action = new Action(); - action.setAction(Type.start_workflow); - action.setStart_workflow(new StartWorkflow()); - action.getStart_workflow().setName("workflow_x"); - action.getStart_workflow().setVersion(1); //TODO: Remove this to simulate the null value for version being passed! + action.setAction(Type.START_WORKFLOW); + action.setStartWorkflow(new StartWorkflow()); + action.getStartWorkflow().setName("workflow_x"); + action.getStartWorkflow().setVersion(1); //TODO: Remove this to simulate the null value for version being passed! eh.getActions().add(action); eh.setEvent(event); @@ -110,13 +110,13 @@ public String answer(InvocationOnMock invocation) throws Throwable { started.set(true); return id; } - }).when(executor).startWorkflow(action.getStart_workflow().getName(), 1, action.getStart_workflow().getCorrelationId(), action.getStart_workflow().getInput(), event); + }).when(executor).startWorkflow(action.getStartWorkflow().getName(), 1, action.getStartWorkflow().getCorrelationId(), action.getStartWorkflow().getInput(), event); //Metadata Service Mock MetadataService metadata = mock(MetadataService.class); WorkflowDef def = new WorkflowDef(); def.setVersion(1); - def.setName(action.getStart_workflow().getName()); + def.setName(action.getStartWorkflow().getName()); when(metadata.getWorkflowDef(any(), any())).thenReturn(def); ActionProcessor ap = new ActionProcessor(executor, metadata); diff --git a/docs/docs/events/index.md b/docs/docs/events/index.md index 9b83552e2d..25351d468b 100644 --- a/docs/docs/events/index.md +++ b/docs/docs/events/index.md @@ -66,8 +66,8 @@ Given the following payload in the message: ```json { - "action": "start_workflow", - "start_workflow": { + "action": "startWorkflow", + "startWorkflow": { "name": "WORKFLOW_NAME", "version": "input": { @@ -81,15 +81,15 @@ Given the following payload in the message: ```json { - "action": "complete_task", - "complete_task": { + "action": "completeTask", + "completeTask": { "workflowId": "${source.externalId.workflowId}", "taskRefName": "task_1", "output": { "response": "${source.result}" } }, - "expandInlineJSON": true + "expandInlineJson": true } ``` @@ -97,21 +97,21 @@ Given the following payload in the message: ```json { - "action": "fail_task", - "fail_task": { + "action": "failTask", + "failTask": { "workflowId": "${source.externalId.workflowId}", "taskRefName": "task_1", "output": { "response": "${source.result}" } }, - "expandInlineJSON": true + "expandInlineJson": true } ``` Input for starting a workflow and output when completing / failing task follows the same 
[expressions](/metadata/#wiring-inputs-and-outputs) used for wiring workflow inputs. !!!info "Expanding stringified JSON elements in payload" - `expandInlineJSON` property, when set to true will expand the inlined stringified JSON elements in the payload to JSON documents and replace the string value with JSON document. + `expandInlineJson` property, when set to true will expand the inlined stringified JSON elements in the payload to JSON documents and replace the string value with JSON document. This feature allows such elements to be used with JSON path expressions. ## Extending diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java index 72fa8fcaaf..c015e412ab 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java @@ -177,9 +177,9 @@ public void testEventHandlers() { eh.setName(UUID.randomUUID().toString()); eh.setActive(false); EventHandler.Action action = new EventHandler.Action(); - action.setAction(EventHandler.Action.Type.start_workflow); - action.setStart_workflow(new EventHandler.StartWorkflow()); - action.getStart_workflow().setName("workflow_x"); + action.setAction(EventHandler.Action.Type.START_WORKFLOW); + action.setStartWorkflow(new EventHandler.StartWorkflow()); + action.getStartWorkflow().setName("workflow_x"); eh.getActions().add(action); eh.setEvent(event1); diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java index d971e44413..90df9f7ff0 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java @@ -225,9 +225,9 @@ public void testEventHandlers() { eh.setName(UUID.randomUUID().toString()); eh.setActive(false); Action action = new Action(); - action.setAction(Type.start_workflow); - action.setStart_workflow(new StartWorkflow()); - action.getStart_workflow().setName("workflow_x"); + action.setAction(Type.START_WORKFLOW); + action.setStartWorkflow(new StartWorkflow()); + action.getStartWorkflow().setName("workflow_x"); eh.getActions().add(action); eh.setEvent(event1); From 0663ae64a58ff82c81c4c9abeded0adf6da15006 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 30 May 2018 18:52:52 +0200 Subject: [PATCH 017/163] grpc: Map the Events models --- .../conductor/grpc/server/ProtoMapper.java | 116 ++++++++++++++++++ .../src/main/proto/model/eventexecution.proto | 2 + grpc/src/main/proto/model/eventhandler.proto | 38 ++++++ 3 files changed, 156 insertions(+) create mode 100644 grpc/src/main/proto/model/eventhandler.proto diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java index 2462838a5d..ae527e9753 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java @@ -2,6 +2,7 @@ import com.google.protobuf.Value; import com.netflix.conductor.common.metadata.events.EventExecution; +import com.netflix.conductor.common.metadata.events.EventHandler; import 
com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -21,6 +22,7 @@ import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; import com.netflix.conductor.proto.DynamicForkJoinTaskPb; import com.netflix.conductor.proto.EventExecutionPb; +import com.netflix.conductor.proto.EventHandlerPb; import com.netflix.conductor.proto.PollDataPb; import com.netflix.conductor.proto.RerunWorkflowRequestPb; import com.netflix.conductor.proto.SkipTaskRequestPb; @@ -57,6 +59,7 @@ public static EventExecutionPb.EventExecution toProto(EventExecution from) { to.setEvent( from.getEvent() ); to.setCreated( from.getCreated() ); to.setStatus( toProto( from.getStatus() ) ); + to.setAction( toProto( from.getAction() ) ); for (Map.Entry pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } @@ -71,6 +74,7 @@ public static EventExecution fromProto(EventExecutionPb.EventExecution from) { to.setEvent( from.getEvent() ); to.setCreated( from.getCreated() ); to.setStatus( fromProto( from.getStatus() ) ); + to.setAction( fromProto( from.getAction() ) ); Map outputMap = new HashMap(); for (Map.Entry pair : from.getOutputMap().entrySet()) { outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); @@ -103,6 +107,118 @@ public static EventExecution.Status fromProto(EventExecutionPb.EventExecution.St return to; } + public static EventHandlerPb.EventHandler toProto(EventHandler from) { + EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCondition( from.getCondition() ); + for (EventHandler.Action elem : from.getActions()) { + to.addActions( toProto(elem) ); + } + to.setActive( from.isActive() ); + return to.build(); + } + + public static EventHandler fromProto(EventHandlerPb.EventHandler from) { + EventHandler to = new EventHandler(); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCondition( from.getCondition() ); + to.setActions( from.getActionsList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setActive( from.getActive() ); + return to; + } + + public static EventHandlerPb.EventHandler.StartWorkflow toProto( + EventHandler.StartWorkflow from) { + EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public static EventHandler.StartWorkflow fromProto( + EventHandlerPb.EventHandler.StartWorkflow from) { + EventHandler.StartWorkflow to = new EventHandler.StartWorkflow(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + return to; + } + + public static EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { + EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); + to.setWorkflowId( from.getWorkflowId() ); + to.setTaskRefName( 
from.getTaskRefName() ); + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public static EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetails from) { + EventHandler.TaskDetails to = new EventHandler.TaskDetails(); + to.setWorkflowId( from.getWorkflowId() ); + to.setTaskRefName( from.getTaskRefName() ); + Map outputMap = new HashMap(); + for (Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + return to; + } + + public static EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { + EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); + to.setAction( toProto( from.getAction() ) ); + to.setStartWorkflow( toProto( from.getStartWorkflow() ) ); + to.setCompleteTask( toProto( from.getCompleteTask() ) ); + to.setFailTask( toProto( from.getFailTask() ) ); + to.setExpandInlineJson( from.isExpandInlineJson() ); + return to.build(); + } + + public static EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { + EventHandler.Action to = new EventHandler.Action(); + to.setAction( fromProto( from.getAction() ) ); + to.setStartWorkflow( fromProto( from.getStartWorkflow() ) ); + to.setCompleteTask( fromProto( from.getCompleteTask() ) ); + to.setFailTask( fromProto( from.getFailTask() ) ); + to.setExpandInlineJson( from.getExpandInlineJson() ); + return to; + } + + public static EventHandlerPb.EventHandler.Action.Type toProto(EventHandler.Action.Type from) { + EventHandlerPb.EventHandler.Action.Type to; + switch (from) { + case START_WORKFLOW: to = EventHandlerPb.EventHandler.Action.Type.START_WORKFLOW; break; + case COMPLETE_TASK: to = EventHandlerPb.EventHandler.Action.Type.COMPLETE_TASK; break; + case FAIL_TASK: to = EventHandlerPb.EventHandler.Action.Type.FAIL_TASK; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public static EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Type from) { + EventHandler.Action.Type to; + switch (from) { + case START_WORKFLOW: to = EventHandler.Action.Type.START_WORKFLOW; break; + case COMPLETE_TASK: to = EventHandler.Action.Type.COMPLETE_TASK; break; + case FAIL_TASK: to = EventHandler.Action.Type.FAIL_TASK; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + public static PollDataPb.PollData toProto(PollData from) { PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); to.setQueueName( from.getQueueName() ); diff --git a/grpc/src/main/proto/model/eventexecution.proto b/grpc/src/main/proto/model/eventexecution.proto index 6d6ac7d621..db39e00305 100644 --- a/grpc/src/main/proto/model/eventexecution.proto +++ b/grpc/src/main/proto/model/eventexecution.proto @@ -1,6 +1,7 @@ syntax = "proto3"; package com.netflix.conductor.proto; +import "model/eventhandler.proto"; import "google/protobuf/struct.proto"; option java_outer_classname = "EventExecutionPb"; @@ -19,5 +20,6 @@ message EventExecution { string event = 4; int64 created = 5; EventExecution.Status status = 6; + EventHandler.Action.Type action = 7; map output = 8; } diff --git a/grpc/src/main/proto/model/eventhandler.proto b/grpc/src/main/proto/model/eventhandler.proto new file mode 100644 index 0000000000..b0c8909cb5 --- /dev/null +++ 
b/grpc/src/main/proto/model/eventhandler.proto @@ -0,0 +1,38 @@ +syntax = "proto3"; +package com.netflix.conductor.proto; + +import "google/protobuf/struct.proto"; + +option java_outer_classname = "EventHandlerPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message EventHandler { + message StartWorkflow { + string name = 1; + int32 version = 2; + string correlation_id = 3; + map<string, google.protobuf.Value> input = 4; + } + message TaskDetails { + string workflow_id = 1; + string task_ref_name = 2; + map<string, google.protobuf.Value> output = 3; + } + message Action { + enum Type { + START_WORKFLOW = 0; + COMPLETE_TASK = 1; + FAIL_TASK = 2; + } + EventHandler.Action.Type action = 1; + EventHandler.StartWorkflow start_workflow = 2; + EventHandler.TaskDetails complete_task = 3; + EventHandler.TaskDetails fail_task = 4; + bool expand_inline_json = 5; + } + string name = 1; + string event = 2; + string condition = 3; + repeated EventHandler.Action actions = 4; + bool active = 5; +}
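Taken together, the generated methods above give a lossless round trip between the Java EventHandler model and its protobuf counterpart. A minimal sketch of how the mapper is used (field values are illustrative; at this point in the series the mapping methods are still static):

    EventHandler handler = new EventHandler();
    handler.setName("notify_on_failure");            // illustrative values
    handler.setEvent("conductor:workflow_failure");
    handler.setActive(true);

    // Java model -> protobuf message
    EventHandlerPb.EventHandler proto = ProtoMapper.toProto(handler);
    // protobuf message -> Java model; actions and nested Value maps are mapped recursively
    EventHandler roundTripped = ProtoMapper.fromProto(proto);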
From 527e0b96ba619d928414c35d7cc6f39a7469e224 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 13:45:55 +0200 Subject: [PATCH 018/163] grpc-server: Implement EventServiceGrpc Includes a slight refactoring for GRPCServerProvider and GRPCServer where we can now pass any number of services to the server. --- .../grpc/server/EventServiceImpl.java | 92 +++++++++++++++++++ .../conductor/grpc/server/GRPCModule.java | 7 +- .../conductor/grpc/server/GRPCServer.java | 16 ++-- .../grpc/server/GRPCServerProvider.java | 40 ++++++++ grpc/src/main/proto/grpc/event_service.proto | 56 +++++++++++ 5 files changed, 202 insertions(+), 9 deletions(-) create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java create mode 100644 grpc/src/main/proto/grpc/event_service.proto diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java new file mode 100644 index 0000000000..f827dc2a96 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java @@ -0,0 +1,92 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.Empty; +import com.netflix.conductor.common.metadata.events.EventHandler; +import com.netflix.conductor.core.events.EventProcessor; +import com.netflix.conductor.core.events.EventQueues; +import com.netflix.conductor.grpc.EventServiceGrpc; +import com.netflix.conductor.grpc.EventServicePb; +import com.netflix.conductor.proto.EventHandlerPb; +import com.netflix.conductor.service.MetadataService; +import io.grpc.stub.StreamObserver; + +import javax.inject.Inject; +import java.util.Map; + +public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { + private MetadataService service; + private EventProcessor ep; + + @Inject + public EventServiceImpl(MetadataService service, EventProcessor ep) { + this.service = service; + this.ep = ep; + } + + @Override + public void addEventHandler(EventHandlerPb.EventHandler req, StreamObserver<Empty> response) { + service.addEventHandler(ProtoMapper.fromProto(req)); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } + + @Override + public void updateEventHandler(EventHandlerPb.EventHandler req, StreamObserver<Empty> response) { + service.updateEventHandler(ProtoMapper.fromProto(req)); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } + + @Override + public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, StreamObserver<Empty> response) { + service.removeEventHandlerStatus(req.getName()); + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } + + @Override + public void getEventHandlers(Empty req, StreamObserver<EventHandlerPb.EventHandler> response) { + for (EventHandler eh : service.getEventHandlers()) { + response.onNext(ProtoMapper.toProto(eh)); + } + response.onCompleted(); + } + + @Override + public void getEventHandlersForEvent(EventServicePb.GetEventHandlersRequest req, StreamObserver<EventHandlerPb.EventHandler> response) { + for (EventHandler eh : service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly())) { + response.onNext(ProtoMapper.toProto(eh)); + } + response.onCompleted(); + } + + @Override + public void getQueues(Empty req, StreamObserver<EventServicePb.GetQueuesResponse> response) { + response.onNext( + EventServicePb.GetQueuesResponse.newBuilder() + .putAllEventToQueueUri(ep.getQueues()) + .build() + ); + response.onCompleted(); + } + + @Override + public void getQueueSizes(Empty req, StreamObserver<EventServicePb.GetQueueSizesResponse> response) { + EventServicePb.GetQueueSizesResponse.Builder builder = EventServicePb.GetQueueSizesResponse.newBuilder(); + for (Map.Entry<String, Map<String, Long>> pair : ep.getQueueSizes().entrySet()) { + EventServicePb.GetQueueSizesResponse.QueueInfo info = + EventServicePb.GetQueueSizesResponse.QueueInfo.newBuilder() + .putAllQueueSizes(pair.getValue()) + .build(); + builder.putEventToQueueInfo(pair.getKey(), info); + } + response.onNext(builder.build()); + response.onCompleted(); + } + + @Override + public void getQueueProviders(Empty req, StreamObserver<EventServicePb.GetQueueProvidersResponse> response) { + response.onNext( + EventServicePb.GetQueueProvidersResponse.newBuilder() + .addAllProviders(EventQueues.providers()) + .build() + ); + response.onCompleted(); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java index 7df8aa49cd..3bf1f34a14 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java @@ -3,7 +3,8 @@ import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.dao.index.ElasticsearchModule; +import com.netflix.conductor.grpc.EventServiceGrpc; +import com.netflix.conductor.grpc.MetadataServiceGrpc; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; @@ -25,10 +26,12 @@ public GRPCModule(Configuration configuration){ protected void configure() { configureExecutorService(); - install(new ElasticsearchModule()); bind(Configuration.class).toInstance(configuration); bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); + bind(MetadataServiceGrpc.MetadataServiceImplBase.class).to(MetadataServiceImpl.class); bind(WorkflowServiceGrpc.WorkflowServiceImplBase.class).to(WorkflowServiceImpl.class); + bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class); + + bind(GRPCServer.class).toProvider(GRPCServerProvider.class); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index 37adb7e023..df2f2f90cf 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -4,6 +4,7 @@ import com.netflix.conductor.core.config.Configuration; import 
com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; +import io.grpc.BindableService; import io.grpc.Server; import io.grpc.ServerBuilder; import org.slf4j.Logger; @@ -22,14 +23,15 @@ public class GRPCServer { public final static int CONFIG_PORT_DEFAULT = 8080; @Inject - public GRPCServer(TaskServiceGrpc.TaskServiceImplBase taskImpl, - WorkflowServiceGrpc.WorkflowServiceImplBase workflowImpl, - Configuration conf) { + public GRPCServer(Configuration conf, BindableService... services) { final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); - server = ServerBuilder.forPort(port) - .addService(taskImpl) - .addService(workflowImpl) - .build(); + + ServerBuilder<?> builder = ServerBuilder.forPort(port); + for (BindableService s : services) { + builder.addService(s); + } + + server = builder.build(); } public void start() throws IOException { diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java new file mode 100644 index 0000000000..4d5373b1ac --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -0,0 +1,40 @@ +package com.netflix.conductor.grpc.server; + +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.grpc.EventServiceGrpc; +import com.netflix.conductor.grpc.MetadataServiceGrpc; +import com.netflix.conductor.grpc.TaskServiceGrpc; +import com.netflix.conductor.grpc.WorkflowServiceGrpc; + +import javax.inject.Inject; +import javax.inject.Provider; + +public class GRPCServerProvider implements Provider<GRPCServer> { + private final TaskServiceGrpc.TaskServiceImplBase taskServiceImplBase; + private final WorkflowServiceGrpc.WorkflowServiceImplBase workflowServiceImplBase; + private final MetadataServiceGrpc.MetadataServiceImplBase metadataServiceImplBase; + private final EventServiceGrpc.EventServiceImplBase eventServiceImplBase; + private final Configuration configuration; + + @Inject + public GRPCServerProvider(TaskServiceGrpc.TaskServiceImplBase taskImpl, + WorkflowServiceGrpc.WorkflowServiceImplBase workflowImpl, + MetadataServiceGrpc.MetadataServiceImplBase metaImpl, + EventServiceGrpc.EventServiceImplBase eventImpl, + Configuration conf) { + this.taskServiceImplBase = taskImpl; + this.workflowServiceImplBase = workflowImpl; + this.metadataServiceImplBase = metaImpl; + this.eventServiceImplBase = eventImpl; + this.configuration = conf; + } + + @Override + public GRPCServer get() { + return new GRPCServer(configuration, + taskServiceImplBase, + workflowServiceImplBase, + metadataServiceImplBase, + eventServiceImplBase); + } +} diff --git a/grpc/src/main/proto/grpc/event_service.proto b/grpc/src/main/proto/grpc/event_service.proto new file mode 100644 index 0000000000..2a1028a1b4 --- /dev/null +++ b/grpc/src/main/proto/grpc/event_service.proto @@ -0,0 +1,56 @@ +syntax = "proto3"; +package com.netflix.conductor.grpc; + +import "google/protobuf/empty.proto"; +import "model/eventhandler.proto"; + +option java_outer_classname = "EventServicePb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; + +service EventService { + // POST / + rpc AddEventHandler(com.netflix.conductor.proto.EventHandler) returns (google.protobuf.Empty); + + // PUT / + rpc UpdateEventHandler(com.netflix.conductor.proto.EventHandler) returns (google.protobuf.Empty); + + // DELETE /{name} + rpc RemoveEventHandler(RemoveEventHandlerRequest) returns (google.protobuf.Empty); + + // GET / + rpc GetEventHandlers(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.EventHandler); + + // GET /{name} + rpc GetEventHandlersForEvent(GetEventHandlersRequest) returns (stream com.netflix.conductor.proto.EventHandler); + + // GET /queues + rpc GetQueues(google.protobuf.Empty) returns (GetQueuesResponse); + rpc GetQueueSizes(google.protobuf.Empty) returns (GetQueueSizesResponse); + + // GET /queues/providers + rpc GetQueueProviders(google.protobuf.Empty) returns (GetQueueProvidersResponse); +} + +message RemoveEventHandlerRequest { + string name = 1; +} + +message GetEventHandlersRequest { + string event = 1; + bool active_only = 2; +} + +message GetQueuesResponse { + map<string, string> event_to_queue_uri = 1; +} + +message GetQueueSizesResponse { + message QueueInfo { + map<string, int64> queue_sizes = 1; + } + map<string, QueueInfo> event_to_queue_info = 2; +} + +message GetQueueProvidersResponse { + repeated string providers = 1; +}
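The varargs constructor above decouples GRPCServer from the concrete set of services: callers register any number of BindableService instances and the builder adds them one by one. A minimal sketch of standing up a server with only the event service (the collaborators are assumed to come from Guice; GRPCServerProvider above performs the same wiring for the full service set):

    // hypothetical manual wiring; in the server module, GRPCServerProvider does this
    EventServiceImpl eventService = new EventServiceImpl(metadataService, eventProcessor);
    GRPCServer server = new GRPCServer(configuration, eventService);
    server.start();   // only the services passed in are registered; start() throws IOException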
From 09d31ed069e9e4858e6d0317b10039f5c3345b3e Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 17:18:56 +0200 Subject: [PATCH 019/163] protogen: Improve ProtoMapper generation ProtoMapper is now generated as an abstract base class, with all the custom user mapping methods defined as abstract methods that need to be implemented. --- .../netflix/conductor/protogen/Element.java | 12 +++ .../com/netflix/conductor/protogen/Enum.java | 2 +- .../netflix/conductor/protogen/Message.java | 9 ++- .../netflix/conductor/protogen/ProtoGen.java | 23 +++--- .../protogen/types/AbstractType.java | 47 +----------- .../conductor/protogen/types/AnyType.java | 24 +++++- .../conductor/protogen/types/GenericType.java | 7 +- .../conductor/protogen/types/ListType.java | 2 +- .../conductor/protogen/types/MessageType.java | 3 + .../conductor/protogen/types/ScalarType.java | 7 ++ .../conductor/protogen/types/WrappedType.java | 75 +++++++++++++++ 11 files changed, 150 insertions(+), 61 deletions(-) create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Element.java b/protogen/src/main/java/com/netflix/conductor/protogen/Element.java index 6942b76f1a..0e662b03db 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Element.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Element.java @@ -4,6 +4,7 @@ import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.protogen.types.AbstractType; import com.netflix.conductor.protogen.types.MessageType; +import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeSpec; import java.util.*; @@ -53,6 +54,16 @@ public void generateJavaMapper(TypeSpec.Builder builder) { } } + public void generateAbstractMethods(Set<MethodSpec> specs) { + for (Field field : fields) { + field.generateAbstractMethods(specs); + } + + for (Element elem : nested) { + elem.generateAbstractMethods(specs); + } + } + public void findDependencies(Set<String> dependencies) { for (Field field : fields) { field.getDependencies(dependencies); } @@ -95,5 +106,6 @@ public String getName() { } public void getDependencies(Set<String> deps) {} + public void generateAbstractMethods(Set<MethodSpec> specs) {} } } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java index 0808eaf0a4..2ce3138890 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java +++ 
b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java @@ -25,7 +25,7 @@ public String getProtoClass() { private MethodSpec javaMap(String methodName, TypeName from, TypeName to) { MethodSpec.Builder method = MethodSpec.methodBuilder(methodName); - method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.addModifiers(Modifier.PUBLIC); method.returns(to); method.addParameter(from, "from"); diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java index b11a312d55..958cb0814a 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java @@ -42,7 +42,7 @@ protected void javaMapToProto(TypeSpec.Builder type) { ClassName javaProtoType = (ClassName)this.type.getJavaProtoType(); MethodSpec.Builder method = MethodSpec.methodBuilder("toProto"); - method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.addModifiers(Modifier.PUBLIC); method.returns(javaProtoType); method.addParameter(this.clazz, "from"); @@ -66,7 +66,7 @@ protected void javaMapFromProto(TypeSpec.Builder type) { return; MethodSpec.Builder method = MethodSpec.methodBuilder("fromProto"); - method.addModifiers(Modifier.STATIC, Modifier.PUBLIC); + method.addModifiers(Modifier.PUBLIC); method.returns(this.clazz); method.addParameter(this.type.getJavaProtoType(), "from"); @@ -120,5 +120,10 @@ public String getProtoTypeDeclaration() { public void getDependencies(Set deps) { getAbstractType().getDependencies(deps); } + + @Override + public void generateAbstractMethods(Set specs) { + getAbstractType().generateAbstractMethods(specs); + } } } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java index 1e8ea694be..7e0726d6b6 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java @@ -5,10 +5,7 @@ import com.github.jknack.handlebars.Template; import com.github.jknack.handlebars.io.FileTemplateLoader; import com.github.jknack.handlebars.io.TemplateLoader; -import com.squareup.javapoet.AnnotationSpec; -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.JavaFile; -import com.squareup.javapoet.TypeSpec; +import com.squareup.javapoet.*; import javax.annotation.Generated; import javax.lang.model.element.Modifier; @@ -23,7 +20,6 @@ public class ProtoGen { public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc.server"; public static String GENERATOR_NAME = "com.netflix.conductor.protogen.ProtoGen"; public static String GENERATED_GO_PACKAGE = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - public static ClassName CUSTOM_MAPPER_CLASS = ClassName.get(GENERATED_MAPPER_PACKAGE, "ProtoMapperBase"); private List files = new ArrayList<>(); @@ -60,19 +56,24 @@ public ProtoGen() { } public void writeMapper(String root) throws Exception { - TypeSpec.Builder protoMapper = TypeSpec.classBuilder("ProtoMapper") - .addModifiers(Modifier.PUBLIC, Modifier.FINAL) + TypeSpec.Builder protoMapper = TypeSpec.classBuilder("AbstractProtoMapper") + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) .addAnnotation(AnnotationSpec.builder(Generated.class) - .addMember("value", "$S", GENERATOR_NAME).build()) - .superclass(CUSTOM_MAPPER_CLASS); + .addMember("value", "$S", GENERATOR_NAME).build()); + + Set abstractMethods = new 
HashSet<>(); for (File file : files) { - file.getMessage().generateJavaMapper(protoMapper); + Element elem = file.getMessage(); + elem.generateJavaMapper(protoMapper); + elem.generateAbstractMethods(abstractMethods); } + protoMapper.addMethods(abstractMethods); + JavaFile javaFile = JavaFile.builder(GENERATED_MAPPER_PACKAGE, protoMapper.build()) .indent(" ").build(); - Path filename = Paths.get(root, "ProtoMapper.java"); + Path filename = Paths.get(root, "AbstractProtoMapper.java"); try (Writer writer = new FileWriter(filename.toString())) { javaFile.writeTo(writer); } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java index 3932ad5033..abdb67b23f 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java @@ -5,7 +5,9 @@ import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; +import com.squareup.javapoet.TypeSpec; +import javax.lang.model.element.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.*; @@ -101,52 +103,11 @@ public TypeName getJavaProtoType() { public abstract void mapToProto(String field, MethodSpec.Builder method); public abstract void mapFromProto(String field, MethodSpec.Builder method); - public void getDependencies(Set deps) {} + public abstract void getDependencies(Set deps); + public abstract void generateAbstractMethods(Set specs); protected String fieldMethod(String m, String field) { return m + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, field); } - public static class WrappedType extends AbstractType { - private AbstractType realType; - private MessageType wrappedType; - - public static WrappedType wrap(GenericType realType) { - Type valueType = realType.getValueType().getJavaType(); - if (!(valueType instanceof Class)) - throw new IllegalArgumentException("cannot wrap primitive type: "+ valueType); - - String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix(); - MessageType wrappedType = AbstractType.get(className); - if (wrappedType == null) - throw new IllegalArgumentException("missing wrapper class: "+className); - return new WrappedType(realType, wrappedType); - } - - public WrappedType(AbstractType realType, MessageType wrappedType) { - super(realType.getJavaType(), wrappedType.getJavaProtoType()); - this.realType = realType; - this.wrappedType = wrappedType; - } - - @Override - public String getProtoType() { - return wrappedType.getProtoType(); - } - - @Override - public TypeName getRawJavaType() { - return realType.getRawJavaType(); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - wrappedType.mapToProto(field, method); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - wrappedType.mapFromProto(field, method); - } - } } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java index 3e18600930..98fd67376e 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java @@ -1,7 +1,9 @@ package com.netflix.conductor.protogen.types; import com.squareup.javapoet.ClassName; +import 
com.squareup.javapoet.MethodSpec; +import javax.lang.model.element.Modifier; import java.util.Set; public class AnyType extends MessageType { @@ -9,13 +11,31 @@ public AnyType() { super(Object.class, ClassName.get("com.google.protobuf", "Value"), null); } + @Override + public String getProtoType() { + return "google.protobuf.Value"; + } + @Override public void getDependencies(Set deps) { deps.add("google/protobuf/struct.proto"); } @Override - public String getProtoType() { - return "google.protobuf.Value"; + public void generateAbstractMethods(Set specs) { + MethodSpec fromProto = MethodSpec.methodBuilder("fromProto") + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .returns(this.getJavaType()) + .addParameter(this.getJavaProtoType(), "in") + .build(); + + MethodSpec toProto = MethodSpec.methodBuilder("toProto") + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .returns(this.getJavaProtoType()) + .addParameter(this.getJavaType(), "in") + .build(); + + specs.add(fromProto); + specs.add(toProto); } } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java index 2250cce1b8..e1e61175eb 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java @@ -1,7 +1,7 @@ package com.netflix.conductor.protogen.types; -import com.netflix.conductor.protogen.types.AbstractType; import com.squareup.javapoet.ClassName; +import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; import java.lang.reflect.ParameterizedType; @@ -43,6 +43,11 @@ public void getDependencies(Set deps) { getValueType().getDependencies(deps); } + @Override + public void generateAbstractMethods(Set specs) { + getValueType().generateAbstractMethods(specs); + } + @Override public TypeName getJavaProtoType() { if (javaProtoType == null) { diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java index 3713160c7b..77854e70b5 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java @@ -59,7 +59,7 @@ public void mapFromProto(String field, MethodSpec.Builder method) { fieldMethod("set", field), fieldMethod("get", field) + "List"); } } else { - method.addStatement("to.$L( from.$L().stream().map(ProtoMapper::fromProto).collect($T.toCollection($T::new)) )", + method.addStatement("to.$L( from.$L().stream().map(this::fromProto).collect($T.toCollection($T::new)) )", fieldMethod("set", field), fieldMethod("get", field)+"List", Collectors.class, collector); } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java index bbba66c86e..d8f0257890 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java @@ -49,4 +49,7 @@ public void mapFromProto(String field, MethodSpec.Builder method) { public void getDependencies(Set deps) { deps.add(getProtoFile().getFilePath()); } + + @Override + public void generateAbstractMethods(Set specs) {} } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java 
b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java index 1f16d9a17e..556cb8bec4 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java @@ -5,6 +5,7 @@ import com.squareup.javapoet.TypeName; import java.lang.reflect.Type; +import java.util.Set; public class ScalarType extends AbstractType { private String protoType; @@ -40,4 +41,10 @@ public void mapToProto(String field, MethodSpec.Builder method) { getJavaType().equals(Boolean.class)) ? "is" : "get"; mapCode(field, method, getter); } + + @Override + public void getDependencies(Set<String> deps) {} + + @Override + public void generateAbstractMethods(Set<MethodSpec> specs) {} } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java new file mode 100644 index 0000000000..1c95bdefcf --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java @@ -0,0 +1,75 @@ +package com.netflix.conductor.protogen.types; + +import com.squareup.javapoet.MethodSpec; +import com.squareup.javapoet.TypeName; + +import javax.lang.model.element.Modifier; +import java.lang.reflect.Type; +import java.util.Set; + +public class WrappedType extends AbstractType { + private AbstractType realType; + private MessageType wrappedType; + + public static com.netflix.conductor.protogen.types.WrappedType wrap(GenericType realType) { + Type valueType = realType.getValueType().getJavaType(); + if (!(valueType instanceof Class)) + throw new IllegalArgumentException("cannot wrap primitive type: "+ valueType); + + String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix(); + MessageType wrappedType = AbstractType.get(className); + if (wrappedType == null) + throw new IllegalArgumentException("missing wrapper class: "+className); + return new com.netflix.conductor.protogen.types.WrappedType(realType, wrappedType); + } + + public WrappedType(AbstractType realType, MessageType wrappedType) { + super(realType.getJavaType(), wrappedType.getJavaProtoType()); + this.realType = realType; + this.wrappedType = wrappedType; + } + + @Override + public String getProtoType() { + return wrappedType.getProtoType(); + } + + @Override + public TypeName getRawJavaType() { + return realType.getRawJavaType(); + } + + @Override + public void mapToProto(String field, MethodSpec.Builder method) { + wrappedType.mapToProto(field, method); + } + + @Override + public void mapFromProto(String field, MethodSpec.Builder method) { + wrappedType.mapFromProto(field, method); + } + + @Override + public void getDependencies(Set<String> deps) { + this.realType.getDependencies(deps); + this.wrappedType.getDependencies(deps); + } + + @Override + public void generateAbstractMethods(Set<MethodSpec> specs) { + MethodSpec fromProto = MethodSpec.methodBuilder("fromProto") + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .returns(this.realType.getJavaType()) + .addParameter(this.wrappedType.getJavaProtoType(), "in") + .build(); + + MethodSpec toProto = MethodSpec.methodBuilder("toProto") + .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) + .returns(this.wrappedType.getJavaProtoType()) + .addParameter(this.realType.getJavaType(), "in") + .build(); + + specs.add(fromProto); + specs.add(toProto); + } +}
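Because the generator now emits the mapper with the unmappable conversions left abstract, the hand-written code reduces to a small concrete subclass. The following is only a sketch of such a subclass, not the actual ProtoMapper from this series: the JSON round trip for Object <-> Value uses Jackson plus protobuf's JsonFormat as one possible strategy, the WorkflowTaskList accessors assume a repeated 'tasks' field, and the singleton mirrors what the next patch describes:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.protobuf.Value;
    import com.google.protobuf.util.JsonFormat;
    import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
    import com.netflix.conductor.proto.WorkflowTaskPb;

    import java.io.IOException;
    import java.util.List;
    import java.util.stream.Collectors;

    public class ProtoMapper extends AbstractProtoMapper {
        public static final ProtoMapper INSTANCE = new ProtoMapper();
        private static final ObjectMapper JSON = new ObjectMapper();

        @Override
        public Value toProto(Object in) {
            try {
                // serialize the arbitrary Java object to JSON, then parse it into a protobuf Value
                Value.Builder builder = Value.newBuilder();
                JsonFormat.parser().merge(JSON.writeValueAsString(in), builder);
                return builder.build();
            } catch (IOException e) {
                throw new IllegalArgumentException("cannot map object to Value", e);
            }
        }

        @Override
        public Object fromProto(Value in) {
            try {
                // print the Value as JSON and bind it back to plain maps/lists/scalars
                return JSON.readValue(JsonFormat.printer().print(in), Object.class);
            } catch (IOException e) {
                throw new IllegalArgumentException("cannot map Value to object", e);
            }
        }

        @Override
        public WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List<WorkflowTask> in) {
            WorkflowTaskPb.WorkflowTask.WorkflowTaskList.Builder list =
                    WorkflowTaskPb.WorkflowTask.WorkflowTaskList.newBuilder();
            in.forEach(task -> list.addTasks(toProto(task))); // assumes a repeated 'tasks' field
            return list.build();
        }

        @Override
        public List<WorkflowTask> fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList in) {
            return in.getTasksList().stream().map(this::fromProto).collect(Collectors.toList());
        }
    }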
From 9b5de85b87497a061f5b2e76a07d35564f76c6db Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 17:20:27 +0200 Subject: [PATCH 020/163] grpc-server: Improve ProtoMapper implementation The autogenerated code is now an abstract base class, which we can extend with our custom mapping objects. This makes the separation between generated and custom code more obvious. The ProtoMapper class is no longer static, so we expose it as a singleton now. --- .../grpc/server/AbstractProtoMapper.java | 867 ++++++++++++++++ .../grpc/server/EventServiceImpl.java | 10 +- .../conductor/grpc/server/GRPCUtil.java | 10 +- .../grpc/server/MetadataServiceImpl.java | 18 +- .../conductor/grpc/server/ProtoMapper.java | 928 ++---------------- .../grpc/server/ProtoMapperBase.java | 81 -- .../grpc/server/TaskServiceImpl.java | 15 +- .../grpc/server/WorkflowServiceImpl.java | 13 +- 8 files changed, 982 insertions(+), 960 deletions(-) create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java delete mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java new file mode 100644 index 0000000000..28d38379b1 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java @@ -0,0 +1,867 @@ +package com.netflix.conductor.grpc.server; + +import com.google.protobuf.Value; +import com.netflix.conductor.common.metadata.events.EventExecution; +import com.netflix.conductor.common.metadata.events.EventHandler; +import com.netflix.conductor.common.metadata.tasks.PollData; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskExecLog; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask; +import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; +import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.TaskSummary; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; +import com.netflix.conductor.proto.DynamicForkJoinTaskPb; +import com.netflix.conductor.proto.EventExecutionPb; +import com.netflix.conductor.proto.EventHandlerPb; +import com.netflix.conductor.proto.PollDataPb; +import com.netflix.conductor.proto.RerunWorkflowRequestPb; +import com.netflix.conductor.proto.SkipTaskRequestPb; +import com.netflix.conductor.proto.StartWorkflowRequestPb; +import com.netflix.conductor.proto.SubWorkflowParamsPb; +import com.netflix.conductor.proto.TaskDefPb; +import com.netflix.conductor.proto.TaskExecLogPb; +import com.netflix.conductor.proto.TaskPb; +import com.netflix.conductor.proto.TaskResultPb; +import com.netflix.conductor.proto.TaskSummaryPb; +import com.netflix.conductor.proto.WorkflowDefPb; +import com.netflix.conductor.proto.WorkflowPb; +import com.netflix.conductor.proto.WorkflowSummaryPb; +import 
com.netflix.conductor.proto.WorkflowTaskPb; +import java.lang.IllegalArgumentException; +import java.lang.Object; +import java.lang.String; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import javax.annotation.Generated; + +@Generated("com.netflix.conductor.protogen.ProtoGen") +public abstract class AbstractProtoMapper { + public EventExecutionPb.EventExecution toProto(EventExecution from) { + EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); + to.setId( from.getId() ); + to.setMessageId( from.getMessageId() ); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCreated( from.getCreated() ); + to.setStatus( toProto( from.getStatus() ) ); + to.setAction( toProto( from.getAction() ) ); + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public EventExecution fromProto(EventExecutionPb.EventExecution from) { + EventExecution to = new EventExecution(); + to.setId( from.getId() ); + to.setMessageId( from.getMessageId() ); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCreated( from.getCreated() ); + to.setStatus( fromProto( from.getStatus() ) ); + to.setAction( fromProto( from.getAction() ) ); + Map outputMap = new HashMap(); + for (Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + return to; + } + + public EventExecutionPb.EventExecution.Status toProto(EventExecution.Status from) { + EventExecutionPb.EventExecution.Status to; + switch (from) { + case IN_PROGRESS: to = EventExecutionPb.EventExecution.Status.IN_PROGRESS; break; + case COMPLETED: to = EventExecutionPb.EventExecution.Status.COMPLETED; break; + case FAILED: to = EventExecutionPb.EventExecution.Status.FAILED; break; + case SKIPPED: to = EventExecutionPb.EventExecution.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status from) { + EventExecution.Status to; + switch (from) { + case IN_PROGRESS: to = EventExecution.Status.IN_PROGRESS; break; + case COMPLETED: to = EventExecution.Status.COMPLETED; break; + case FAILED: to = EventExecution.Status.FAILED; break; + case SKIPPED: to = EventExecution.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public EventHandlerPb.EventHandler toProto(EventHandler from) { + EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCondition( from.getCondition() ); + for (EventHandler.Action elem : from.getActions()) { + to.addActions( toProto(elem) ); + } + to.setActive( from.isActive() ); + return to.build(); + } + + public EventHandler fromProto(EventHandlerPb.EventHandler from) { + EventHandler to = new EventHandler(); + to.setName( from.getName() ); + to.setEvent( from.getEvent() ); + to.setCondition( from.getCondition() ); + to.setActions( from.getActionsList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setActive( from.getActive() ); + return to; + } + + public 
EventHandlerPb.EventHandler.StartWorkflow toProto(EventHandler.StartWorkflow from) { + EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public EventHandler.StartWorkflow fromProto(EventHandlerPb.EventHandler.StartWorkflow from) { + EventHandler.StartWorkflow to = new EventHandler.StartWorkflow(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + return to; + } + + public EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { + EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); + to.setWorkflowId( from.getWorkflowId() ); + to.setTaskRefName( from.getTaskRefName() ); + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetails from) { + EventHandler.TaskDetails to = new EventHandler.TaskDetails(); + to.setWorkflowId( from.getWorkflowId() ); + to.setTaskRefName( from.getTaskRefName() ); + Map outputMap = new HashMap(); + for (Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + return to; + } + + public EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { + EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); + to.setAction( toProto( from.getAction() ) ); + to.setStartWorkflow( toProto( from.getStartWorkflow() ) ); + to.setCompleteTask( toProto( from.getCompleteTask() ) ); + to.setFailTask( toProto( from.getFailTask() ) ); + to.setExpandInlineJson( from.isExpandInlineJson() ); + return to.build(); + } + + public EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { + EventHandler.Action to = new EventHandler.Action(); + to.setAction( fromProto( from.getAction() ) ); + to.setStartWorkflow( fromProto( from.getStartWorkflow() ) ); + to.setCompleteTask( fromProto( from.getCompleteTask() ) ); + to.setFailTask( fromProto( from.getFailTask() ) ); + to.setExpandInlineJson( from.getExpandInlineJson() ); + return to; + } + + public EventHandlerPb.EventHandler.Action.Type toProto(EventHandler.Action.Type from) { + EventHandlerPb.EventHandler.Action.Type to; + switch (from) { + case START_WORKFLOW: to = EventHandlerPb.EventHandler.Action.Type.START_WORKFLOW; break; + case COMPLETE_TASK: to = EventHandlerPb.EventHandler.Action.Type.COMPLETE_TASK; break; + case FAIL_TASK: to = EventHandlerPb.EventHandler.Action.Type.FAIL_TASK; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Type from) { + EventHandler.Action.Type to; + switch (from) { + case START_WORKFLOW: to = EventHandler.Action.Type.START_WORKFLOW; break; + case COMPLETE_TASK: to = 
EventHandler.Action.Type.COMPLETE_TASK; break; + case FAIL_TASK: to = EventHandler.Action.Type.FAIL_TASK; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public PollDataPb.PollData toProto(PollData from) { + PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); + to.setQueueName( from.getQueueName() ); + to.setDomain( from.getDomain() ); + to.setWorkerId( from.getWorkerId() ); + to.setLastPollTime( from.getLastPollTime() ); + return to.build(); + } + + public PollData fromProto(PollDataPb.PollData from) { + PollData to = new PollData(); + to.setQueueName( from.getQueueName() ); + to.setDomain( from.getDomain() ); + to.setWorkerId( from.getWorkerId() ); + to.setLastPollTime( from.getLastPollTime() ); + return to; + } + + public TaskPb.Task toProto(Task from) { + TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); + to.setTaskType( from.getTaskType() ); + to.setStatus( toProto( from.getStatus() ) ); + for (Map.Entry pair : from.getInputData().entrySet()) { + to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setReferenceTaskName( from.getReferenceTaskName() ); + to.setRetryCount( from.getRetryCount() ); + to.setSeq( from.getSeq() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setPollCount( from.getPollCount() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setEndTime( from.getEndTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); + to.setRetriedTaskId( from.getRetriedTaskId() ); + to.setRetried( from.isRetried() ); + to.setCallbackFromWorker( from.isCallbackFromWorker() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setWorkflowType( from.getWorkflowType() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + for (Map.Entry pair : from.getOutputData().entrySet()) { + to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); + to.setDomain( from.getDomain() ); + return to.build(); + } + + public Task fromProto(TaskPb.Task from) { + Task to = new Task(); + to.setTaskType( from.getTaskType() ); + to.setStatus( fromProto( from.getStatus() ) ); + Map inputDataMap = new HashMap(); + for (Map.Entry pair : from.getInputDataMap().entrySet()) { + inputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputData(inputDataMap); + to.setReferenceTaskName( from.getReferenceTaskName() ); + to.setRetryCount( from.getRetryCount() ); + to.setSeq( from.getSeq() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setPollCount( from.getPollCount() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setEndTime( from.getEndTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); + to.setRetriedTaskId( from.getRetriedTaskId() ); + to.setRetried( from.getRetried() ); + to.setCallbackFromWorker( from.getCallbackFromWorker() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + 
to.setWorkflowType( from.getWorkflowType() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + Map outputDataMap = new HashMap(); + for (Map.Entry pair : from.getOutputDataMap().entrySet()) { + outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputData(outputDataMap); + to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); + to.setDomain( from.getDomain() ); + return to; + } + + public TaskPb.Task.Status toProto(Task.Status from) { + TaskPb.Task.Status to; + switch (from) { + case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; + case CANCELED: to = TaskPb.Task.Status.CANCELED; break; + case FAILED: to = TaskPb.Task.Status.FAILED; break; + case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; + case COMPLETED_WITH_ERRORS: to = TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; + case SCHEDULED: to = TaskPb.Task.Status.SCHEDULED; break; + case TIMED_OUT: to = TaskPb.Task.Status.TIMED_OUT; break; + case READY_FOR_RERUN: to = TaskPb.Task.Status.READY_FOR_RERUN; break; + case SKIPPED: to = TaskPb.Task.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public Task.Status fromProto(TaskPb.Task.Status from) { + Task.Status to; + switch (from) { + case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; + case CANCELED: to = Task.Status.CANCELED; break; + case FAILED: to = Task.Status.FAILED; break; + case COMPLETED: to = Task.Status.COMPLETED; break; + case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; + case SCHEDULED: to = Task.Status.SCHEDULED; break; + case TIMED_OUT: to = Task.Status.TIMED_OUT; break; + case READY_FOR_RERUN: to = Task.Status.READY_FOR_RERUN; break; + case SKIPPED: to = Task.Status.SKIPPED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskDefPb.TaskDef toProto(TaskDef from) { + TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setRetryCount( from.getRetryCount() ); + to.setTimeoutSeconds( from.getTimeoutSeconds() ); + to.addAllInputKeys( from.getInputKeys() ); + to.addAllOutputKeys( from.getOutputKeys() ); + to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); + to.setRetryLogic( toProto( from.getRetryLogic() ) ); + to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); + to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); + for (Map.Entry pair : from.getInputTemplate().entrySet()) { + to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public TaskDef fromProto(TaskDefPb.TaskDef from) { + TaskDef to = new TaskDef(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setRetryCount( from.getRetryCount() ); + to.setTimeoutSeconds( from.getTimeoutSeconds() ); + to.setInputKeys( from.getInputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setOutputKeys( from.getOutputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); + to.setRetryLogic( fromProto( from.getRetryLogic() ) ); + to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); + 
to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); + to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); + Map inputTemplateMap = new HashMap(); + for (Map.Entry pair : from.getInputTemplateMap().entrySet()) { + inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputTemplate(inputTemplateMap); + return to; + } + + public TaskDefPb.TaskDef.RetryLogic toProto(TaskDef.RetryLogic from) { + TaskDefPb.TaskDef.RetryLogic to; + switch (from) { + case FIXED: to = TaskDefPb.TaskDef.RetryLogic.FIXED; break; + case EXPONENTIAL_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskDef.RetryLogic fromProto(TaskDefPb.TaskDef.RetryLogic from) { + TaskDef.RetryLogic to; + switch (from) { + case FIXED: to = TaskDef.RetryLogic.FIXED; break; + case EXPONENTIAL_BACKOFF: to = TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskDefPb.TaskDef.TimeoutPolicy toProto(TaskDef.TimeoutPolicy from) { + TaskDefPb.TaskDef.TimeoutPolicy to; + switch (from) { + case RETRY: to = TaskDefPb.TaskDef.TimeoutPolicy.RETRY; break; + case TIME_OUT_WF: to = TaskDefPb.TaskDef.TimeoutPolicy.TIME_OUT_WF; break; + case ALERT_ONLY: to = TaskDefPb.TaskDef.TimeoutPolicy.ALERT_ONLY; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { + TaskDef.TimeoutPolicy to; + switch (from) { + case RETRY: to = TaskDef.TimeoutPolicy.RETRY; break; + case TIME_OUT_WF: to = TaskDef.TimeoutPolicy.TIME_OUT_WF; break; + case ALERT_ONLY: to = TaskDef.TimeoutPolicy.ALERT_ONLY; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { + TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); + to.setLog( from.getLog() ); + to.setTaskId( from.getTaskId() ); + to.setCreatedTime( from.getCreatedTime() ); + return to.build(); + } + + public TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { + TaskExecLog to = new TaskExecLog(); + to.setLog( from.getLog() ); + to.setTaskId( from.getTaskId() ); + to.setCreatedTime( from.getCreatedTime() ); + return to; + } + + public TaskResultPb.TaskResult toProto(TaskResult from) { + TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + to.setStatus( toProto( from.getStatus() ) ); + for (Map.Entry pair : from.getOutputData().entrySet()) { + to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); + } + return to.build(); + } + + public TaskResult fromProto(TaskResultPb.TaskResult from) { + TaskResult to = new TaskResult(); + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + to.setTaskId( from.getTaskId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); + to.setWorkerId( from.getWorkerId() ); + to.setStatus( fromProto( from.getStatus() ) ); + Map outputDataMap = new 
HashMap(); + for (Map.Entry pair : from.getOutputDataMap().entrySet()) { + outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputData(outputDataMap); + return to; + } + + public TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { + TaskResultPb.TaskResult.Status to; + switch (from) { + case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; + case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; + case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; + case SCHEDULED: to = TaskResultPb.TaskResult.Status.SCHEDULED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { + TaskResult.Status to; + switch (from) { + case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; + case FAILED: to = TaskResult.Status.FAILED; break; + case COMPLETED: to = TaskResult.Status.COMPLETED; break; + case SCHEDULED: to = TaskResult.Status.SCHEDULED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { + DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); + to.setTaskName( from.getTaskName() ); + to.setWorkflowName( from.getWorkflowName() ); + to.setReferenceName( from.getReferenceName() ); + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setType( from.getType() ); + return to.build(); + } + + public DynamicForkJoinTask fromProto(DynamicForkJoinTaskPb.DynamicForkJoinTask from) { + DynamicForkJoinTask to = new DynamicForkJoinTask(); + to.setTaskName( from.getTaskName() ); + to.setWorkflowName( from.getWorkflowName() ); + to.setReferenceName( from.getReferenceName() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + to.setType( from.getType() ); + return to; + } + + public DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto(DynamicForkJoinTaskList from) { + DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to = DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder(); + for (DynamicForkJoinTask elem : from.getDynamicTasks()) { + to.addDynamicTasks( toProto(elem) ); + } + return to.build(); + } + + public DynamicForkJoinTaskList fromProto( + DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) { + DynamicForkJoinTaskList to = new DynamicForkJoinTaskList(); + to.setDynamicTasks( from.getDynamicTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + return to; + } + + public RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { + RerunWorkflowRequest to = new RerunWorkflowRequest(); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + Map workflowInputMap = new HashMap(); + for (Map.Entry pair : from.getWorkflowInputMap().entrySet()) { + workflowInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setWorkflowInput(workflowInputMap); + to.setReRunFromTaskId( from.getReRunFromTaskId() ); + Map taskInputMap = new HashMap(); + for (Map.Entry pair : from.getTaskInputMap().entrySet()) { + taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + 
to.setTaskInput(taskInputMap); + to.setCorrelationId( from.getCorrelationId() ); + return to; + } + + public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { + SkipTaskRequest to = new SkipTaskRequest(); + Map taskInputMap = new HashMap(); + for (Map.Entry pair : from.getTaskInputMap().entrySet()) { + taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setTaskInput(taskInputMap); + Map taskOutputMap = new HashMap(); + for (Map.Entry pair : from.getTaskOutputMap().entrySet()) { + taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setTaskOutput(taskOutputMap); + return to; + } + + public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { + StartWorkflowRequest to = new StartWorkflowRequest(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + to.setTaskToDomain( from.getTaskToDomainMap() ); + return to; + } + + public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { + SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); + to.setName( from.getName() ); + to.setVersion( toProto( from.getVersion() ) ); + return to.build(); + } + + public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { + SubWorkflowParams to = new SubWorkflowParams(); + to.setName( from.getName() ); + to.setVersion( fromProto( from.getVersion() ) ); + return to; + } + + public WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { + WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setVersion( from.getVersion() ); + for (WorkflowTask elem : from.getTasks()) { + to.addTasks( toProto(elem) ); + } + to.addAllInputParameters( from.getInputParameters() ); + for (Map.Entry pair : from.getOutputParameters().entrySet()) { + to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setFailureWorkflow( from.getFailureWorkflow() ); + to.setSchemaVersion( from.getSchemaVersion() ); + return to.build(); + } + + public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { + WorkflowDef to = new WorkflowDef(); + to.setName( from.getName() ); + to.setDescription( from.getDescription() ); + to.setVersion( from.getVersion() ); + to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(LinkedList::new)) ); + to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + Map outputParametersMap = new HashMap(); + for (Map.Entry pair : from.getOutputParametersMap().entrySet()) { + outputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutputParameters(outputParametersMap); + to.setFailureWorkflow( from.getFailureWorkflow() ); + to.setSchemaVersion( from.getSchemaVersion() ); + return to; + } + + public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { + WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); + to.setName( from.getName() ); + to.setTaskReferenceName( from.getTaskReferenceName() ); + to.setDescription( from.getDescription() ); + for (Map.Entry pair : from.getInputParameters().entrySet()) { + to.putInputParameters( 
pair.getKey(), toProto( pair.getValue() ) ); + } + to.setType( from.getType() ); + to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); + to.setCaseValueParam( from.getCaseValueParam() ); + to.setCaseExpression( from.getCaseExpression() ); + for (Map.Entry> pair : from.getDecisionCases().entrySet()) { + to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); + to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + for (WorkflowTask elem : from.getDefaultCase()) { + to.addDefaultCase( toProto(elem) ); + } + for (List elem : from.getForkTasks()) { + to.addForkTasks( toProto(elem) ); + } + to.setStartDelay( from.getStartDelay() ); + to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); + to.addAllJoinOn( from.getJoinOn() ); + to.setSink( from.getSink() ); + to.setOptional( from.isOptional() ); + return to.build(); + } + + public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { + WorkflowTask to = new WorkflowTask(); + to.setName( from.getName() ); + to.setTaskReferenceName( from.getTaskReferenceName() ); + to.setDescription( from.getDescription() ); + Map inputParametersMap = new HashMap(); + for (Map.Entry pair : from.getInputParametersMap().entrySet()) { + inputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInputParameters(inputParametersMap); + to.setType( from.getType() ); + to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); + to.setCaseValueParam( from.getCaseValueParam() ); + to.setCaseExpression( from.getCaseExpression() ); + Map> decisionCasesMap = new HashMap>(); + for (Map.Entry pair : from.getDecisionCasesMap().entrySet()) { + decisionCasesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setDecisionCases(decisionCasesMap); + to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); + to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + to.setDefaultCase( from.getDefaultCaseList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setForkTasks( from.getForkTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + to.setStartDelay( from.getStartDelay() ); + to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); + to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); + to.setSink( from.getSink() ); + to.setOptional( from.getOptional() ); + return to; + } + + public WorkflowTaskPb.WorkflowTask.Type toProto(WorkflowTask.Type from) { + WorkflowTaskPb.WorkflowTask.Type to; + switch (from) { + case SIMPLE: to = WorkflowTaskPb.WorkflowTask.Type.SIMPLE; break; + case DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.DYNAMIC; break; + case FORK_JOIN: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN; break; + case FORK_JOIN_DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; + case DECISION: to = WorkflowTaskPb.WorkflowTask.Type.DECISION; break; + case JOIN: to = WorkflowTaskPb.WorkflowTask.Type.JOIN; break; + case SUB_WORKFLOW: to = WorkflowTaskPb.WorkflowTask.Type.SUB_WORKFLOW; break; + case EVENT: to = WorkflowTaskPb.WorkflowTask.Type.EVENT; break; + case WAIT: to = WorkflowTaskPb.WorkflowTask.Type.WAIT; break; + case USER_DEFINED: to = WorkflowTaskPb.WorkflowTask.Type.USER_DEFINED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public 
WorkflowTask.Type fromProto(WorkflowTaskPb.WorkflowTask.Type from) { + WorkflowTask.Type to; + switch (from) { + case SIMPLE: to = WorkflowTask.Type.SIMPLE; break; + case DYNAMIC: to = WorkflowTask.Type.DYNAMIC; break; + case FORK_JOIN: to = WorkflowTask.Type.FORK_JOIN; break; + case FORK_JOIN_DYNAMIC: to = WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; + case DECISION: to = WorkflowTask.Type.DECISION; break; + case JOIN: to = WorkflowTask.Type.JOIN; break; + case SUB_WORKFLOW: to = WorkflowTask.Type.SUB_WORKFLOW; break; + case EVENT: to = WorkflowTask.Type.EVENT; break; + case WAIT: to = WorkflowTask.Type.WAIT; break; + case USER_DEFINED: to = WorkflowTask.Type.USER_DEFINED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public TaskSummaryPb.TaskSummary toProto(TaskSummary from) { + TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); + to.setWorkflowId( from.getWorkflowId() ); + to.setWorkflowType( from.getWorkflowType() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setScheduledTime( from.getScheduledTime() ); + to.setStartTime( from.getStartTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setEndTime( from.getEndTime() ); + to.setStatus( toProto( from.getStatus() ) ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setExecutionTime( from.getExecutionTime() ); + to.setQueueWaitTime( from.getQueueWaitTime() ); + to.setTaskDefName( from.getTaskDefName() ); + to.setTaskType( from.getTaskType() ); + to.setInput( from.getInput() ); + to.setOutput( from.getOutput() ); + to.setTaskId( from.getTaskId() ); + return to.build(); + } + + public WorkflowPb.Workflow toProto(Workflow from) { + WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); + to.setStatus( toProto( from.getStatus() ) ); + to.setEndTime( from.getEndTime() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setParentWorkflowId( from.getParentWorkflowId() ); + to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + for (Task elem : from.getTasks()) { + to.addTasks( toProto(elem) ); + } + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + for (Map.Entry pair : from.getOutput().entrySet()) { + to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setSchemaVersion( from.getSchemaVersion() ); + to.setEvent( from.getEvent() ); + to.putAllTaskToDomain( from.getTaskToDomain() ); + to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + return to.build(); + } + + public Workflow fromProto(WorkflowPb.Workflow from) { + Workflow to = new Workflow(); + to.setStatus( fromProto( from.getStatus() ) ); + to.setEndTime( from.getEndTime() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setParentWorkflowId( from.getParentWorkflowId() ); + to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); + Map inputMap = new HashMap(); + for (Map.Entry pair : from.getInputMap().entrySet()) { + inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setInput(inputMap); + Map outputMap = new HashMap(); + for 
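+        /*
+         * NOTE: only Object-valued maps need the per-entry Value conversion in these
+         * loops; String-valued maps such as taskToDomain are assigned directly
+         * (setTaskToDomain(from.getTaskToDomainMap()) a few lines down), since
+         * protobuf's map<string, string> already matches the Java type.
+         */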
(Map.Entry pair : from.getOutputMap().entrySet()) { + outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); + } + to.setOutput(outputMap); + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setSchemaVersion( from.getSchemaVersion() ); + to.setEvent( from.getEvent() ); + to.setTaskToDomain( from.getTaskToDomainMap() ); + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); + return to; + } + + public WorkflowPb.Workflow.WorkflowStatus toProto(Workflow.WorkflowStatus from) { + WorkflowPb.Workflow.WorkflowStatus to; + switch (from) { + case RUNNING: to = WorkflowPb.Workflow.WorkflowStatus.RUNNING; break; + case COMPLETED: to = WorkflowPb.Workflow.WorkflowStatus.COMPLETED; break; + case FAILED: to = WorkflowPb.Workflow.WorkflowStatus.FAILED; break; + case TIMED_OUT: to = WorkflowPb.Workflow.WorkflowStatus.TIMED_OUT; break; + case TERMINATED: to = WorkflowPb.Workflow.WorkflowStatus.TERMINATED; break; + case PAUSED: to = WorkflowPb.Workflow.WorkflowStatus.PAUSED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from) { + Workflow.WorkflowStatus to; + switch (from) { + case RUNNING: to = Workflow.WorkflowStatus.RUNNING; break; + case COMPLETED: to = Workflow.WorkflowStatus.COMPLETED; break; + case FAILED: to = Workflow.WorkflowStatus.FAILED; break; + case TIMED_OUT: to = Workflow.WorkflowStatus.TIMED_OUT; break; + case TERMINATED: to = Workflow.WorkflowStatus.TERMINATED; break; + case PAUSED: to = Workflow.WorkflowStatus.PAUSED; break; + default: throw new IllegalArgumentException("Unexpected enum constant: " + from); + } + return to; + } + + public WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { + WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder(); + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setStartTime( from.getStartTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setEndTime( from.getEndTime() ); + to.setStatus( toProto( from.getStatus() ) ); + to.setInput( from.getInput() ); + to.setOutput( from.getOutput() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setExecutionTime( from.getExecutionTime() ); + to.setEvent( from.getEvent() ); + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + return to.build(); + } + + public abstract WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List in); + + public abstract List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList in); + + public abstract Value toProto(Object in); + + public abstract Object fromProto(Value in); +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java index f827dc2a96..aabde4911c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java @@ -14,6 +14,8 @@ import java.util.Map; public class 
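/*
 * NOTE: the mapper class ending above leaves four conversions abstract: the
 * free-form JSON <-> google.protobuf.Value pair, and the WorkflowTaskList wrapper
 * needed because WorkflowTask.forkTasks is a List<List<WorkflowTask>> and proto3
 * cannot nest repeated fields directly. The hand-written ProtoMapper singleton that
 * appears later in this series (extending AbstractProtoMapper) fills them in, and
 * every gRPC service then shares that one stateless instance:
 *
 *   private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE;
 *   ...
 *   response.onNext(protoMapper.toProto(eventHandler));
 */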
EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { + private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; + private MetadataService service; private EventProcessor ep; @@ -25,13 +27,13 @@ public EventServiceImpl(MetadataService service, EventProcessor ep) { @Override public void addEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { - service.addEventHandler(ProtoMapper.fromProto(req)); + service.addEventHandler(protoMapper.fromProto(req)); response.onCompleted(); } @Override public void updateEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { - service.updateEventHandler(ProtoMapper.fromProto(req)); + service.updateEventHandler(protoMapper.fromProto(req)); response.onCompleted(); } @@ -43,7 +45,7 @@ public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, Str @Override public void getEventHandlers(Empty req, StreamObserver response) { for (EventHandler eh : service.getEventHandlers()) { - response.onNext(ProtoMapper.toProto(eh)); + response.onNext(protoMapper.toProto(eh)); } response.onCompleted(); } @@ -51,7 +53,7 @@ public void getEventHandlers(Empty req, StreamObserver response) { for (EventHandler eh : service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly())) { - response.onNext(ProtoMapper.toProto(eh)); + response.onNext(protoMapper.toProto(eh)); } response.onCompleted(); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java index c60f1c75b5..aea8206d07 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java @@ -10,11 +10,11 @@ public class GRPCUtil { private GRPCUtil() {} private static String stacktraceToString(Throwable e) { - StringWriter stringWriter = new StringWriter(); - PrintWriter printWriter = new PrintWriter(stringWriter); - e.printStackTrace(printWriter); - return stringWriter.toString(); -} + StringWriter stringWriter = new StringWriter(); + PrintWriter printWriter = new PrintWriter(stringWriter); + e.printStackTrace(printWriter); + return stringWriter.toString(); + } public static void onError(StreamObserver response, Throwable t) { response.onError(Status.INTERNAL diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index 2c1ac6bbfc..000839bf55 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -17,6 +17,8 @@ import java.util.List; public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { + private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; + private MetadataService service; @Inject @@ -27,7 +29,7 @@ public MetadataServiceImpl(MetadataService service) { @Override public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver response) { try { - service.registerWorkflowDef(ProtoMapper.fromProto(req)); + service.registerWorkflowDef(protoMapper.fromProto(req)); response.onCompleted(); } catch (Exception e) { GRPCUtil.onError(response, e); @@ -38,7 +40,7 @@ public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) 
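/*
 * NOTE: the bulk update takes the whole batch in a single request message. A
 * client-side sketch; the channel setup is elided and the addDefs(...) builder
 * method is inferred from the repeated "defs" field, so treat it as an assumption:
 *
 *   MetadataServiceGrpc.MetadataServiceBlockingStub stub =
 *           MetadataServiceGrpc.newBlockingStub(channel);
 *   stub.updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest.newBuilder()
 *           .addDefs(protoMapper.toProto(workflowDef))
 *           .build());
 */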
{ List workflows = new ArrayList<>(); for (WorkflowDefPb.WorkflowDef def : req.getDefsList()) { - workflows.add(ProtoMapper.fromProto(def)); + workflows.add(protoMapper.fromProto(def)); } try { @@ -54,7 +56,7 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver // TODO: req.getVersion optional WorkflowDef def = service.getWorkflowDef(req.getName(), req.getVersion()); if (def != null) { - response.onNext(ProtoMapper.toProto(def)); + response.onNext(protoMapper.toProto(def)); response.onCompleted(); } else { response.onError(Status.NOT_FOUND @@ -67,7 +69,7 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver @Override public void getAllWorkflows(Empty _request, StreamObserver response) { for (WorkflowDef def : service.getWorkflowDefs()) { - response.onNext(ProtoMapper.toProto(def)); + response.onNext(protoMapper.toProto(def)); } response.onCompleted(); } @@ -76,7 +78,7 @@ public void getAllWorkflows(Empty _request, StreamObserver response) { List allTasks = new ArrayList<>(); for (TaskDefPb.TaskDef task : req.getDefsList()) { - allTasks.add(ProtoMapper.fromProto(task)); + allTasks.add(protoMapper.fromProto(task)); } service.registerTaskDef(allTasks); response.onCompleted(); @@ -84,14 +86,14 @@ public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver @Override public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { - service.updateTaskDef(ProtoMapper.fromProto(req)); + service.updateTaskDef(protoMapper.fromProto(req)); response.onCompleted(); } @Override public void getAllTasks(Empty _request, StreamObserver response) { for (TaskDef def : service.getTaskDefs()) { - response.onNext(ProtoMapper.toProto(def)); + response.onNext(protoMapper.toProto(def)); } response.onCompleted(); } @@ -100,7 +102,7 @@ public void getAllTasks(Empty _request, StreamObserver respon public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { TaskDef def = service.getTaskDef(req.getTaskType()); if (def != null) { - response.onNext(ProtoMapper.toProto(def)); + response.onNext(protoMapper.toProto(def)); response.onCompleted(); } else { response.onError(Status.NOT_FOUND diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java index ae527e9753..491d9fc18f 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java @@ -1,862 +1,92 @@ package com.netflix.conductor.grpc.server; +import com.google.protobuf.ListValue; +import com.google.protobuf.NullValue; +import com.google.protobuf.Struct; import com.google.protobuf.Value; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.events.EventHandler; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; -import 
com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.proto.DynamicForkJoinTaskListPb; -import com.netflix.conductor.proto.DynamicForkJoinTaskPb; -import com.netflix.conductor.proto.EventExecutionPb; -import com.netflix.conductor.proto.EventHandlerPb; -import com.netflix.conductor.proto.PollDataPb; -import com.netflix.conductor.proto.RerunWorkflowRequestPb; -import com.netflix.conductor.proto.SkipTaskRequestPb; -import com.netflix.conductor.proto.StartWorkflowRequestPb; -import com.netflix.conductor.proto.SubWorkflowParamsPb; -import com.netflix.conductor.proto.TaskDefPb; -import com.netflix.conductor.proto.TaskExecLogPb; -import com.netflix.conductor.proto.TaskPb; -import com.netflix.conductor.proto.TaskResultPb; -import com.netflix.conductor.proto.TaskSummaryPb; -import com.netflix.conductor.proto.WorkflowDefPb; -import com.netflix.conductor.proto.WorkflowPb; -import com.netflix.conductor.proto.WorkflowSummaryPb; import com.netflix.conductor.proto.WorkflowTaskPb; -import java.lang.IllegalArgumentException; -import java.lang.Object; -import java.lang.String; + import java.util.ArrayList; import java.util.HashMap; -import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -import javax.annotation.Generated; - -@Generated("com.netflix.conductor.protogen.ProtoGen") -public final class ProtoMapper extends ProtoMapperBase { - public static EventExecutionPb.EventExecution toProto(EventExecution from) { - EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); - to.setId( from.getId() ); - to.setMessageId( from.getMessageId() ); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCreated( from.getCreated() ); - to.setStatus( toProto( from.getStatus() ) ); - to.setAction( toProto( from.getAction() ) ); - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public static EventExecution fromProto(EventExecutionPb.EventExecution from) { - EventExecution to = new EventExecution(); - to.setId( from.getId() ); - to.setMessageId( from.getMessageId() ); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCreated( from.getCreated() ); - to.setStatus( fromProto( from.getStatus() ) ); - to.setAction( fromProto( from.getAction() ) ); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - return to; - } - - public static EventExecutionPb.EventExecution.Status toProto(EventExecution.Status from) { - EventExecutionPb.EventExecution.Status to; - switch (from) { - case IN_PROGRESS: to = EventExecutionPb.EventExecution.Status.IN_PROGRESS; break; - case COMPLETED: to = EventExecutionPb.EventExecution.Status.COMPLETED; break; - case FAILED: to = EventExecutionPb.EventExecution.Status.FAILED; break; - case SKIPPED: to = EventExecutionPb.EventExecution.Status.SKIPPED; break; - default: throw new 
IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status from) { - EventExecution.Status to; - switch (from) { - case IN_PROGRESS: to = EventExecution.Status.IN_PROGRESS; break; - case COMPLETED: to = EventExecution.Status.COMPLETED; break; - case FAILED: to = EventExecution.Status.FAILED; break; - case SKIPPED: to = EventExecution.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static EventHandlerPb.EventHandler toProto(EventHandler from) { - EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCondition( from.getCondition() ); - for (EventHandler.Action elem : from.getActions()) { - to.addActions( toProto(elem) ); - } - to.setActive( from.isActive() ); - return to.build(); - } - - public static EventHandler fromProto(EventHandlerPb.EventHandler from) { - EventHandler to = new EventHandler(); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCondition( from.getCondition() ); - to.setActions( from.getActionsList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setActive( from.getActive() ); - return to; - } - - public static EventHandlerPb.EventHandler.StartWorkflow toProto( - EventHandler.StartWorkflow from) { - EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public static EventHandler.StartWorkflow fromProto( - EventHandlerPb.EventHandler.StartWorkflow from) { - EventHandler.StartWorkflow to = new EventHandler.StartWorkflow(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - return to; - } - - public static EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { - EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); - to.setWorkflowId( from.getWorkflowId() ); - to.setTaskRefName( from.getTaskRefName() ); - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public static EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetails from) { - EventHandler.TaskDetails to = new EventHandler.TaskDetails(); - to.setWorkflowId( from.getWorkflowId() ); - to.setTaskRefName( from.getTaskRefName() ); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - return to; - } - - public static EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { - EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); - to.setAction( toProto( from.getAction() ) ); - to.setStartWorkflow( toProto( 
from.getStartWorkflow() ) ); - to.setCompleteTask( toProto( from.getCompleteTask() ) ); - to.setFailTask( toProto( from.getFailTask() ) ); - to.setExpandInlineJson( from.isExpandInlineJson() ); - return to.build(); - } - - public static EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { - EventHandler.Action to = new EventHandler.Action(); - to.setAction( fromProto( from.getAction() ) ); - to.setStartWorkflow( fromProto( from.getStartWorkflow() ) ); - to.setCompleteTask( fromProto( from.getCompleteTask() ) ); - to.setFailTask( fromProto( from.getFailTask() ) ); - to.setExpandInlineJson( from.getExpandInlineJson() ); - return to; - } - - public static EventHandlerPb.EventHandler.Action.Type toProto(EventHandler.Action.Type from) { - EventHandlerPb.EventHandler.Action.Type to; - switch (from) { - case START_WORKFLOW: to = EventHandlerPb.EventHandler.Action.Type.START_WORKFLOW; break; - case COMPLETE_TASK: to = EventHandlerPb.EventHandler.Action.Type.COMPLETE_TASK; break; - case FAIL_TASK: to = EventHandlerPb.EventHandler.Action.Type.FAIL_TASK; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Type from) { - EventHandler.Action.Type to; - switch (from) { - case START_WORKFLOW: to = EventHandler.Action.Type.START_WORKFLOW; break; - case COMPLETE_TASK: to = EventHandler.Action.Type.COMPLETE_TASK; break; - case FAIL_TASK: to = EventHandler.Action.Type.FAIL_TASK; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static PollDataPb.PollData toProto(PollData from) { - PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); - to.setQueueName( from.getQueueName() ); - to.setDomain( from.getDomain() ); - to.setWorkerId( from.getWorkerId() ); - to.setLastPollTime( from.getLastPollTime() ); - return to.build(); - } - - public static PollData fromProto(PollDataPb.PollData from) { - PollData to = new PollData(); - to.setQueueName( from.getQueueName() ); - to.setDomain( from.getDomain() ); - to.setWorkerId( from.getWorkerId() ); - to.setLastPollTime( from.getLastPollTime() ); - return to; - } - - public static TaskPb.Task toProto(Task from) { - TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); - to.setTaskType( from.getTaskType() ); - to.setStatus( toProto( from.getStatus() ) ); - for (Map.Entry pair : from.getInputData().entrySet()) { - to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setReferenceTaskName( from.getReferenceTaskName() ); - to.setRetryCount( from.getRetryCount() ); - to.setSeq( from.getSeq() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setPollCount( from.getPollCount() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setEndTime( from.getEndTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); - to.setRetriedTaskId( from.getRetriedTaskId() ); - to.setRetried( from.isRetried() ); - to.setCallbackFromWorker( from.isCallbackFromWorker() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( 
from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - for (Map.Entry pair : from.getOutputData().entrySet()) { - to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); - to.setDomain( from.getDomain() ); - return to.build(); - } - - public static Task fromProto(TaskPb.Task from) { - Task to = new Task(); - to.setTaskType( from.getTaskType() ); - to.setStatus( fromProto( from.getStatus() ) ); - Map inputDataMap = new HashMap(); - for (Map.Entry pair : from.getInputDataMap().entrySet()) { - inputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputData(inputDataMap); - to.setReferenceTaskName( from.getReferenceTaskName() ); - to.setRetryCount( from.getRetryCount() ); - to.setSeq( from.getSeq() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setPollCount( from.getPollCount() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setEndTime( from.getEndTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); - to.setRetriedTaskId( from.getRetriedTaskId() ); - to.setRetried( from.getRetried() ); - to.setCallbackFromWorker( from.getCallbackFromWorker() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - Map outputDataMap = new HashMap(); - for (Map.Entry pair : from.getOutputDataMap().entrySet()) { - outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputData(outputDataMap); - to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); - to.setDomain( from.getDomain() ); - return to; - } - - public static TaskPb.Task.Status toProto(Task.Status from) { - TaskPb.Task.Status to; - switch (from) { - case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; - case CANCELED: to = TaskPb.Task.Status.CANCELED; break; - case FAILED: to = TaskPb.Task.Status.FAILED; break; - case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; - case COMPLETED_WITH_ERRORS: to = TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; - case SCHEDULED: to = TaskPb.Task.Status.SCHEDULED; break; - case TIMED_OUT: to = TaskPb.Task.Status.TIMED_OUT; break; - case READY_FOR_RERUN: to = TaskPb.Task.Status.READY_FOR_RERUN; break; - case SKIPPED: to = TaskPb.Task.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static Task.Status fromProto(TaskPb.Task.Status from) { - Task.Status to; - switch (from) { - case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; - case CANCELED: to = Task.Status.CANCELED; break; - case FAILED: to = Task.Status.FAILED; break; - case COMPLETED: to = Task.Status.COMPLETED; break; - case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; - case SCHEDULED: to = Task.Status.SCHEDULED; break; - case TIMED_OUT: to = Task.Status.TIMED_OUT; break; - case READY_FOR_RERUN: to = Task.Status.READY_FOR_RERUN; break; - case SKIPPED: to = Task.Status.SKIPPED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public 
static TaskDefPb.TaskDef toProto(TaskDef from) { - TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); - to.setRetryCount( from.getRetryCount() ); - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - to.addAllInputKeys( from.getInputKeys() ); - to.addAllOutputKeys( from.getOutputKeys() ); - to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); - to.setRetryLogic( toProto( from.getRetryLogic() ) ); - to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); - for (Map.Entry pair : from.getInputTemplate().entrySet()) { - to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public static TaskDef fromProto(TaskDefPb.TaskDef from) { - TaskDef to = new TaskDef(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); - to.setRetryCount( from.getRetryCount() ); - to.setTimeoutSeconds( from.getTimeoutSeconds() ); - to.setInputKeys( from.getInputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setOutputKeys( from.getOutputKeysList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setTimeoutPolicy( fromProto( from.getTimeoutPolicy() ) ); - to.setRetryLogic( fromProto( from.getRetryLogic() ) ); - to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); - to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); - Map inputTemplateMap = new HashMap(); - for (Map.Entry pair : from.getInputTemplateMap().entrySet()) { - inputTemplateMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputTemplate(inputTemplateMap); - return to; - } - - public static TaskDefPb.TaskDef.RetryLogic toProto(TaskDef.RetryLogic from) { - TaskDefPb.TaskDef.RetryLogic to; - switch (from) { - case FIXED: to = TaskDefPb.TaskDef.RetryLogic.FIXED; break; - case EXPONENTIAL_BACKOFF: to = TaskDefPb.TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static TaskDef.RetryLogic fromProto(TaskDefPb.TaskDef.RetryLogic from) { - TaskDef.RetryLogic to; - switch (from) { - case FIXED: to = TaskDef.RetryLogic.FIXED; break; - case EXPONENTIAL_BACKOFF: to = TaskDef.RetryLogic.EXPONENTIAL_BACKOFF; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static TaskDefPb.TaskDef.TimeoutPolicy toProto(TaskDef.TimeoutPolicy from) { - TaskDefPb.TaskDef.TimeoutPolicy to; - switch (from) { - case RETRY: to = TaskDefPb.TaskDef.TimeoutPolicy.RETRY; break; - case TIME_OUT_WF: to = TaskDefPb.TaskDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = TaskDefPb.TaskDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { - TaskDef.TimeoutPolicy to; - switch (from) { - case RETRY: to = TaskDef.TimeoutPolicy.RETRY; break; - case TIME_OUT_WF: to = TaskDef.TimeoutPolicy.TIME_OUT_WF; break; - case ALERT_ONLY: to = TaskDef.TimeoutPolicy.ALERT_ONLY; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static 
TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { - TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); - to.setLog( from.getLog() ); - to.setTaskId( from.getTaskId() ); - to.setCreatedTime( from.getCreatedTime() ); - return to.build(); - } - - public static TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { - TaskExecLog to = new TaskExecLog(); - to.setLog( from.getLog() ); - to.setTaskId( from.getTaskId() ); - to.setCreatedTime( from.getCreatedTime() ); - return to; - } - - public static TaskResultPb.TaskResult toProto(TaskResult from) { - TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - to.setStatus( toProto( from.getStatus() ) ); - for (Map.Entry pair : from.getOutputData().entrySet()) { - to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); - } - return to.build(); - } - - public static TaskResult fromProto(TaskResultPb.TaskResult from) { - TaskResult to = new TaskResult(); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - to.setStatus( fromProto( from.getStatus() ) ); - Map outputDataMap = new HashMap(); - for (Map.Entry pair : from.getOutputDataMap().entrySet()) { - outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputData(outputDataMap); - return to; - } - - public static TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { - TaskResultPb.TaskResult.Status to; - switch (from) { - case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; - case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; - case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; - case SCHEDULED: to = TaskResultPb.TaskResult.Status.SCHEDULED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { - TaskResult.Status to; - switch (from) { - case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; - case FAILED: to = TaskResult.Status.FAILED; break; - case COMPLETED: to = TaskResult.Status.COMPLETED; break; - case SCHEDULED: to = TaskResult.Status.SCHEDULED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { - DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); - to.setTaskName( from.getTaskName() ); - to.setWorkflowName( from.getWorkflowName() ); - to.setReferenceName( from.getReferenceName() ); - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setType( from.getType() ); - return to.build(); - } - - public static DynamicForkJoinTask fromProto(DynamicForkJoinTaskPb.DynamicForkJoinTask from) { - DynamicForkJoinTask to = new DynamicForkJoinTask(); - to.setTaskName( from.getTaskName() ); - to.setWorkflowName( from.getWorkflowName() ); - to.setReferenceName( 
from.getReferenceName() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - to.setType( from.getType() ); - return to; - } - - public static DynamicForkJoinTaskListPb.DynamicForkJoinTaskList toProto( - DynamicForkJoinTaskList from) { - DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.Builder to = DynamicForkJoinTaskListPb.DynamicForkJoinTaskList.newBuilder(); - for (DynamicForkJoinTask elem : from.getDynamicTasks()) { - to.addDynamicTasks( toProto(elem) ); - } - return to.build(); - } - - public static DynamicForkJoinTaskList fromProto( - DynamicForkJoinTaskListPb.DynamicForkJoinTaskList from) { - DynamicForkJoinTaskList to = new DynamicForkJoinTaskList(); - to.setDynamicTasks( from.getDynamicTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - return to; - } - - public static RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { - RerunWorkflowRequest to = new RerunWorkflowRequest(); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - Map workflowInputMap = new HashMap(); - for (Map.Entry pair : from.getWorkflowInputMap().entrySet()) { - workflowInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setWorkflowInput(workflowInputMap); - to.setReRunFromTaskId( from.getReRunFromTaskId() ); - Map taskInputMap = new HashMap(); - for (Map.Entry pair : from.getTaskInputMap().entrySet()) { - taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setTaskInput(taskInputMap); - to.setCorrelationId( from.getCorrelationId() ); - return to; - } - - public static SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { - SkipTaskRequest to = new SkipTaskRequest(); - Map taskInputMap = new HashMap(); - for (Map.Entry pair : from.getTaskInputMap().entrySet()) { - taskInputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setTaskInput(taskInputMap); - Map taskOutputMap = new HashMap(); - for (Map.Entry pair : from.getTaskOutputMap().entrySet()) { - taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setTaskOutput(taskOutputMap); - return to; - } - - public static StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { - StartWorkflowRequest to = new StartWorkflowRequest(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - to.setTaskToDomain( from.getTaskToDomainMap() ); - return to; - } - - public static SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { - SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); - to.setName( from.getName() ); - to.setVersion( toProto( from.getVersion() ) ); - return to.build(); - } - - public static SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { - SubWorkflowParams to = new SubWorkflowParams(); - to.setName( from.getName() ); - to.setVersion( fromProto( from.getVersion() ) ); - return to; - } - - public static WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { - WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); - to.setName( from.getName() ); - to.setDescription( 
from.getDescription() ); - to.setVersion( from.getVersion() ); - for (WorkflowTask elem : from.getTasks()) { - to.addTasks( toProto(elem) ); - } - to.addAllInputParameters( from.getInputParameters() ); - for (Map.Entry pair : from.getOutputParameters().entrySet()) { - to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setFailureWorkflow( from.getFailureWorkflow() ); - to.setSchemaVersion( from.getSchemaVersion() ); - return to.build(); - } - - public static WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { - WorkflowDef to = new WorkflowDef(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); - to.setVersion( from.getVersion() ); - to.setTasks( from.getTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(LinkedList::new)) ); - to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - Map outputParametersMap = new HashMap(); - for (Map.Entry pair : from.getOutputParametersMap().entrySet()) { - outputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutputParameters(outputParametersMap); - to.setFailureWorkflow( from.getFailureWorkflow() ); - to.setSchemaVersion( from.getSchemaVersion() ); - return to; - } - - public static WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { - WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); - to.setName( from.getName() ); - to.setTaskReferenceName( from.getTaskReferenceName() ); - to.setDescription( from.getDescription() ); - for (Map.Entry pair : from.getInputParameters().entrySet()) { - to.putInputParameters( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setType( from.getType() ); - to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); - to.setCaseValueParam( from.getCaseValueParam() ); - to.setCaseExpression( from.getCaseExpression() ); - for (Map.Entry> pair : from.getDecisionCases().entrySet()) { - to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); - to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); - for (WorkflowTask elem : from.getDefaultCase()) { - to.addDefaultCase( toProto(elem) ); - } - for (List elem : from.getForkTasks()) { - to.addForkTasks( toProto(elem) ); - } - to.setStartDelay( from.getStartDelay() ); - to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); - to.addAllJoinOn( from.getJoinOn() ); - to.setSink( from.getSink() ); - to.setOptional( from.isOptional() ); - return to.build(); - } - - public static WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { - WorkflowTask to = new WorkflowTask(); - to.setName( from.getName() ); - to.setTaskReferenceName( from.getTaskReferenceName() ); - to.setDescription( from.getDescription() ); - Map inputParametersMap = new HashMap(); - for (Map.Entry pair : from.getInputParametersMap().entrySet()) { - inputParametersMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInputParameters(inputParametersMap); - to.setType( from.getType() ); - to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); - to.setCaseValueParam( from.getCaseValueParam() ); - to.setCaseExpression( from.getCaseExpression() ); - Map> decisionCasesMap = new HashMap>(); - for (Map.Entry pair : from.getDecisionCasesMap().entrySet()) { - decisionCasesMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setDecisionCases(decisionCasesMap); - 
to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); - to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); - to.setDefaultCase( from.getDefaultCaseList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setForkTasks( from.getForkTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - to.setStartDelay( from.getStartDelay() ); - to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); - to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); - to.setSink( from.getSink() ); - to.setOptional( from.getOptional() ); - return to; - } - - public static WorkflowTaskPb.WorkflowTask.Type toProto(WorkflowTask.Type from) { - WorkflowTaskPb.WorkflowTask.Type to; - switch (from) { - case SIMPLE: to = WorkflowTaskPb.WorkflowTask.Type.SIMPLE; break; - case DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.DYNAMIC; break; - case FORK_JOIN: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN; break; - case FORK_JOIN_DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; - case DECISION: to = WorkflowTaskPb.WorkflowTask.Type.DECISION; break; - case JOIN: to = WorkflowTaskPb.WorkflowTask.Type.JOIN; break; - case SUB_WORKFLOW: to = WorkflowTaskPb.WorkflowTask.Type.SUB_WORKFLOW; break; - case EVENT: to = WorkflowTaskPb.WorkflowTask.Type.EVENT; break; - case WAIT: to = WorkflowTaskPb.WorkflowTask.Type.WAIT; break; - case USER_DEFINED: to = WorkflowTaskPb.WorkflowTask.Type.USER_DEFINED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static WorkflowTask.Type fromProto(WorkflowTaskPb.WorkflowTask.Type from) { - WorkflowTask.Type to; - switch (from) { - case SIMPLE: to = WorkflowTask.Type.SIMPLE; break; - case DYNAMIC: to = WorkflowTask.Type.DYNAMIC; break; - case FORK_JOIN: to = WorkflowTask.Type.FORK_JOIN; break; - case FORK_JOIN_DYNAMIC: to = WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; - case DECISION: to = WorkflowTask.Type.DECISION; break; - case JOIN: to = WorkflowTask.Type.JOIN; break; - case SUB_WORKFLOW: to = WorkflowTask.Type.SUB_WORKFLOW; break; - case EVENT: to = WorkflowTask.Type.EVENT; break; - case WAIT: to = WorkflowTask.Type.WAIT; break; - case USER_DEFINED: to = WorkflowTask.Type.USER_DEFINED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static TaskSummaryPb.TaskSummary toProto(TaskSummary from) { - TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); - to.setWorkflowId( from.getWorkflowId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setEndTime( from.getEndTime() ); - to.setStatus( toProto( from.getStatus() ) ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setExecutionTime( from.getExecutionTime() ); - to.setQueueWaitTime( from.getQueueWaitTime() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setTaskType( from.getTaskType() ); - to.setInput( from.getInput() ); - to.setOutput( from.getOutput() ); - to.setTaskId( from.getTaskId() ); - return to.build(); - } - - public static WorkflowPb.Workflow toProto(Workflow from) { - WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); - to.setStatus( 
toProto( from.getStatus() ) ); - to.setEndTime( from.getEndTime() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setParentWorkflowId( from.getParentWorkflowId() ); - to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); - for (Task elem : from.getTasks()) { - to.addTasks( toProto(elem) ); - } - for (Map.Entry pair : from.getInput().entrySet()) { - to.putInput( pair.getKey(), toProto( pair.getValue() ) ); - } - for (Map.Entry pair : from.getOutput().entrySet()) { - to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); - } - to.setWorkflowType( from.getWorkflowType() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setSchemaVersion( from.getSchemaVersion() ); - to.setEvent( from.getEvent() ); - to.putAllTaskToDomain( from.getTaskToDomain() ); - to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); - return to.build(); - } - - public static Workflow fromProto(WorkflowPb.Workflow from) { - Workflow to = new Workflow(); - to.setStatus( fromProto( from.getStatus() ) ); - to.setEndTime( from.getEndTime() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setParentWorkflowId( from.getParentWorkflowId() ); - to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); - to.setTasks( from.getTasksList().stream().map(ProtoMapper::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); - Map inputMap = new HashMap(); - for (Map.Entry pair : from.getInputMap().entrySet()) { - inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setInput(inputMap); - Map outputMap = new HashMap(); - for (Map.Entry pair : from.getOutputMap().entrySet()) { - outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); - } - to.setOutput(outputMap); - to.setWorkflowType( from.getWorkflowType() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); - to.setSchemaVersion( from.getSchemaVersion() ); - to.setEvent( from.getEvent() ); - to.setTaskToDomain( from.getTaskToDomainMap() ); - to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNamesList().stream().collect(Collectors.toCollection(HashSet::new)) ); - return to; - } - - public static WorkflowPb.Workflow.WorkflowStatus toProto(Workflow.WorkflowStatus from) { - WorkflowPb.Workflow.WorkflowStatus to; - switch (from) { - case RUNNING: to = WorkflowPb.Workflow.WorkflowStatus.RUNNING; break; - case COMPLETED: to = WorkflowPb.Workflow.WorkflowStatus.COMPLETED; break; - case FAILED: to = WorkflowPb.Workflow.WorkflowStatus.FAILED; break; - case TIMED_OUT: to = WorkflowPb.Workflow.WorkflowStatus.TIMED_OUT; break; - case TERMINATED: to = WorkflowPb.Workflow.WorkflowStatus.TERMINATED; break; - case PAUSED: to = WorkflowPb.Workflow.WorkflowStatus.PAUSED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public static Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from) { - Workflow.WorkflowStatus to; - switch (from) { - case RUNNING: to = Workflow.WorkflowStatus.RUNNING; break; - case COMPLETED: to = Workflow.WorkflowStatus.COMPLETED; break; - case FAILED: to = Workflow.WorkflowStatus.FAILED; break; - case TIMED_OUT: to = Workflow.WorkflowStatus.TIMED_OUT; break; - case TERMINATED: to = 
Workflow.WorkflowStatus.TERMINATED; break;
-            case PAUSED: to = Workflow.WorkflowStatus.PAUSED; break;
-            default: throw new IllegalArgumentException("Unexpected enum constant: " + from);
-        }
-        return to;
-    }
-    public static WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) {
-        WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder();
-        to.setWorkflowType( from.getWorkflowType() );
-        to.setVersion( from.getVersion() );
-        to.setWorkflowId( from.getWorkflowId() );
-        to.setCorrelationId( from.getCorrelationId() );
-        to.setStartTime( from.getStartTime() );
-        to.setUpdateTime( from.getUpdateTime() );
-        to.setEndTime( from.getEndTime() );
-        to.setStatus( toProto( from.getStatus() ) );
-        to.setInput( from.getInput() );
-        to.setOutput( from.getOutput() );
-        to.setReasonForIncompletion( from.getReasonForIncompletion() );
-        to.setExecutionTime( from.getExecutionTime() );
-        to.setEvent( from.getEvent() );
-        to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() );
-        return to.build();
+public final class ProtoMapper extends AbstractProtoMapper {
+    public static final ProtoMapper INSTANCE = new ProtoMapper();
+
+    private ProtoMapper() {}
+
+    @Override
+    public Value toProto(Object val) {
+        Value.Builder builder = Value.newBuilder();
+
+        if (val == null) {
+            builder.setNullValue(NullValue.NULL_VALUE);
+        } else if (val instanceof Boolean) {
+            builder.setBoolValue((Boolean) val);
+        } else if (val instanceof Double) {
+            builder.setNumberValue((Double) val);
+        } else if (val instanceof String) {
+            builder.setStringValue((String) val);
+        } else if (val instanceof Map) {
+            Map<String, Object> map = (Map<String, Object>) val;
+            Struct.Builder struct = Struct.newBuilder();
+            for (Map.Entry<String, Object> pair : map.entrySet()) {
+                struct.putFields(pair.getKey(), toProto(pair.getValue()));
+            }
+            builder.setStructValue(struct.build());
+        } else if (val instanceof List) {
+            ListValue.Builder list = ListValue.newBuilder();
+            for (Object obj : (List) val) {
+                list.addValues(toProto(obj));
+            }
+            builder.setListValue(list.build());
+        } else {
+            throw new ClassCastException("cannot map to Value type: " + val);
+        }
+        return builder.build();
+    }
+
+    @Override
+    public Object fromProto(Value any) {
+        switch (any.getKindCase()) {
+            case NULL_VALUE:
+                return null;
+            case BOOL_VALUE:
+                return any.getBoolValue();
+            case NUMBER_VALUE:
+                return any.getNumberValue();
+            case STRING_VALUE:
+                return any.getStringValue();
+            case STRUCT_VALUE:
+                Struct struct = any.getStructValue();
+                Map<String, Object> map = new HashMap<>();
+                for (Map.Entry<String, Value> pair : struct.getFieldsMap().entrySet()) {
+                    map.put(pair.getKey(), fromProto(pair.getValue()));
+                }
+                return map;
+            case LIST_VALUE:
+                List<Object> list = new ArrayList<>();
+                for (Value val : any.getListValue().getValuesList()) {
+                    list.add(fromProto(val));
+                }
+                return list;
+            default:
+                throw new ClassCastException("unset Value element: " + any);
+        }
+    }
+
+    @Override
+    public List<WorkflowTask> fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) {
+        return list.getTasksList().stream().map(this::fromProto).collect(Collectors.toList());
+    }
+
+    @Override
+    public WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List<WorkflowTask> list) {
+        return WorkflowTaskPb.WorkflowTask.WorkflowTaskList.newBuilder()
+                .addAllTasks(list.stream().map(this::toProto)::iterator)
+                .build();
+    }
 }
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java
deleted file mode 100644
index 897692efed..0000000000
---
a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapperBase.java +++ /dev/null @@ -1,81 +0,0 @@ -package com.netflix.conductor.grpc.server; - -import com.google.protobuf.ListValue; -import com.google.protobuf.NullValue; -import com.google.protobuf.Struct; -import com.google.protobuf.Value; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.proto.WorkflowTaskPb; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class ProtoMapperBase { - public static Value toProto(Object val) { - Value.Builder builder = Value.newBuilder(); - - if (val == null) { - builder.setNullValue(NullValue.NULL_VALUE); - } else if (val instanceof Boolean) { - builder.setBoolValue((Boolean) val); - } else if (val instanceof Double) { - builder.setNumberValue((Double) val); - } else if (val instanceof String) { - builder.setStringValue((String) val); - } else if (val instanceof Map) { - Map map = (Map) val; - Struct.Builder struct = Struct.newBuilder(); - for (Map.Entry pair : map.entrySet()) { - struct.putFields(pair.getKey(), toProto(pair.getValue())); - } - builder.setStructValue(struct.build()); - } else if (val instanceof List) { - ListValue.Builder list = ListValue.newBuilder(); - for (Object obj : (List)val) { - list.addValues(toProto(obj)); - } - builder.setListValue(list.build()); - } else { - throw new ClassCastException("cannot map to Value type: "+val); - } - return builder.build(); - } - - public static Object fromProto(Value any) { - switch (any.getKindCase()) { - case NULL_VALUE: - return null; - case BOOL_VALUE: - return any.getBoolValue(); - case NUMBER_VALUE: - return any.getNumberValue(); - case STRING_VALUE: - return any.getStringValue(); - case STRUCT_VALUE: - Struct struct = any.getStructValue(); - Map map = new HashMap<>(); - for (Map.Entry pair : struct.getFieldsMap().entrySet()) { - map.put(pair.getKey(), fromProto(pair.getValue())); - } - return map; - case LIST_VALUE: - List list = new ArrayList<>(); - for (Value val : any.getListValue().getValuesList()) { - list.add(fromProto(val)); - } - return list; - default: - throw new ClassCastException("unset Value element: "+any); - } - } - - public static List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) { - return null; - } - - public static WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List list) { - return null; - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java index 7f536ecf17..f70a4c6bf7 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -24,6 +24,7 @@ public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { private static final Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class); + private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; private static final int MAX_TASK_COUNT = 100; private static final int POLL_TIMEOUT_MS = 100; @@ -42,7 +43,7 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp try { List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), req.getDomain(), 1, POLL_TIMEOUT_MS); if (!tasks.isEmpty()) { - TaskPb.Task t = ProtoMapper.toProto(tasks.get(0)); + TaskPb.Task t = protoMapper.toProto(tasks.get(0)); response.onNext(t); } 
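/*
 * NOTE: poll() requests a single task (count = 1) with a 100 ms long-poll timeout
 * and emits at most one message; an empty poll simply completes the stream with no
 * payload. Whether the caller observes that as "no task available" or as a missing
 * unary response depends on how the RPC is declared in the .proto file, which is
 * not shown in this patch.
 */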
response.onCompleted(); @@ -61,7 +62,7 @@ public StreamObserver pollStream(StreamObser public void onNext(TaskServicePb.StreamingPollRequest req) { try { for (TaskResultPb.TaskResult result : req.getCompletedList()) { - TaskResult task = ProtoMapper.fromProto(result); + TaskResult task = protoMapper.fromProto(result); taskService.updateTask(task); } @@ -70,7 +71,7 @@ public void onNext(TaskServicePb.StreamingPollRequest req) { req.getCapacity(), POLL_TIMEOUT_MS); for (Task task : newTasks) { - responseObserver.onNext(ProtoMapper.toProto(task)); + responseObserver.onNext(protoMapper.toProto(task)); } } catch (Exception e) { GRPCUtil.onError(observer, e); @@ -99,7 +100,7 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO TaskServicePb.TasksInProgressResponse.newBuilder(); for (Task t : tasks) { - builder.addTasks(ProtoMapper.toProto(t)); + builder.addTasks(protoMapper.toProto(t)); } response.onNext(builder.build()); @@ -113,7 +114,7 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, StreamObserver response) { try { Task t = taskService.getPendingTaskForWorkflow(req.getTaskRefName(), req.getWorkflowId()); - response.onNext(ProtoMapper.toProto(t)); + response.onNext(protoMapper.toProto(t)); response.onCompleted(); } catch (Exception e) { GRPCUtil.onError(response, e); @@ -123,7 +124,7 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre @Override public void updateTask(TaskResultPb.TaskResult req, StreamObserver response) { try { - TaskResult task = ProtoMapper.fromProto(req); + TaskResult task = protoMapper.fromProto(req); taskService.updateTask(task); TaskServicePb.TaskUpdateResponse resp = TaskServicePb.TaskUpdateResponse @@ -162,7 +163,7 @@ public void getLogs(TaskServicePb.TaskId req, StreamObserver response) { - StartWorkflowRequest request = ProtoMapper.fromProto(pbRequest); + StartWorkflowRequest request = protoMapper.fromProto(pbRequest); WorkflowDef def = metadata.getWorkflowDef(request.getName(), request.getVersion()); if(def == null){ response.onError(Status.NOT_FOUND @@ -90,7 +91,7 @@ public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserv WorkflowServicePb.GetWorkflowsResponse.Workflows.Builder pbWorkflows = WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder(); for (Workflow wf : service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks)) { - pbWorkflows.addWorkflows(ProtoMapper.toProto(wf)); + pbWorkflows.addWorkflows(protoMapper.toProto(wf)); } builder.putWorkflowsById(correlationId, pbWorkflows.build()); } @@ -103,7 +104,7 @@ public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserv public void getWorkflowStatus(WorkflowServicePb.GetWorkflowStatusRequest req, StreamObserver response) { try { Workflow workflow = service.getExecutionStatus(req.getWorkflowId(), req.getIncludeTasks()); - response.onNext(ProtoMapper.toProto(workflow)); + response.onNext(protoMapper.toProto(workflow)); response.onCompleted(); } catch (Exception e) { GRPCUtil.onError(response, e); @@ -175,7 +176,7 @@ public void resumeWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { - SkipTaskRequest skipTask = ProtoMapper.fromProto(req.getRequest()); + SkipTaskRequest skipTask = protoMapper.fromProto(req.getRequest()); executor.skipTaskFromWorkflow(req.getWorkflowId(), req.getTaskReferenceName(), skipTask); response.onCompleted(); } catch 
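/*
 * NOTE: every handler funnels failures through GRPCUtil.onError(response, e), which
 * maps the Throwable onto Status.INTERNAL (see the GRPCUtil diff earlier). The full
 * chaining is cut off by the hunk boundary there; a plausible shape, stated as an
 * assumption rather than the actual implementation:
 *
 *   response.onError(Status.INTERNAL
 *           .withDescription(t.getMessage())
 *           .augmentDescription(stacktraceToString(t))
 *           .asRuntimeException());
 */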
(Exception e) { @@ -186,7 +187,7 @@ public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamOb @Override public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, StreamObserver response) { try { - String id = executor.rerun(ProtoMapper.fromProto(req)); + String id = executor.rerun(protoMapper.fromProto(req)); response.onNext(newWorkflowId(id)); response.onCompleted(); } catch (Exception e) { @@ -261,7 +262,7 @@ private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamOb SearchPb.WorkflowSummarySearchResult.newBuilder() .setTotalHits(search.getTotalHits()) .addAllResults( - search.getResults().stream().map(ProtoMapper::toProto)::iterator + search.getResults().stream().map(protoMapper::toProto)::iterator ).build() ); response.onCompleted(); From b9a903cb38861c4ac935acf765f827c47dea9820 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 17:23:27 +0200 Subject: [PATCH 021/163] grpc-server: Remove apt from build.gradle --- grpc-server/build.gradle | 4 ---- 1 file changed, 4 deletions(-) diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index 86f8e1c04d..faee1a45fc 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -1,7 +1,3 @@ -plugins { - id 'net.ltgt.apt' version '0.8' -} - dependencies { compile project(':conductor-common') compile project(':conductor-core') From f80a7e93965848c02c3aade59bd7e40f2609c61d Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 17:24:31 +0200 Subject: [PATCH 022/163] grpc-server: Mark all member fields as Final --- .../conductor/grpc/server/EventServiceImpl.java | 4 ++-- .../conductor/grpc/server/MetadataServiceImpl.java | 2 +- .../conductor/grpc/server/WorkflowServiceImpl.java | 13 ++++--------- 3 files changed, 7 insertions(+), 12 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java index aabde4911c..d37445b135 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java @@ -16,8 +16,8 @@ public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private MetadataService service; - private EventProcessor ep; + private final MetadataService service; + private final EventProcessor ep; @Inject public EventServiceImpl(MetadataService service, EventProcessor ep) { diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index 000839bf55..fd207a83da 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -19,7 +19,7 @@ public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private MetadataService service; + private final MetadataService service; @Inject public MetadataServiceImpl(MetadataService service) { diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java index 8dfa7a38f5..51e2cf4604 100644 --- 
a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java @@ -26,20 +26,15 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; public class WorkflowServiceImpl extends WorkflowServiceGrpc.WorkflowServiceImplBase { private static final Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class); private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private WorkflowExecutor executor; - - private ExecutionService service; - - private MetadataService metadata; - - private int maxSearchSize; + private final WorkflowExecutor executor; + private final ExecutionService service; + private final MetadataService metadata; + private final int maxSearchSize; @Inject public WorkflowServiceImpl(WorkflowExecutor executor, ExecutionService service, MetadataService metadata, Configuration config) { From 84b0671287701377d2d98783f33bc53efd77eb63 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 17:55:36 +0200 Subject: [PATCH 023/163] grpc-server: A more functional approach --- .../grpc/server/EventServiceImpl.java | 16 +++----- .../grpc/server/MetadataServiceImpl.java | 23 ++++------- .../grpc/server/TaskServiceImpl.java | 39 +++++++------------ .../grpc/server/WorkflowServiceImpl.java | 13 ++++--- 4 files changed, 36 insertions(+), 55 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java index d37445b135..b92e556113 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java @@ -44,17 +44,14 @@ public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, Str @Override public void getEventHandlers(Empty req, StreamObserver response) { - for (EventHandler eh : service.getEventHandlers()) { - response.onNext(protoMapper.toProto(eh)); - } + service.getEventHandlers().stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } @Override public void getEventHandlersForEvent(EventServicePb.GetEventHandlersRequest req, StreamObserver response) { - for (EventHandler eh : service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly())) { - response.onNext(protoMapper.toProto(eh)); - } + service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly()) + .stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } @@ -72,11 +69,10 @@ public void getQueues(Empty req, StreamObserver response) { EventServicePb.GetQueueSizesResponse.Builder builder = EventServicePb.GetQueueSizesResponse.newBuilder(); for (Map.Entry> pair : ep.getQueueSizes().entrySet()) { - EventServicePb.GetQueueSizesResponse.QueueInfo info = + builder.putEventToQueueInfo(pair.getKey(), EventServicePb.GetQueueSizesResponse.QueueInfo.newBuilder() - .putAllQueueSizes(pair.getValue()) - .build(); - builder.putEventToQueueInfo(pair.getKey(), info); + .putAllQueueSizes(pair.getValue()).build() + ); } response.onNext(builder.build()); response.onCompleted(); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index fd207a83da..862c445683 100644 --- 
a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -15,6 +15,7 @@ import javax.inject.Inject; import java.util.ArrayList; import java.util.List; +import java.util.stream.Collectors; public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; @@ -38,10 +39,8 @@ public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver @Override public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) { - List workflows = new ArrayList<>(); - for (WorkflowDefPb.WorkflowDef def : req.getDefsList()) { - workflows.add(protoMapper.fromProto(def)); - } + List workflows = req.getDefsList().stream() + .map(protoMapper::fromProto).collect(Collectors.toList()); try { service.updateWorkflowDef(workflows); @@ -68,19 +67,15 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver @Override public void getAllWorkflows(Empty _request, StreamObserver response) { - for (WorkflowDef def : service.getWorkflowDefs()) { - response.onNext(protoMapper.toProto(def)); - } + service.getWorkflowDefs().stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } @Override public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { - List allTasks = new ArrayList<>(); - for (TaskDefPb.TaskDef task : req.getDefsList()) { - allTasks.add(protoMapper.fromProto(task)); - } - service.registerTaskDef(allTasks); + service.registerTaskDef( + req.getDefsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) + ); response.onCompleted(); } @@ -92,9 +87,7 @@ public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { @Override public void getAllTasks(Empty _request, StreamObserver response) { - for (TaskDef def : service.getTaskDefs()) { - response.onNext(protoMapper.toProto(def)); - } + service.getTaskDefs().stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java index f70a4c6bf7..329c6d7452 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -95,15 +95,12 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO final int count = (req.getCount() != 0) ? 
req.getCount() : MAX_TASK_COUNT; try { - List tasks = taskService.getTasks(req.getTaskType(), req.getStartKey(), count); - TaskServicePb.TasksInProgressResponse.Builder builder = - TaskServicePb.TasksInProgressResponse.newBuilder(); - - for (Task t : tasks) { - builder.addTasks(protoMapper.toProto(t)); - } - - response.onNext(builder.build()); + response.onNext( + TaskServicePb.TasksInProgressResponse.newBuilder().addAllTasks( + taskService.getTasks(req.getTaskType(), req.getStartKey(), count).stream() + .map(protoMapper::toProto)::iterator + ).build() + ); response.onCompleted(); } catch (Exception e) { GRPCUtil.onError(response, e); @@ -127,11 +124,10 @@ public void updateTask(TaskResultPb.TaskResult req, StreamObserver response) { try { boolean ack = taskService.ackTaskReceived(req.getTaskId()); - TaskServicePb.AckTaskResponse resp = TaskServicePb.AckTaskResponse - .newBuilder().setAck(ack).build(); - response.onNext(resp); + response.onNext(TaskServicePb.AckTaskResponse.newBuilder().setAck(ack).build()); response.onCompleted(); } catch (Exception e) { GRPCUtil.onError(response, e); @@ -160,13 +154,10 @@ public void addLog(TaskServicePb.AddLogRequest req, StreamObserver respon @Override public void getLogs(TaskServicePb.TaskId req, StreamObserver response) { List logs = taskService.getTaskLogs(req.getTaskId()); - TaskServicePb.GetLogsResponse.Builder builder = TaskServicePb.GetLogsResponse.newBuilder(); - - for (TaskExecLog l : logs) { - builder.addLogs(protoMapper.toProto(l)); - } - - response.onNext(builder.build()); + response.onNext(TaskServicePb.GetLogsResponse.newBuilder() + .addAllLogs(logs.stream().map(protoMapper::toProto)::iterator) + .build() + ); response.onCompleted(); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java index 51e2cf4604..cc8905ff5f 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java @@ -17,6 +17,7 @@ import com.netflix.conductor.grpc.WorkflowServicePb; import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; +import com.sun.corba.se.spi.orbutil.threadpool.Work; import io.grpc.Status; import io.grpc.stub.StreamObserver; import org.slf4j.Logger; @@ -83,12 +84,12 @@ public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserv WorkflowServicePb.GetWorkflowsResponse.Builder builder = WorkflowServicePb.GetWorkflowsResponse.newBuilder(); for (String correlationId : req.getCorrelationIdList()) { - WorkflowServicePb.GetWorkflowsResponse.Workflows.Builder pbWorkflows = - WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder(); - for (Workflow wf : service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks)) { - pbWorkflows.addWorkflows(protoMapper.toProto(wf)); - } - builder.putWorkflowsById(correlationId, pbWorkflows.build()); + List workflows = service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks); + builder.putWorkflowsById(correlationId, + WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder() + .addAllWorkflows(workflows.stream().map(protoMapper::toProto)::iterator) + .build() + ); } response.onNext(builder.build()); From 75bf49875a35278d73098468d979e50189f27416 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 31 May 2018 18:09:33 +0200 Subject: [PATCH 024/163] 
grpc-server: Simplify GRPCUtil --- .../conductor/grpc/server/GRPCUtil.java | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java index aea8206d07..34634f74ac 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java @@ -2,24 +2,13 @@ import io.grpc.Status; import io.grpc.stub.StreamObserver; +import org.apache.commons.lang3.exception.ExceptionUtils; -import java.io.PrintWriter; -import java.io.StringWriter; - -public class GRPCUtil { - private GRPCUtil() {} - - private static String stacktraceToString(Throwable e) { - StringWriter stringWriter = new StringWriter(); - PrintWriter printWriter = new PrintWriter(stringWriter); - e.printStackTrace(printWriter); - return stringWriter.toString(); - } - - public static void onError(StreamObserver response, Throwable t) { +public interface GRPCUtil { + static void onError(StreamObserver response, Throwable t) { response.onError(Status.INTERNAL .withDescription(t.getMessage()) - .augmentDescription(stacktraceToString(t)) + .augmentDescription(ExceptionUtils.getStackTrace(t)) .withCause(t) .asException()); } From be6cf505d50bb18e9ed487429ee7eb20a60b5b09 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Fri, 1 Jun 2018 13:00:18 +0200 Subject: [PATCH 025/163] grpc-server: Make more functional --- .../com/netflix/conductor/grpc/server/GRPCServer.java | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index df2f2f90cf..0358d1199e 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -2,8 +2,6 @@ import com.google.inject.Inject; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.grpc.TaskServiceGrpc; -import com.netflix.conductor.grpc.WorkflowServiceGrpc; import io.grpc.BindableService; import io.grpc.Server; import io.grpc.ServerBuilder; @@ -12,6 +10,7 @@ import javax.inject.Singleton; import java.io.IOException; +import java.util.Arrays; @Singleton public class GRPCServer { @@ -27,10 +26,7 @@ public GRPCServer(Configuration conf, BindableService... 
services) { final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); ServerBuilder builder = ServerBuilder.forPort(port); - for (BindableService s : services) { - builder.addService(s); - } - + Arrays.stream(services).forEach(builder::addService); server = builder.build(); } From a2e88212f3ad05d5cf1505b50a7c2ff559cee1f1 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Fri, 1 Jun 2018 13:16:47 +0200 Subject: [PATCH 026/163] grpc-server: Refactor into GRPCHelper and add logging --- .../server/{GRPCUtil.java => GRPCHelper.java} | 12 ++++++-- .../grpc/server/MetadataServiceImpl.java | 10 ++++--- .../grpc/server/TaskServiceImpl.java | 13 +++++---- .../grpc/server/WorkflowServiceImpl.java | 28 +++++++++---------- 4 files changed, 37 insertions(+), 26 deletions(-) rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{GRPCUtil.java => GRPCHelper.java} (59%) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java similarity index 59% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java index 34634f74ac..0dbd37acfd 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCUtil.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java @@ -3,9 +3,17 @@ import io.grpc.Status; import io.grpc.stub.StreamObserver; import org.apache.commons.lang3.exception.ExceptionUtils; +import org.slf4j.Logger; -public interface GRPCUtil { - static void onError(StreamObserver response, Throwable t) { +public class GRPCHelper { + private final Logger logger; + + public GRPCHelper(Logger log) { + this.logger = log; + } + + void onError(StreamObserver response, Throwable t) { + logger.error("error during GRPC request", t); response.onError(Status.INTERNAL .withDescription(t.getMessage()) .augmentDescription(ExceptionUtils.getStackTrace(t)) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index 862c445683..80a3d6fe18 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -1,7 +1,6 @@ package com.netflix.conductor.grpc.server; import com.google.protobuf.Empty; -import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.grpc.MetadataServiceGrpc; @@ -11,14 +10,17 @@ import com.netflix.conductor.service.MetadataService; import io.grpc.Status; import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.inject.Inject; -import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { + private static final Logger logger = LoggerFactory.getLogger(MetadataServiceImpl.class); private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; + private static final GRPCHelper grpcHelper = new GRPCHelper(logger); private final MetadataService service; @@ -33,7 +35,7 @@ public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver 
service.registerWorkflowDef(protoMapper.fromProto(req)); response.onCompleted(); } catch (Exception e) { - GRPCUtil.onError(response, e); + grpcHelper.onError(response, e); } } @@ -46,7 +48,7 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream service.updateWorkflowDef(workflows); response.onCompleted(); } catch (Exception e) { - GRPCUtil.onError(response, e); + grpcHelper.onError(response, e); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java index 329c6d7452..d3f3bdd1ef 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java @@ -25,6 +25,7 @@ public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { private static final Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class); private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; + private static final GRPCHelper grpcHelper = new GRPCHelper(logger); private static final int MAX_TASK_COUNT = 100; private static final int POLL_TIMEOUT_MS = 100; @@ -48,7 +49,7 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp } response.onCompleted(); } catch (Exception e) { - GRPCUtil.onError(response, e); + grpcHelper.onError(response, e); } } @@ -74,7 +75,7 @@ public void onNext(TaskServicePb.StreamingPollRequest req) { responseObserver.onNext(protoMapper.toProto(task)); } } catch (Exception e) { - GRPCUtil.onError(observer, e); + grpcHelper.onError(observer, e); } } @@ -103,7 +104,7 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO ); response.onCompleted(); } catch (Exception e) { - GRPCUtil.onError(response, e); + grpcHelper.onError(response, e); } } @@ -114,7 +115,7 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre response.onNext(protoMapper.toProto(t)); response.onCompleted(); } catch (Exception e) { - GRPCUtil.onError(response, e); + grpcHelper.onError(response, e); } } @@ -130,7 +131,7 @@ public void updateTask(TaskResultPb.TaskResult req, StreamObserver Date: Wed, 6 Jun 2018 18:13:43 +0200 Subject: [PATCH 027/163] protogen: Enforce Java package explicitly --- .../main/java/com/netflix/conductor/protogen/File.java | 10 +++++++--- .../java/com/netflix/conductor/protogen/ProtoGen.java | 6 ++++-- protogen/templates/file.proto | 1 + 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/File.java b/protogen/src/main/java/com/netflix/conductor/protogen/File.java index f7258c8d99..7c00640117 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/File.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/File.java @@ -13,7 +13,7 @@ public class File { public File(Class object) { String className = object.getSimpleName() + PROTO_SUFFIX; - baseClass = ClassName.get(ProtoGen.GENERATED_PROTO_PACKAGE, className); + baseClass = ClassName.get(ProtoGen.PROTO_JAVA_PACKAGE_NAME, className); this.message = new Message(object, AbstractType.baseClass(baseClass, this)); } @@ -26,11 +26,15 @@ public String getFilePath() { } public String getPackageName() { - return ProtoGen.GENERATED_PROTO_PACKAGE; + return ProtoGen.PROTO_PACKAGE_NAME; + } + + public String getJavaPackageName() { + return ProtoGen.PROTO_JAVA_PACKAGE_NAME; } public String getGoPackage() { - return 
ProtoGen.GENERATED_GO_PACKAGE; + return ProtoGen.PROTO_GO_PACKAGE_NAME; } public Element getMessage() { diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java index 7e0726d6b6..9b56548024 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java @@ -16,10 +16,12 @@ import java.util.*; public class ProtoGen { - public static String GENERATED_PROTO_PACKAGE = "com.netflix.conductor.proto"; + public static String PROTO_PACKAGE_NAME = "conductor.proto"; + public static String PROTO_JAVA_PACKAGE_NAME = "com.netflix.conductor.proto"; + public static String PROTO_GO_PACKAGE_NAME = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc.server"; public static String GENERATOR_NAME = "com.netflix.conductor.protogen.ProtoGen"; - public static String GENERATED_GO_PACKAGE = "github.com/netflix/conductor/client/gogrpc/conductor/model"; private List files = new ArrayList<>(); diff --git a/protogen/templates/file.proto b/protogen/templates/file.proto index 23dc9401ae..a8a940fca3 100644 --- a/protogen/templates/file.proto +++ b/protogen/templates/file.proto @@ -5,6 +5,7 @@ package {{packageName}}; import "{{this}}"; {{/includes}} +option java_package = "{{javaPackageName}}"; option java_outer_classname = "{{javaClassName}}"; option go_package = "{{goPackage}}"; From a9248252ace9cde92129601433a000cd9e5fd42b Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 6 Jun 2018 18:14:04 +0200 Subject: [PATCH 028/163] grpc: Declare all Java package names explicitly We were previously using the default Proto package names as the java package. This results in good codegen for Java and Go, but all the other Protobufs and GRPC generators based around C (particularly the Ruby generator) would end up namespacing their generated code as "Com::Netflix::Conductor::...`, which is not acceptable or idomatic Ruby. Since there's is no `option rb_package` or `option py_package` in the Protobuf generator, we're instead opting for giving Ruby/Python-friendly package names to the Protobuf files, and explicitly declaring the Java package with `option java_package`. This change should be fully backwards compatible. 
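As a concrete illustration of the convention this change applies, here is a minimal sketch of a .proto file under the new scheme (the file and message names below are hypothetical; the package and option values are the ones used throughout the diffs that follow):

    syntax = "proto3";
    // Ruby/Python-friendly package name: the C-based generators derive
    // their namespaces from this (e.g. Conductor::Proto in Ruby).
    package conductor.proto;

    // Java codegen keeps its original package, now declared explicitly.
    option java_package = "com.netflix.conductor.proto";
    option java_outer_classname = "ExamplePb";
    option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model";

    message Example {
        string id = 1;
    }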
--- grpc/src/main/proto/grpc/event_service.proto | 11 ++++---- .../main/proto/grpc/metadata_service.proto | 19 +++++++------- grpc/src/main/proto/grpc/search.proto | 5 ++-- grpc/src/main/proto/grpc/task_service.proto | 17 +++++++------ .../main/proto/grpc/workflow_service.proto | 13 +++++----- .../proto/model/dynamicforkjointask.proto | 3 ++- .../proto/model/dynamicforkjointasklist.proto | 3 ++- .../src/main/proto/model/eventexecution.proto | 25 ------------------- grpc/src/main/proto/model/eventhandler.proto | 3 ++- grpc/src/main/proto/model/polldata.proto | 3 ++- .../proto/model/rerunworkflowrequest.proto | 3 ++- .../main/proto/model/skiptaskrequest.proto | 3 ++- .../proto/model/startworkflowrequest.proto | 3 ++- .../main/proto/model/subworkflowparams.proto | 3 ++- grpc/src/main/proto/model/task.proto | 3 ++- grpc/src/main/proto/model/taskdef.proto | 3 ++- grpc/src/main/proto/model/taskexeclog.proto | 3 ++- grpc/src/main/proto/model/taskresult.proto | 3 ++- grpc/src/main/proto/model/tasksummary.proto | 3 ++- grpc/src/main/proto/model/workflow.proto | 3 ++- grpc/src/main/proto/model/workflowdef.proto | 3 ++- .../main/proto/model/workflowsummary.proto | 3 ++- grpc/src/main/proto/model/workflowtask.proto | 3 ++- 23 files changed, 69 insertions(+), 72 deletions(-) diff --git a/grpc/src/main/proto/grpc/event_service.proto b/grpc/src/main/proto/grpc/event_service.proto index 2a1028a1b4..2c0a2afac2 100644 --- a/grpc/src/main/proto/grpc/event_service.proto +++ b/grpc/src/main/proto/grpc/event_service.proto @@ -1,27 +1,28 @@ syntax = "proto3"; -package com.netflix.conductor.grpc; +package conductor.grpc; import "google/protobuf/empty.proto"; import "model/eventhandler.proto"; +option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "EventServicePb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service EventService { // POST / - rpc AddEventHandler(com.netflix.conductor.proto.EventHandler) returns (google.protobuf.Empty); + rpc AddEventHandler(conductor.proto.EventHandler) returns (google.protobuf.Empty); // PUT / - rpc UpdateEventHandler(com.netflix.conductor.proto.EventHandler) returns (google.protobuf.Empty); + rpc UpdateEventHandler(conductor.proto.EventHandler) returns (google.protobuf.Empty); // DELETE /{name} rpc RemoveEventHandler(RemoveEventHandlerRequest) returns (google.protobuf.Empty); // GET / - rpc GetEventHandlers(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.EventHandler); + rpc GetEventHandlers(google.protobuf.Empty) returns (stream conductor.proto.EventHandler); // GET /{name} - rpc GetEventHandlersForEvent(GetEventHandlersRequest) returns (stream com.netflix.conductor.proto.EventHandler); + rpc GetEventHandlersForEvent(GetEventHandlersRequest) returns (stream conductor.proto.EventHandler); // GET /queues rpc GetQueues(google.protobuf.Empty) returns (GetQueuesResponse); diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto index 92402a9f36..e92a85a860 100644 --- a/grpc/src/main/proto/grpc/metadata_service.proto +++ b/grpc/src/main/proto/grpc/metadata_service.proto @@ -1,48 +1,49 @@ syntax = "proto3"; -package com.netflix.conductor.grpc; +package conductor.grpc; import "google/protobuf/empty.proto"; import "model/taskdef.proto"; import "model/workflowdef.proto"; +option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "MetadataServicePb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; 
service MetadataService { // POST /workflow - rpc CreateWorkflow(com.netflix.conductor.proto.WorkflowDef) returns (google.protobuf.Empty); + rpc CreateWorkflow(conductor.proto.WorkflowDef) returns (google.protobuf.Empty); // PUT /workflow rpc UpdateWorkflows(UpdateWorkflowsRequest) returns (google.protobuf.Empty); // GET /workflow/{name} - rpc GetWorkflow(GetWorkflowRequest) returns (com.netflix.conductor.proto.WorkflowDef); + rpc GetWorkflow(GetWorkflowRequest) returns (conductor.proto.WorkflowDef); // GET /workflow - rpc GetAllWorkflows(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.WorkflowDef); + rpc GetAllWorkflows(google.protobuf.Empty) returns (stream conductor.proto.WorkflowDef); // POST /taskdefs rpc CreateTasks(CreateTasksRequest) returns (google.protobuf.Empty); // PUT /taskdefs - rpc UpdateTask(com.netflix.conductor.proto.TaskDef) returns (google.protobuf.Empty); + rpc UpdateTask(conductor.proto.TaskDef) returns (google.protobuf.Empty); // GET /taskdefs/{tasktype} - rpc GetTask(GetTaskRequest) returns (com.netflix.conductor.proto.TaskDef); + rpc GetTask(GetTaskRequest) returns (conductor.proto.TaskDef); // GET /taskdefs - rpc GetAllTasks(google.protobuf.Empty) returns (stream com.netflix.conductor.proto.TaskDef); + rpc GetAllTasks(google.protobuf.Empty) returns (stream conductor.proto.TaskDef); // DELETE /taskdefs/{tasktype} rpc DeleteTask(GetTaskRequest) returns (google.protobuf.Empty); } message UpdateWorkflowsRequest { - repeated com.netflix.conductor.proto.WorkflowDef defs = 1; + repeated conductor.proto.WorkflowDef defs = 1; } message CreateTasksRequest { - repeated com.netflix.conductor.proto.TaskDef defs = 1; + repeated conductor.proto.TaskDef defs = 1; } message GetWorkflowRequest { diff --git a/grpc/src/main/proto/grpc/search.proto b/grpc/src/main/proto/grpc/search.proto index 48faf2c746..41c7f4b58d 100644 --- a/grpc/src/main/proto/grpc/search.proto +++ b/grpc/src/main/proto/grpc/search.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.grpc; +package conductor.grpc; import "model/workflowsummary.proto"; +option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "SearchPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; @@ -16,5 +17,5 @@ message SearchRequest { message WorkflowSummarySearchResult { int64 total_hits = 1; - repeated com.netflix.conductor.proto.WorkflowSummary results = 2; + repeated conductor.proto.WorkflowSummary results = 2; } \ No newline at end of file diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto index 901d5a4281..db853598f9 100644 --- a/grpc/src/main/proto/grpc/task_service.proto +++ b/grpc/src/main/proto/grpc/task_service.proto @@ -1,20 +1,21 @@ syntax = "proto3"; -package com.netflix.conductor.grpc; +package conductor.grpc; import "google/protobuf/empty.proto"; import "model/taskexeclog.proto"; import "model/taskresult.proto"; import "model/task.proto"; +option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "TaskServicePb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service TaskService { - rpc Poll(PollRequest) returns (com.netflix.conductor.proto.Task); - rpc PollStream(stream StreamingPollRequest) returns (stream com.netflix.conductor.proto.Task); + rpc Poll(PollRequest) returns (conductor.proto.Task); + rpc PollStream(stream StreamingPollRequest) returns (stream conductor.proto.Task); rpc 
GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse); - rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (com.netflix.conductor.proto.Task); - rpc UpdateTask(com.netflix.conductor.proto.TaskResult) returns (TaskUpdateResponse); + rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (conductor.proto.Task); + rpc UpdateTask(conductor.proto.TaskResult) returns (TaskUpdateResponse); rpc AckTask(AckTaskRequest) returns (AckTaskResponse); rpc AddLog(AddLogRequest) returns (google.protobuf.Empty); @@ -32,7 +33,7 @@ message StreamingPollRequest { string worker_id = 2; string domain = 3; int32 capacity = 4; - repeated com.netflix.conductor.proto.TaskResult completed = 5; + repeated conductor.proto.TaskResult completed = 5; } message TasksInProgressRequest { @@ -42,7 +43,7 @@ message TasksInProgressRequest { } message TasksInProgressResponse { - repeated com.netflix.conductor.proto.Task tasks = 1; + repeated conductor.proto.Task tasks = 1; } message PendingTaskRequest { @@ -73,5 +74,5 @@ message TaskId { } message GetLogsResponse { - repeated com.netflix.conductor.proto.TaskExecLog logs = 1; + repeated conductor.proto.TaskExecLog logs = 1; } diff --git a/grpc/src/main/proto/grpc/workflow_service.proto b/grpc/src/main/proto/grpc/workflow_service.proto index 4f01c85cd2..79f4e48571 100644 --- a/grpc/src/main/proto/grpc/workflow_service.proto +++ b/grpc/src/main/proto/grpc/workflow_service.proto @@ -1,5 +1,5 @@ syntax = "proto3"; -package com.netflix.conductor.grpc; +package conductor.grpc; import "google/protobuf/empty.proto"; import "grpc/search.proto"; @@ -8,18 +8,19 @@ import "model/skiptaskrequest.proto"; import "model/startworkflowrequest.proto"; import "model/rerunworkflowrequest.proto"; +option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "WorkflowServicePb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service WorkflowService { // POST / - rpc StartWorkflow(com.netflix.conductor.proto.StartWorkflowRequest) returns (WorkflowId); + rpc StartWorkflow(conductor.proto.StartWorkflowRequest) returns (WorkflowId); // GET /{name}/correlated/{correlationId} rpc GetWorkflows(GetWorkflowsRequest) returns (GetWorkflowsResponse); // GET /{workflowId} - rpc GetWorkflowStatus(GetWorkflowStatusRequest) returns (com.netflix.conductor.proto.Workflow); + rpc GetWorkflowStatus(GetWorkflowStatusRequest) returns (conductor.proto.Workflow); // DELETE /{workflodId}/remove rpc RemoveWorkflow(RemoveWorkflowRequest) returns (google.protobuf.Empty); @@ -40,7 +41,7 @@ service WorkflowService { rpc SkipTaskFromWorkflow(SkipTaskRequest) returns (google.protobuf.Empty); // POST /{workflowId}/rerun - rpc RerunWorkflow(com.netflix.conductor.proto.RerunWorkflowRequest) returns (WorkflowId); + rpc RerunWorkflow(conductor.proto.RerunWorkflowRequest) returns (WorkflowId); // POST /{workflowId}/restart rpc RestartWorkflow(WorkflowId) returns (google.protobuf.Empty); @@ -68,7 +69,7 @@ message GetWorkflowsRequest { message GetWorkflowsResponse { message Workflows { - repeated com.netflix.conductor.proto.Workflow workflows = 1; + repeated conductor.proto.Workflow workflows = 1; } map workflows_by_id = 1; } @@ -101,7 +102,7 @@ message WorkflowId { message SkipTaskRequest { string workflow_id = 1; string task_reference_name = 2; - com.netflix.conductor.proto.SkipTaskRequest request = 3; + conductor.proto.SkipTaskRequest request = 3; } message TerminateWorkflowRequest { diff --git a/grpc/src/main/proto/model/dynamicforkjointask.proto 
b/grpc/src/main/proto/model/dynamicforkjointask.proto index e7f33426ac..12e66bb1e6 100644 --- a/grpc/src/main/proto/model/dynamicforkjointask.proto +++ b/grpc/src/main/proto/model/dynamicforkjointask.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "DynamicForkJoinTaskPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/dynamicforkjointasklist.proto b/grpc/src/main/proto/model/dynamicforkjointasklist.proto index 0fc1ccb4e6..3ac3f44d9e 100644 --- a/grpc/src/main/proto/model/dynamicforkjointasklist.proto +++ b/grpc/src/main/proto/model/dynamicforkjointasklist.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/dynamicforkjointask.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "DynamicForkJoinTaskListPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/eventexecution.proto b/grpc/src/main/proto/model/eventexecution.proto index db39e00305..e69de29bb2 100644 --- a/grpc/src/main/proto/model/eventexecution.proto +++ b/grpc/src/main/proto/model/eventexecution.proto @@ -1,25 +0,0 @@ -syntax = "proto3"; -package com.netflix.conductor.proto; - -import "model/eventhandler.proto"; -import "google/protobuf/struct.proto"; - -option java_outer_classname = "EventExecutionPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - -message EventExecution { - enum Status { - IN_PROGRESS = 0; - COMPLETED = 1; - FAILED = 2; - SKIPPED = 3; - } - string id = 1; - string message_id = 2; - string name = 3; - string event = 4; - int64 created = 5; - EventExecution.Status status = 6; - EventHandler.Action.Type action = 7; - map output = 8; -} diff --git a/grpc/src/main/proto/model/eventhandler.proto b/grpc/src/main/proto/model/eventhandler.proto index b0c8909cb5..d4ff3611f8 100644 --- a/grpc/src/main/proto/model/eventhandler.proto +++ b/grpc/src/main/proto/model/eventhandler.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "EventHandlerPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/polldata.proto b/grpc/src/main/proto/model/polldata.proto index b19b579e2c..59169430c3 100644 --- a/grpc/src/main/proto/model/polldata.proto +++ b/grpc/src/main/proto/model/polldata.proto @@ -1,7 +1,8 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "PollDataPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/rerunworkflowrequest.proto b/grpc/src/main/proto/model/rerunworkflowrequest.proto index a23b59efda..280e8cfae6 100644 --- a/grpc/src/main/proto/model/rerunworkflowrequest.proto +++ b/grpc/src/main/proto/model/rerunworkflowrequest.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option 
java_outer_classname = "RerunWorkflowRequestPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/skiptaskrequest.proto b/grpc/src/main/proto/model/skiptaskrequest.proto index 9b8f77f6cf..aef7ac2b93 100644 --- a/grpc/src/main/proto/model/skiptaskrequest.proto +++ b/grpc/src/main/proto/model/skiptaskrequest.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "SkipTaskRequestPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/startworkflowrequest.proto b/grpc/src/main/proto/model/startworkflowrequest.proto index 61fe3db0ef..a575b3adc4 100644 --- a/grpc/src/main/proto/model/startworkflowrequest.proto +++ b/grpc/src/main/proto/model/startworkflowrequest.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "StartWorkflowRequestPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/subworkflowparams.proto b/grpc/src/main/proto/model/subworkflowparams.proto index 50fa03846b..6720f2341b 100644 --- a/grpc/src/main/proto/model/subworkflowparams.proto +++ b/grpc/src/main/proto/model/subworkflowparams.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "SubWorkflowParamsPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/task.proto b/grpc/src/main/proto/model/task.proto index 1176cc729e..1bc83d5230 100644 --- a/grpc/src/main/proto/model/task.proto +++ b/grpc/src/main/proto/model/task.proto @@ -1,9 +1,10 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/workflowtask.proto"; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/taskdef.proto b/grpc/src/main/proto/model/taskdef.proto index ae5f833bd8..71fcf13f2b 100644 --- a/grpc/src/main/proto/model/taskdef.proto +++ b/grpc/src/main/proto/model/taskdef.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskDefPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/taskexeclog.proto b/grpc/src/main/proto/model/taskexeclog.proto index 3b3faf8f4a..f67b2e4b2e 100644 --- a/grpc/src/main/proto/model/taskexeclog.proto +++ b/grpc/src/main/proto/model/taskexeclog.proto @@ -1,7 +1,8 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskExecLogPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git 
a/grpc/src/main/proto/model/taskresult.proto b/grpc/src/main/proto/model/taskresult.proto index 07d51e0f10..abece11c27 100644 --- a/grpc/src/main/proto/model/taskresult.proto +++ b/grpc/src/main/proto/model/taskresult.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskResultPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/tasksummary.proto b/grpc/src/main/proto/model/tasksummary.proto index 2ff72d2afa..6ad99d0dc3 100644 --- a/grpc/src/main/proto/model/tasksummary.proto +++ b/grpc/src/main/proto/model/tasksummary.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/task.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskSummaryPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/workflow.proto b/grpc/src/main/proto/model/workflow.proto index 59575d57e0..accf59546b 100644 --- a/grpc/src/main/proto/model/workflow.proto +++ b/grpc/src/main/proto/model/workflow.proto @@ -1,9 +1,10 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/task.proto"; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "WorkflowPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/workflowdef.proto b/grpc/src/main/proto/model/workflowdef.proto index 5a22086a98..7ca7747295 100644 --- a/grpc/src/main/proto/model/workflowdef.proto +++ b/grpc/src/main/proto/model/workflowdef.proto @@ -1,9 +1,10 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/workflowtask.proto"; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "WorkflowDefPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/workflowsummary.proto b/grpc/src/main/proto/model/workflowsummary.proto index 463bf01678..6e3d4202a0 100644 --- a/grpc/src/main/proto/model/workflowsummary.proto +++ b/grpc/src/main/proto/model/workflowsummary.proto @@ -1,8 +1,9 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/workflow.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "WorkflowSummaryPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto index 9bad330d3d..85b6b76704 100644 --- a/grpc/src/main/proto/model/workflowtask.proto +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -1,9 +1,10 @@ syntax = "proto3"; -package com.netflix.conductor.proto; +package conductor.proto; import "model/subworkflowparams.proto"; import "google/protobuf/struct.proto"; +option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "WorkflowTaskPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; From fe449174834cccc67f4b45accec420862ca546c0 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 6 Jun 2018 
18:48:34 +0200 Subject: [PATCH 029/163] grpc-server: Return DebugInfo as metadata Google provides a set of standard error objects that can be returned as metadata. In this case we're using DebugInfo for all internal exceptions, as we can attach a stack trace and a detailed error message to the payload. Note that returning this in a compatible way with Java and Go is rather tricky, as the required API is missing. The comment in the implementation gives details on how we're working around the issue. --- .../conductor/grpc/server/GRPCHelper.java | 79 +++++++++++++++++-- 1 file changed, 74 insertions(+), 5 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java index 0dbd37acfd..a2b19c9cb6 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java @@ -1,23 +1,92 @@ package com.netflix.conductor.grpc.server; +import com.google.rpc.DebugInfo; +import io.grpc.Metadata; import io.grpc.Status; +import io.grpc.StatusException; +import io.grpc.protobuf.lite.ProtoLiteUtils; import io.grpc.stub.StreamObserver; import org.apache.commons.lang3.exception.ExceptionUtils; import org.slf4j.Logger; +import java.util.Arrays; + public class GRPCHelper { private final Logger logger; + private static final Metadata.Key STATUS_DETAILS_KEY = + Metadata.Key.of( + "grpc-status-details-bin", + ProtoLiteUtils.metadataMarshaller(DebugInfo.getDefaultInstance())); + public GRPCHelper(Logger log) { this.logger = log; } - void onError(StreamObserver response, Throwable t) { - logger.error("error during GRPC request", t); - response.onError(Status.INTERNAL + /** + * Converts an internal exception thrown by Conductor into a StatusException + * that uses modern "Status" metadata for GRPC. + * + * Note that this is trickier than it ought to be because the GRPC APIs have + * not been upgraded yet. Here's a quick breakdown of how this works in practice: + * + * Reporting a "status" result back to a client with GRPC is pretty straightforward. + * GRPC implementations simply serialize the status into several HTTP/2 trailer headers that + * are sent back to the client before shutting down the HTTP/2 stream. + * + * - 'grpc-status', which is a string representation of a {@link com.google.rpc.Code} + * - 'grpc-message', which is the description of the returned status + * - 'grpc-status-details-bin' (optional), which is an arbitrary payload with a serialized + * ProtoBuf object, containing an accurate description of the error in case the status is not + * successful. + * + * By convention, Google provides a default set of ProtoBuf messages for the most common + * error cases. Here, we'll be using {@link DebugInfo}, as we're reporting an internal + * Java exception which we couldn't properly handle. + * + * Now, how do we go about sending all those headers _and_ the {@link DebugInfo} payload + * using the Java GRPC API? + * + * The only way we can return an error with the Java API is by passing an instance of + * {@link io.grpc.StatusException} or {@link io.grpc.StatusRuntimeException} to + * {@link StreamObserver#onError(Throwable)}. The easiest way to create either of these + * exceptions is by using the {@link Status} class and one of its predefined code + * identifiers (in this case, {@link Status#INTERNAL} because we're reporting an internal + * exception).
The {@link Status} class has setters to set its most relevant attributes, + * namely those that will be automatically serialized into the 'grpc-status' and 'grpc-message' + * trailers in the response. There is, however, no setter to pass an arbitrary ProtoBuf message + * to be serialized into a `grpc-status-details-bin` trailer. This feature exists in the other + * language implementations but it hasn't been brought to Java yet. + * + * Fortunately, {@link Status#asException(Metadata)} exists, allowing us to pass any number + * of arbitrary trailers before we close the response. So we're using this API to manually + * craft the 'grpc-status-details-bin' trailer, in the same way that the GRPC server implementations + * for Go and C++ craft and serialize the header. This will allow us to access the metadata + * cleanly from Go and C++ clients by using the 'details' method which _has_ been implemented + * in those two clients. + * + * @param t The exception to convert + * @return an instance of {@link StatusException} which will properly serialize all its + * headers into the response. + */ + private StatusException throwableToStatusException(Throwable t) { + String[] frames = ExceptionUtils.getStackFrames(t); + Metadata metadata = new Metadata(); + metadata.put(STATUS_DETAILS_KEY, + DebugInfo.newBuilder() + .addAllStackEntries(Arrays.asList(frames)) + .setDetail(ExceptionUtils.getMessage(t)) + .build() + ); + + return Status.INTERNAL .withDescription(t.getMessage()) - .augmentDescription(ExceptionUtils.getStackTrace(t)) .withCause(t) - .asException()); + .asException(metadata); + } + + void onError(StreamObserver response, Throwable t) { + logger.error("internal exception during GRPC request", t); + response.onError(throwableToStatusException(t)); } } From 1beeecc6b7b1a61057dc2368867584f4cc7cfd54 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 7 Jun 2018 12:40:21 +0200 Subject: [PATCH 030/163] common: Document the ProtoBuf-related annotations --- .../common/annotations/ProtoEnum.java | 4 ++++ .../common/annotations/ProtoField.java | 14 ++++++++++- .../common/annotations/ProtoMessage.java | 23 +++++++++++++++++++ 3 files changed, 40 insertions(+), 1 deletion(-) diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java index 00bcc28e74..6217190c3a 100644 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java @@ -5,6 +5,10 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +/** + * ProtoEnum annotates an enum type that will be exposed via the GRPC + * API as a native Protocol Buffers enum. + */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) public @interface ProtoEnum { diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java index 8bca1787f6..c3ea743821 100644 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java @@ -5,9 +5,21 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +/** + * ProtoField annotates a field inside a struct with metadata on how to + * expose it on its corresponding Protocol Buffers struct.
+ * For a field to be exposed in a ProtoBuf struct, the containing struct + * must also be annotated with a {@link ProtoMessage} or {@link ProtoEnum} + * tag. + */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.FIELD) public @interface ProtoField { + /** + * Mandatory. Sets the Protocol Buffer ID for this specific field. Once a field + * has been annotated with a given ID, the ID can never change to a different value + * or the resulting Protocol Buffer struct will not be backwards compatible. + * @return the numeric ID for the field + */ int id(); - String wrap() default ""; } diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java index 3e22c73ae8..fcae3262e5 100644 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java +++ b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java @@ -5,10 +5,33 @@ import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; +/** + * ProtoMessage annotates a given Java class so it becomes exposed via the GRPC + * API as a native Protocol Buffers struct. + * The annotated class must be a POJO. + */ @Retention(RetentionPolicy.RUNTIME) @Target(ElementType.TYPE) public @interface ProtoMessage { + /** + * Sets whether the generated mapping code will contain a helper to translate + * the POJO for this class into the equivalent ProtoBuf object. + * @return whether this class will generate a mapper to ProtoBuf objects + */ boolean toProto() default true; + + /** + * Sets whether the generated mapping code will contain a helper to translate + * the ProtoBuf object for this class into the equivalent POJO. + * @return whether this class will generate a mapper from ProtoBuf objects + */ boolean fromProto() default true; + + /** + * Sets whether this is a wrapper class that will be used to encapsulate complex + * nested type interfaces. Wrapper classes are not directly exposed by the ProtoBuf + * API and must be mapped manually. 
+ * @return whether this is a wrapper class + */ boolean wrapper() default false; } From 9f20103334e7029223d8a15184085ac981d9aabe Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Fri, 8 Jun 2018 11:35:12 +0200 Subject: [PATCH 031/163] grpc: Commit missing .proto file --- .../src/main/proto/model/eventexecution.proto | 26 +++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/grpc/src/main/proto/model/eventexecution.proto b/grpc/src/main/proto/model/eventexecution.proto index e69de29bb2..e4aee81aa6 100644 --- a/grpc/src/main/proto/model/eventexecution.proto +++ b/grpc/src/main/proto/model/eventexecution.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; +package conductor.proto; + +import "model/eventhandler.proto"; +import "google/protobuf/struct.proto"; + +option java_package = "com.netflix.conductor.proto"; +option java_outer_classname = "EventExecutionPb"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + +message EventExecution { + enum Status { + IN_PROGRESS = 0; + COMPLETED = 1; + FAILED = 2; + SKIPPED = 3; + } + string id = 1; + string message_id = 2; + string name = 3; + string event = 4; + int64 created = 5; + EventExecution.Status status = 6; + EventHandler.Action.Type action = 7; + map output = 8; +} From c42772804073e67dcc0e6ea4b351bee576dbf67a Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Fri, 8 Jun 2018 16:54:01 +0200 Subject: [PATCH 032/163] fixxxes --- .../conductor/common/metadata/tasks/Task.java | 1158 ++++++++--------- .../grpc/server/AbstractProtoMapper.java | 4 + grpc/src/main/proto/model/task.proto | 36 +- settings.gradle | 2 +- 4 files changed, 601 insertions(+), 599 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index bec80d6ce6..ab1b24acbd 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -15,622 +15,618 @@ */ package com.netflix.conductor.common.metadata.tasks; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; - -import java.util.HashMap; -import java.util.Map; - import com.netflix.conductor.common.annotations.ProtoEnum; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import java.util.HashMap; +import java.util.Map; + @ProtoMessage public class Task { - @ProtoEnum - public enum Status { + @ProtoEnum + public enum Status { + + IN_PROGRESS(false, true, true), + CANCELED(true, false, false), + FAILED(true, false, true), + FAILED_WITH_TERMINAL_ERROR(true, false, false), //No Retires even if retries are configured, the task and the related workflow should be terminated + COMPLETED(true, true, true), + COMPLETED_WITH_ERRORS(true, true, true), + SCHEDULED(false, true, true), + TIMED_OUT(true, false, true), + READY_FOR_RERUN(false, true, true), + SKIPPED(true, true, false); - IN_PROGRESS(false, true, true), - CANCELED(true, false, false), - FAILED(true, false, true), - COMPLETED(true, true, true), - COMPLETED_WITH_ERRORS(true, true, true), - SCHEDULED(false, true, true), - TIMED_OUT(true, false, true), - READY_FOR_RERUN(false, true, true), - SKIPPED(true, true, false); + private boolean terminal; - private boolean terminal; + private boolean successful; - private boolean successful; + private boolean retriable; - private 
boolean retriable; + Status(boolean terminal, boolean successful, boolean retriable) { + this.terminal = terminal; + this.successful = successful; + this.retriable = retriable; + } - Status(boolean terminal, boolean successful, boolean retriable){ - this.terminal = terminal; - this.successful = successful; - this.retriable = retriable; - } + public boolean isTerminal() { + return terminal; + } - public boolean isTerminal(){ - return terminal; - } + public boolean isSuccessful() { + return successful; + } - public boolean isSuccessful(){ - return successful; - } + public boolean isRetriable() { + return retriable; + } + } - public boolean isRetriable(){ - return retriable; - } - }; + @ProtoField(id = 1) + private String taskType; - @ProtoField(id = 1) - private String taskType; + @ProtoField(id = 2) + private Status status; - @ProtoField(id = 2) - private Status status; + @ProtoField(id = 3) + private Map inputData = new HashMap<>(); - @ProtoField(id = 3) - private Map inputData = new HashMap<>(); + @ProtoField(id = 4) + private String referenceTaskName; - @ProtoField(id = 4) - private String referenceTaskName; + @ProtoField(id = 5) + private int retryCount; - @ProtoField(id = 5) - private int retryCount; + @ProtoField(id = 6) + private int seq; - @ProtoField(id = 6) - private int seq; + @ProtoField(id = 7) + private String correlationId; - @ProtoField(id = 7) - private String correlationId; + @ProtoField(id = 8) + private int pollCount; - @ProtoField(id = 8) - private int pollCount; + @ProtoField(id = 9) + private String taskDefName; - @ProtoField(id = 9) - private String taskDefName; + /** + * Time when the task was scheduled + */ + @ProtoField(id = 10) + private long scheduledTime; - /** - * Time when the task was scheduled - */ - @ProtoField(id = 10) - private long scheduledTime; + /** + * Time when the task was first polled + */ + @ProtoField(id = 11) + private long startTime; - /** - * Time when the task was first polled - */ - @ProtoField(id = 11) - private long startTime; + /** + * Time when the task completed executing + */ + @ProtoField(id = 12) + private long endTime; - /** - * Time when the task completed executing - */ - @ProtoField(id = 12) - private long endTime; + /** + * Time when the task was last updated + */ + @ProtoField(id = 13) + private long updateTime; - /** - * Time when the task was last updated - */ - @ProtoField(id = 13) - private long updateTime; + @ProtoField(id = 14) + private int startDelayInSeconds; - @ProtoField(id = 14) - private int startDelayInSeconds; + @ProtoField(id = 15) + private String retriedTaskId; - @ProtoField(id = 15) - private String retriedTaskId; + @ProtoField(id = 16) + private boolean retried; - @ProtoField(id = 16) - private boolean retried; + @ProtoField(id = 17) + private boolean executed; - @ProtoField(id = 17) - private boolean callbackFromWorker = true; + @ProtoField(id = 18) + private boolean callbackFromWorker = true; - @ProtoField(id = 18) - private int responseTimeoutSeconds; - - @ProtoField(id = 19) - private String workflowInstanceId; + @ProtoField(id = 19) + private int responseTimeoutSeconds; - @ProtoField(id = 20) - private String workflowType; + @ProtoField(id = 20) + private String workflowInstanceId; - @ProtoField(id = 21) - private String taskId; + @ProtoField(id = 21) + private String workflowType; - @ProtoField(id = 22) - private String reasonForIncompletion; - - @ProtoField(id = 23) - private long callbackAfterSeconds; - - @ProtoField(id = 24) - private String workerId; - - @ProtoField(id = 25) - private Map 
outputData = new HashMap<>(); - - @ProtoField(id = 26) - private WorkflowTask workflowTask; - - @ProtoField(id = 27) - private String domain; - - public Task(){ - - } - - /** - * - * @return Type of the task - * @see WorkflowTask.Type - */ - public String getTaskType() { - return taskType; - } - - public void setTaskType(String taskType) { - this.taskType = taskType; - } - - /** - * - * @return Status of the task - */ - public Status getStatus() { - return status; - } - - /** - * - * @param status Status of the task - */ - public void setStatus(Status status) { - this.status = status; - } - - @Deprecated - public Status getTaskStatus() { - return status; - } - - @Deprecated - public void setTaskStatus(Status taskStatus) { - this.status = taskStatus; - } - - public Map getInputData() { - return inputData; - } - - public void setInputData(Map inputData) { - this.inputData = inputData; - } - - - - /** - * @return the referenceTaskName - */ - public String getReferenceTaskName() { - return referenceTaskName; - } - - /** - * @param referenceTaskName the referenceTaskName to set - */ - public void setReferenceTaskName(String referenceTaskName) { - this.referenceTaskName = referenceTaskName; - } - - /** - * @return the correlationId - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * @param correlationId the correlationId to set - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * @return the retryCount - */ - public int getRetryCount() { - return retryCount; - } - - /** - * @param retryCount the retryCount to set - */ - public void setRetryCount(int retryCount) { - this.retryCount = retryCount; - } - - /** - * @return the scheduledTime - */ - public long getScheduledTime() { - return scheduledTime; - } - - /** - * @param scheduledTime the scheduledTime to set - */ - public void setScheduledTime(long scheduledTime) { - this.scheduledTime = scheduledTime; - } - - /** - * @return the startTime - */ - public long getStartTime() { - return startTime; - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(long startTime) { - this.startTime = startTime; - } - - /** - * @return the endTime - */ - public long getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - - /** - * @return the startDelayInSeconds - */ - public int getStartDelayInSeconds() { - return startDelayInSeconds; - } - - /** - * @param startDelayInSeconds the startDelayInSeconds to set - */ - public void setStartDelayInSeconds(int startDelayInSeconds) { - this.startDelayInSeconds = startDelayInSeconds; - } - - /** - * @return the retriedTaskId - */ - public String getRetriedTaskId() { - return retriedTaskId; - } - - /** - * @param retriedTaskId the retriedTaskId to set - */ - public void setRetriedTaskId(String retriedTaskId) { - this.retriedTaskId = retriedTaskId; - } - - /** - * @return the seq - */ - public int getSeq() { - return seq; - } - - /** - * @param seq the seq to set - */ - public void setSeq(int seq) { - this.seq = seq; - } - - /** - * @return the updateTime - */ - public long getUpdateTime() { - return updateTime; - } - - /** - * @param updateTime the updateTime to set - */ - public void setUpdateTime(long updateTime) { - this.updateTime = updateTime; - } - - - /** - * @return the queueWaitTime - */ - public long getQueueWaitTime() { - if(this.startTime > 0 && this.scheduledTime > 0){ - return 
this.startTime - scheduledTime - (getCallbackAfterSeconds()*1000); - } - return 0L; - } - - public void setQueueWaitTime(long t) { - - } - - - /** - * - * @return True if the task has been retried after failure - */ - public boolean isRetried() { - return retried; - } - - /** - * @param retried the retried to set - */ - public void setRetried(boolean retried) { - this.retried = retried; - } - - /** - * - * @return No. of times task has been polled - */ - public int getPollCount() { - return pollCount; - } - - public void setPollCount(int pollCount) { - this.pollCount = pollCount; - } - - - public boolean isCallbackFromWorker() { - return callbackFromWorker; - } - - public void setCallbackFromWorker(boolean callbackFromWorker) { - this.callbackFromWorker = callbackFromWorker; - } - - /** - * - * @return Name of the task definition - */ - public String getTaskDefName() { - if(taskDefName == null || "".equals(taskDefName)){ - taskDefName = taskType; - } - return taskDefName; - } - - /** - * - * @param taskDefName Name of the task definition - */ - public void setTaskDefName(String taskDefName) { - this.taskDefName = taskDefName; - } - - - /** - * - * @return the timeout for task to send response. After this timeout, the task will be re-queued - */ - public int getResponseTimeoutSeconds() { - return responseTimeoutSeconds; - } - - /** - * - * @param responseTimeoutSeconds - timeout for task to send response. After this timeout, the task will be re-queued - */ - public void setResponseTimeoutSeconds(int responseTimeoutSeconds) { - this.responseTimeoutSeconds = responseTimeoutSeconds; - } - - - /** - * @return the workflowInstanceId - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - /** - * @param workflowInstanceId the workflowInstanceId to set - * - */ - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getWorkflowType() { - return workflowType; - } - - - /** - * @param workflowType workflow type - */ - public Task setWorkflowType(String workflowType) { - this.workflowType = workflowType; - return this; - } - - /** - * @return the taskId - */ - public String getTaskId() { - return taskId; - } - - /** - * @param taskId the taskId to set - * - */ - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - /** - * @return the reasonForIncompletion - */ - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - /** - * @param reasonForIncompletion the reasonForIncompletion to set - * - */ - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - /** - * @return the callbackAfterSeconds - */ - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * @param callbackAfterSeconds the callbackAfterSeconds to set - * - */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - /** - * @return the workerId - */ - public String getWorkerId() { - return workerId; - } - - /** - * @param workerId the workerId to set - * - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the outputData - */ - public Map getOutputData() { - return outputData; - } - - /** - * @param outputData the outputData to set - * - */ - public void setOutputData(Map outputData) { - this.outputData = outputData; - } - - /** - * - * @return Workflow Task definition 
- */ - public WorkflowTask getWorkflowTask() { - return workflowTask; - } - - /** - * - * @param workflowTask Task definition - */ - public void setWorkflowTask(WorkflowTask workflowTask) { - this.workflowTask = workflowTask; - } - - /** - * @return the domain - */ - public String getDomain() { - return domain; - } - - /** - * @param domain the Domain - * - */ - public void setDomain(String domain) { - this.domain = domain; - } - - public Task copy() { - - Task copy = new Task(); - copy.setCallbackAfterSeconds(callbackAfterSeconds); - copy.setCallbackFromWorker(callbackFromWorker); - copy.setCorrelationId(correlationId); - copy.setInputData(inputData); - copy.setOutputData(outputData); - copy.setReferenceTaskName(referenceTaskName); - copy.setStartDelayInSeconds(startDelayInSeconds); - copy.setTaskDefName(taskDefName); - copy.setTaskType(taskType); - copy.setWorkflowInstanceId(workflowInstanceId); - copy.setResponseTimeoutSeconds(responseTimeoutSeconds); - copy.setStatus(status); - copy.setRetryCount(retryCount); - copy.setPollCount(pollCount); - copy.setTaskId(taskId); - copy.setReasonForIncompletion(reasonForIncompletion); - copy.setWorkerId(workerId); - copy.setWorkflowTask(workflowTask); - copy.setDomain(domain); - return copy; - } - - - @Override - public String toString() { - return "Task{" + - "taskType='" + taskType + '\'' + - ", status=" + status + - ", inputData=" + inputData + - ", referenceTaskName='" + referenceTaskName + '\'' + - ", retryCount=" + retryCount + - ", seq=" + seq + - ", correlationId='" + correlationId + '\'' + - ", pollCount=" + pollCount + - ", taskDefName='" + taskDefName + '\'' + - ", scheduledTime=" + scheduledTime + - ", startTime=" + startTime + - ", endTime=" + endTime + - ", updateTime=" + updateTime + - ", startDelayInSeconds=" + startDelayInSeconds + - ", retriedTaskId='" + retriedTaskId + '\'' + - ", retried=" + retried + - ", callbackFromWorker=" + callbackFromWorker + - ", responseTimeoutSeconds=" + responseTimeoutSeconds + - ", workflowInstanceId='" + workflowInstanceId + '\'' + - ", taskId='" + taskId + '\'' + - ", reasonForIncompletion='" + reasonForIncompletion + '\'' + - ", callbackAfterSeconds=" + callbackAfterSeconds + - ", workerId='" + workerId + '\'' + - ", outputData=" + outputData + - ", workflowTask=" + workflowTask + - ", domain='" + domain + '\'' + - '}'; - } + @ProtoField(id = 22) + private String taskId; + + @ProtoField(id = 23) + private String reasonForIncompletion; + + @ProtoField(id = 24) + private long callbackAfterSeconds; + + @ProtoField(id = 25) + private String workerId; + + @ProtoField(id = 26) + private Map outputData = new HashMap<>(); + + @ProtoField(id = 27) + private WorkflowTask workflowTask; + + @ProtoField(id = 28) + private String domain; + + public Task() { + + } + + /** + * @return Type of the task + * @see WorkflowTask.Type + */ + public String getTaskType() { + return taskType; + } + + public void setTaskType(String taskType) { + this.taskType = taskType; + } + + /** + * @return Status of the task + */ + public Status getStatus() { + return status; + } + + /** + * @param status Status of the task + */ + public void setStatus(Status status) { + this.status = status; + } + + @Deprecated + public Status getTaskStatus() { + return status; + } + + @Deprecated + public void setTaskStatus(Status taskStatus) { + this.status = taskStatus; + } + + public Map getInputData() { + return inputData; + } + + public void setInputData(Map inputData) { + this.inputData = inputData; + } + + + /** + * @return the 
referenceTaskName + */ + public String getReferenceTaskName() { + return referenceTaskName; + } + + /** + * @param referenceTaskName the referenceTaskName to set + */ + public void setReferenceTaskName(String referenceTaskName) { + this.referenceTaskName = referenceTaskName; + } + + /** + * @return the correlationId + */ + public String getCorrelationId() { + return correlationId; + } + + /** + * @param correlationId the correlationId to set + */ + public void setCorrelationId(String correlationId) { + this.correlationId = correlationId; + } + + /** + * @return the retryCount + */ + public int getRetryCount() { + return retryCount; + } + + /** + * @param retryCount the retryCount to set + */ + public void setRetryCount(int retryCount) { + this.retryCount = retryCount; + } + + /** + * @return the scheduledTime + */ + public long getScheduledTime() { + return scheduledTime; + } + + /** + * @param scheduledTime the scheduledTime to set + */ + public void setScheduledTime(long scheduledTime) { + this.scheduledTime = scheduledTime; + } + + /** + * @return the startTime + */ + public long getStartTime() { + return startTime; + } + + /** + * @param startTime the startTime to set + */ + public void setStartTime(long startTime) { + this.startTime = startTime; + } + + /** + * @return the endTime + */ + public long getEndTime() { + return endTime; + } + + /** + * @param endTime the endTime to set + */ + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + + /** + * @return the startDelayInSeconds + */ + public int getStartDelayInSeconds() { + return startDelayInSeconds; + } + + /** + * @param startDelayInSeconds the startDelayInSeconds to set + */ + public void setStartDelayInSeconds(int startDelayInSeconds) { + this.startDelayInSeconds = startDelayInSeconds; + } + + /** + * @return the retriedTaskId + */ + public String getRetriedTaskId() { + return retriedTaskId; + } + + /** + * @param retriedTaskId the retriedTaskId to set + */ + public void setRetriedTaskId(String retriedTaskId) { + this.retriedTaskId = retriedTaskId; + } + + /** + * @return the seq + */ + public int getSeq() { + return seq; + } + + /** + * @param seq the seq to set + */ + public void setSeq(int seq) { + this.seq = seq; + } + + /** + * @return the updateTime + */ + public long getUpdateTime() { + return updateTime; + } + + /** + * @param updateTime the updateTime to set + */ + public void setUpdateTime(long updateTime) { + this.updateTime = updateTime; + } + + + /** + * @return the queueWaitTime + */ + public long getQueueWaitTime() { + if (this.startTime > 0 && this.scheduledTime > 0) { + return this.startTime - scheduledTime - (getCallbackAfterSeconds() * 1000); + } + return 0L; + } + + public void setQueueWaitTime(long t) { + + } + + /** + * @return True if the task has been retried after failure + */ + public boolean isRetried() { + return retried; + } + + /** + * @param retried the retried to set + */ + public void setRetried(boolean retried) { + this.retried = retried; + } + + /** + * @return True if the task has completed its lifecycle within conductor (from start to completion to being updated in the datastore) + */ + public boolean isExecuted() { + return executed; + } + + /** + * @param executed the executed value to set + */ + public void setExecuted(boolean executed) { + this.executed = executed; + } + + /** + * @return No. 
of times task has been polled + */ + public int getPollCount() { + return pollCount; + } + + public void setPollCount(int pollCount) { + this.pollCount = pollCount; + } + + + public boolean isCallbackFromWorker() { + return callbackFromWorker; + } + + public void setCallbackFromWorker(boolean callbackFromWorker) { + this.callbackFromWorker = callbackFromWorker; + } + + /** + * @return Name of the task definition + */ + public String getTaskDefName() { + if (taskDefName == null || "".equals(taskDefName)) { + taskDefName = taskType; + } + return taskDefName; + } + + /** + * @param taskDefName Name of the task definition + */ + public void setTaskDefName(String taskDefName) { + this.taskDefName = taskDefName; + } + + + /** + * @return the timeout for task to send response. After this timeout, the task will be re-queued + */ + public int getResponseTimeoutSeconds() { + return responseTimeoutSeconds; + } + + /** + * @param responseTimeoutSeconds - timeout for task to send response. After this timeout, the task will be re-queued + */ + public void setResponseTimeoutSeconds(int responseTimeoutSeconds) { + this.responseTimeoutSeconds = responseTimeoutSeconds; + } + + + /** + * @return the workflowInstanceId + */ + public String getWorkflowInstanceId() { + return workflowInstanceId; + } + + /** + * @param workflowInstanceId the workflowInstanceId to set + */ + public void setWorkflowInstanceId(String workflowInstanceId) { + this.workflowInstanceId = workflowInstanceId; + } + + public String getWorkflowType() { + return workflowType; + } + + + /** + * @param workflowType workflow type + */ + public Task setWorkflowType(String workflowType) { + this.workflowType = workflowType; + return this; + } + + /** + * @return the taskId + */ + public String getTaskId() { + return taskId; + } + + /** + * @param taskId the taskId to set + */ + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + /** + * @return the reasonForIncompletion + */ + public String getReasonForIncompletion() { + return reasonForIncompletion; + } + + /** + * @param reasonForIncompletion the reasonForIncompletion to set + */ + public void setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + /** + * @return the callbackAfterSeconds + */ + public long getCallbackAfterSeconds() { + return callbackAfterSeconds; + } + + /** + * @param callbackAfterSeconds the callbackAfterSeconds to set + */ + public void setCallbackAfterSeconds(long callbackAfterSeconds) { + this.callbackAfterSeconds = callbackAfterSeconds; + } + + /** + * @return the workerId + */ + public String getWorkerId() { + return workerId; + } + + /** + * @param workerId the workerId to set + */ + public void setWorkerId(String workerId) { + this.workerId = workerId; + } + + /** + * @return the outputData + */ + public Map getOutputData() { + return outputData; + } + + /** + * @param outputData the outputData to set + */ + public void setOutputData(Map outputData) { + this.outputData = outputData; + } + + /** + * @return Workflow Task definition + */ + public WorkflowTask getWorkflowTask() { + return workflowTask; + } + + /** + * @param workflowTask Task definition + */ + public void setWorkflowTask(WorkflowTask workflowTask) { + this.workflowTask = workflowTask; + } + + /** + * @return the domain + */ + public String getDomain() { + return domain; + } + + /** + * @param domain the Domain + */ + public void setDomain(String domain) { + this.domain = domain; + } + + public Task copy() { + + Task copy = 
new Task(); + copy.setCallbackAfterSeconds(callbackAfterSeconds); + copy.setCallbackFromWorker(callbackFromWorker); + copy.setCorrelationId(correlationId); + copy.setInputData(inputData); + copy.setOutputData(outputData); + copy.setReferenceTaskName(referenceTaskName); + copy.setStartDelayInSeconds(startDelayInSeconds); + copy.setTaskDefName(taskDefName); + copy.setTaskType(taskType); + copy.setWorkflowInstanceId(workflowInstanceId); + copy.setResponseTimeoutSeconds(responseTimeoutSeconds); + copy.setStatus(status); + copy.setRetryCount(retryCount); + copy.setPollCount(pollCount); + copy.setTaskId(taskId); + copy.setReasonForIncompletion(reasonForIncompletion); + copy.setWorkerId(workerId); + copy.setWorkflowTask(workflowTask); + copy.setDomain(domain); + return copy; + } + + + @Override + public String toString() { + return "Task{" + + "taskType='" + taskType + '\'' + + ", status=" + status + + ", inputData=" + inputData + + ", referenceTaskName='" + referenceTaskName + '\'' + + ", retryCount=" + retryCount + + ", seq=" + seq + + ", correlationId='" + correlationId + '\'' + + ", pollCount=" + pollCount + + ", taskDefName='" + taskDefName + '\'' + + ", scheduledTime=" + scheduledTime + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", updateTime=" + updateTime + + ", startDelayInSeconds=" + startDelayInSeconds + + ", retriedTaskId='" + retriedTaskId + '\'' + + ", retried=" + retried + + ", callbackFromWorker=" + callbackFromWorker + + ", responseTimeoutSeconds=" + responseTimeoutSeconds + + ", workflowInstanceId='" + workflowInstanceId + '\'' + + ", taskId='" + taskId + '\'' + + ", reasonForIncompletion='" + reasonForIncompletion + '\'' + + ", callbackAfterSeconds=" + callbackAfterSeconds + + ", workerId='" + workerId + '\'' + + ", outputData=" + outputData + + ", workflowTask=" + workflowTask + + ", domain='" + domain + '\'' + + '}'; + } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java index 28d38379b1..5317775cef 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java @@ -255,6 +255,7 @@ public TaskPb.Task toProto(Task from) { to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); to.setRetriedTaskId( from.getRetriedTaskId() ); to.setRetried( from.isRetried() ); + to.setExecuted( from.isExecuted() ); to.setCallbackFromWorker( from.isCallbackFromWorker() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); @@ -293,6 +294,7 @@ public Task fromProto(TaskPb.Task from) { to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); to.setRetriedTaskId( from.getRetriedTaskId() ); to.setRetried( from.getRetried() ); + to.setExecuted( from.getExecuted() ); to.setCallbackFromWorker( from.getCallbackFromWorker() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); @@ -317,6 +319,7 @@ public TaskPb.Task.Status toProto(Task.Status from) { case IN_PROGRESS: to = TaskPb.Task.Status.IN_PROGRESS; break; case CANCELED: to = TaskPb.Task.Status.CANCELED; break; case FAILED: to = TaskPb.Task.Status.FAILED; break; + case FAILED_WITH_TERMINAL_ERROR: to = TaskPb.Task.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskPb.Task.Status.COMPLETED; break; case 
COMPLETED_WITH_ERRORS: to = TaskPb.Task.Status.COMPLETED_WITH_ERRORS; break; case SCHEDULED: to = TaskPb.Task.Status.SCHEDULED; break; @@ -334,6 +337,7 @@ public Task.Status fromProto(TaskPb.Task.Status from) { case IN_PROGRESS: to = Task.Status.IN_PROGRESS; break; case CANCELED: to = Task.Status.CANCELED; break; case FAILED: to = Task.Status.FAILED; break; + case FAILED_WITH_TERMINAL_ERROR: to = Task.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = Task.Status.COMPLETED; break; case COMPLETED_WITH_ERRORS: to = Task.Status.COMPLETED_WITH_ERRORS; break; case SCHEDULED: to = Task.Status.SCHEDULED; break; diff --git a/grpc/src/main/proto/model/task.proto b/grpc/src/main/proto/model/task.proto index 1bc83d5230..82ecf136af 100644 --- a/grpc/src/main/proto/model/task.proto +++ b/grpc/src/main/proto/model/task.proto @@ -13,12 +13,13 @@ message Task { IN_PROGRESS = 0; CANCELED = 1; FAILED = 2; - COMPLETED = 3; - COMPLETED_WITH_ERRORS = 4; - SCHEDULED = 5; - TIMED_OUT = 6; - READY_FOR_RERUN = 7; - SKIPPED = 8; + FAILED_WITH_TERMINAL_ERROR = 3; + COMPLETED = 4; + COMPLETED_WITH_ERRORS = 5; + SCHEDULED = 6; + TIMED_OUT = 7; + READY_FOR_RERUN = 8; + SKIPPED = 9; } string task_type = 1; Task.Status status = 2; @@ -36,15 +37,16 @@ message Task { int32 start_delay_in_seconds = 14; string retried_task_id = 15; bool retried = 16; - bool callback_from_worker = 17; - int32 response_timeout_seconds = 18; - string workflow_instance_id = 19; - string workflow_type = 20; - string task_id = 21; - string reason_for_incompletion = 22; - int64 callback_after_seconds = 23; - string worker_id = 24; - map output_data = 25; - WorkflowTask workflow_task = 26; - string domain = 27; + bool executed = 17; + bool callback_from_worker = 18; + int32 response_timeout_seconds = 19; + string workflow_instance_id = 20; + string workflow_type = 21; + string task_id = 22; + string reason_for_incompletion = 23; + int64 callback_after_seconds = 24; + string worker_id = 25; + map output_data = 26; + WorkflowTask workflow_task = 27; + string domain = 28; } diff --git a/settings.gradle b/settings.gradle index c73f6287f9..c0b9c4a1a6 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,6 @@ rootProject.name='conductor' -include 'client','common','contribs','core','es5-persistence','jersey','mysql-persistence' +include 'client','common','contribs','core','es2-persistence','es5-persistence','jersey','mysql-persistence' include 'redis-persistence','server','test-harness','ui' include 'protogen' include 'grpc', 'grpc-server' From cdcd30b2690f50015c5d277ce52daf804e19fcba Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 11 Jun 2018 10:40:18 +0200 Subject: [PATCH 033/163] grpc: Fix missing field in TaskResult.java --- .../common/metadata/tasks/TaskResult.java | 407 +++++++++--------- .../grpc/server/AbstractProtoMapper.java | 2 + grpc/src/main/proto/model/taskresult.proto | 5 +- 3 files changed, 215 insertions(+), 199 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java index 56a95d8585..65060de8a4 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java @@ -18,7 +18,9 @@ */ package com.netflix.conductor.common.metadata.tasks; -import com.netflix.conductor.common.annotations.*; +import com.netflix.conductor.common.annotations.ProtoEnum; +import 
com.netflix.conductor.common.annotations.ProtoField; +import com.netflix.conductor.common.annotations.ProtoMessage; import java.util.HashMap; import java.util.List; @@ -32,200 +34,211 @@ */ @ProtoMessage public class TaskResult { - @ProtoEnum - public enum Status { - IN_PROGRESS, FAILED, COMPLETED, SCHEDULED; //SCHEDULED is added for the backward compatibility and should NOT be used when updating the task result - } - - @ProtoField(id = 1) - private String workflowInstanceId; - - @ProtoField(id = 2) - private String taskId; - - @ProtoField(id = 3) - private String reasonForIncompletion; - - @ProtoField(id = 4) - private long callbackAfterSeconds; - - @ProtoField(id = 5) - private String workerId; - - @ProtoField(id = 6) - private Status status; - - @ProtoField(id = 7) - private Map outputData = new HashMap<>(); - - private List logs = new CopyOnWriteArrayList<>(); - - public TaskResult(Task task) { - this.workflowInstanceId = task.getWorkflowInstanceId(); - this.taskId = task.getTaskId(); - this.reasonForIncompletion = task.getReasonForIncompletion(); - this.callbackAfterSeconds = task.getCallbackAfterSeconds(); - this.status = Status.valueOf(task.getStatus().name()); - this.workerId = task.getWorkerId(); - this.outputData = task.getOutputData(); - } - - public TaskResult() { - - } - - /** - * - * @return Workflow instance id for which the task result is produced - */ - public String getWorkflowInstanceId() { - return workflowInstanceId; - } - - public void setWorkflowInstanceId(String workflowInstanceId) { - this.workflowInstanceId = workflowInstanceId; - } - - public String getTaskId() { - return taskId; - } - - public void setTaskId(String taskId) { - this.taskId = taskId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - public long getCallbackAfterSeconds() { - return callbackAfterSeconds; - } - - /** - * When set to non-zero values, the task remains in the queue for the specified seconds before sent back to the worker when polled. - * Useful for the long running task, where the task is updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of time. (delayed queue implementation) - * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue before giving it to a polling worker. - */ - public void setCallbackAfterSeconds(long callbackAfterSeconds) { - this.callbackAfterSeconds = callbackAfterSeconds; - } - - public String getWorkerId() { - return workerId; - } - - /** - * - * @param workerId a free form string identifying the worker host. - * Could be hostname, IP Address or any other meaningful identifier that can help identify the host/process which executed the task, in case of troubleshooting. - */ - public void setWorkerId(String workerId) { - this.workerId = workerId; - } - - /** - * @return the status - */ - public Status getStatus() { - return status; - } - - /** - * - * @param status Status of the task - *
- * IN_PROGRESS: Use this for long running tasks, indicating the task is still in progress and should be checked again at a later time. e.g. the worker checks the status of the job in the DB, while the job is being executed by another process. - *
- * FAILED, COMPLETED: Terminal statuses for the task. - *
- * - * @see #setCallbackAfterSeconds(long) - */ - public void setStatus(Status status) { - this.status = status; - } - - public Map getOutputData() { - return outputData; - } - - /** - * - * @param outputData output data to be set for the task execution result - */ - public void setOutputData(Map outputData) { - this.outputData = outputData; - } - - /** - * Adds output - * @param key output field - * @param value value - * @return current instance - */ - public TaskResult addOutputData(String key, Object value) { - this.outputData.put(key, value); - return this; - } - - /** - * - * @return Task execution logs - */ - public List getLogs() { - return logs; - } - - /** - * - * @param logs Task execution logs - */ - public void setLogs(List logs) { - this.logs = logs; - } - - - /** - * - * @param log Log line to be added - * @return Instance of TaskResult - */ - public TaskResult log(String log) { - this.logs.add(new TaskExecLog(log)); - return this; - } - - @Override - public String toString() { - return "TaskResult [workflowInstanceId=" + workflowInstanceId + ", taskId=" + taskId + ", status=" + status + "]"; - } - - public static TaskResult complete() { - return newTaskResult(Status.COMPLETED); - } - - public static TaskResult failed() { - return newTaskResult(Status.FAILED); - } - - public static TaskResult failed(String failureReason) { - TaskResult result = newTaskResult(Status.FAILED); - result.setReasonForIncompletion(failureReason); - return result; - } - - public static TaskResult inProgress() { - return newTaskResult(Status.IN_PROGRESS); - } - - public static TaskResult newTaskResult(Status status) { - TaskResult result = new TaskResult(); - result.setStatus(status); - return result; - } + + @ProtoEnum + public enum Status { + IN_PROGRESS, FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED, SCHEDULED; //SCHEDULED is added for the backward compatibility and should NOT be used when updating the task result + } + + @ProtoField(id = 1) + private String workflowInstanceId; + + @ProtoField(id = 2) + private String taskId; + + @ProtoField(id = 3) + private String reasonForIncompletion; + + @ProtoField(id = 4) + private long callbackAfterSeconds; + + @ProtoField(id = 5) + private String workerId; + + @ProtoField(id = 6) + private Status status; + + @ProtoField(id = 7) + private Map outputData = new HashMap<>(); + + private List logs = new CopyOnWriteArrayList<>(); + + public TaskResult(Task task) { + this.workflowInstanceId = task.getWorkflowInstanceId(); + this.taskId = task.getTaskId(); + this.reasonForIncompletion = task.getReasonForIncompletion(); + this.callbackAfterSeconds = task.getCallbackAfterSeconds(); + this.status = Status.valueOf(task.getStatus().name()); + this.workerId = task.getWorkerId(); + this.outputData = task.getOutputData(); + } + + public TaskResult() { + + } + + /** + * + * @return Workflow instance id for which the task result is produced + */ + public String getWorkflowInstanceId() { + return workflowInstanceId; + } + + public void setWorkflowInstanceId(String workflowInstanceId) { + this.workflowInstanceId = workflowInstanceId; + } + + public String getTaskId() { + return taskId; + } + + public void setTaskId(String taskId) { + this.taskId = taskId; + } + + public String getReasonForIncompletion() { + return reasonForIncompletion; + } + + public void setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + public long getCallbackAfterSeconds() { + return callbackAfterSeconds; + } + + /** + * When set to 
non-zero values, the task remains in the queue for the specified seconds before sent back to the worker when polled. + * Useful for the long running task, where the task is updated as IN_PROGRESS and should not be polled out of the queue for a specified amount of time. (delayed queue implementation) + * @param callbackAfterSeconds Amount of time in seconds the task should be held in the queue before giving it to a polling worker. + */ + public void setCallbackAfterSeconds(long callbackAfterSeconds) { + this.callbackAfterSeconds = callbackAfterSeconds; + } + + public String getWorkerId() { + return workerId; + } + + /** + * + * @param workerId a free form string identifying the worker host. + * Could be hostname, IP Address or any other meaningful identifier that can help identify the host/process which executed the task, in case of troubleshooting. + */ + public void setWorkerId(String workerId) { + this.workerId = workerId; + } + + /** + * @return the status + */ + public Status getStatus() { + return status; + } + + /** + * + * @param status Status of the task + *
+ * IN_PROGRESS: Use this for long running tasks, indicating the task is still in progress and should be checked again at a later time. + * e.g. the worker checks the status of the job in the DB, while the job is being executed by another process. + *
+ * FAILED, FAILED_WITH_TERMINAL_ERROR, COMPLETED: Terminal statuses for the task. + *
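+ * <p>
+ * An illustrative worker-side sketch of the long-running pattern (the {@code task}
+ * variable and the delay value are hypothetical, not part of this patch):
+ * <pre>
+ * TaskResult result = new TaskResult(task);
+ * result.setStatus(TaskResult.Status.IN_PROGRESS);
+ * result.setCallbackAfterSeconds(60); // re-queue and hand back to a poller after ~60s
+ * </pre>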
+ * + * @see #setCallbackAfterSeconds(long) + */ + public void setStatus(Status status) { + this.status = status; + } + + public Map getOutputData() { + return outputData; + } + + /** + * + * @param outputData output data to be set for the task execution result + */ + public void setOutputData(Map outputData) { + this.outputData = outputData; + } + + /** + * Adds output + * @param key output field + * @param value value + * @return current instance + */ + public TaskResult addOutputData(String key, Object value) { + this.outputData.put(key, value); + return this; + } + + /** + * + * @return Task execution logs + */ + public List getLogs() { + return logs; + } + + /** + * + * @param logs Task execution logs + */ + public void setLogs(List logs) { + this.logs = logs; + } + + + /** + * + * @param log Log line to be added + * @return Instance of TaskResult + */ + public TaskResult log(String log) { + this.logs.add(new TaskExecLog(log)); + return this; + } + + @Override + public String toString() { + return "TaskResult{" + + "workflowInstanceId='" + workflowInstanceId + '\'' + + ", taskId='" + taskId + '\'' + + ", reasonForIncompletion='" + reasonForIncompletion + '\'' + + ", callbackAfterSeconds=" + callbackAfterSeconds + + ", workerId='" + workerId + '\'' + + ", status=" + status + + ", outputData=" + outputData + + ", logs=" + logs + + '}'; + } + + public static TaskResult complete() { + return newTaskResult(Status.COMPLETED); + } + + public static TaskResult failed() { + return newTaskResult(Status.FAILED); + } + + public static TaskResult failed(String failureReason) { + TaskResult result = newTaskResult(Status.FAILED); + result.setReasonForIncompletion(failureReason); + return result; + } + + public static TaskResult inProgress() { + return newTaskResult(Status.IN_PROGRESS); + } + + public static TaskResult newTaskResult(Status status) { + TaskResult result = new TaskResult(); + result.setStatus(status); + return result; + } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java index 5317775cef..db42569de8 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java @@ -482,6 +482,7 @@ public TaskResultPb.TaskResult.Status toProto(TaskResult.Status from) { switch (from) { case IN_PROGRESS: to = TaskResultPb.TaskResult.Status.IN_PROGRESS; break; case FAILED: to = TaskResultPb.TaskResult.Status.FAILED; break; + case FAILED_WITH_TERMINAL_ERROR: to = TaskResultPb.TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskResultPb.TaskResult.Status.COMPLETED; break; case SCHEDULED: to = TaskResultPb.TaskResult.Status.SCHEDULED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); @@ -494,6 +495,7 @@ public TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { switch (from) { case IN_PROGRESS: to = TaskResult.Status.IN_PROGRESS; break; case FAILED: to = TaskResult.Status.FAILED; break; + case FAILED_WITH_TERMINAL_ERROR: to = TaskResult.Status.FAILED_WITH_TERMINAL_ERROR; break; case COMPLETED: to = TaskResult.Status.COMPLETED; break; case SCHEDULED: to = TaskResult.Status.SCHEDULED; break; default: throw new IllegalArgumentException("Unexpected enum constant: " + from); diff --git a/grpc/src/main/proto/model/taskresult.proto b/grpc/src/main/proto/model/taskresult.proto 
index abece11c27..ddd1933cac 100644 --- a/grpc/src/main/proto/model/taskresult.proto +++ b/grpc/src/main/proto/model/taskresult.proto @@ -11,8 +11,9 @@ message TaskResult { enum Status { IN_PROGRESS = 0; FAILED = 1; - COMPLETED = 2; - SCHEDULED = 3; + FAILED_WITH_TERMINAL_ERROR = 2; + COMPLETED = 3; + SCHEDULED = 4; } string workflow_instance_id = 1; string task_id = 2; From a71e26c22b03d3296277225ec44ff0c7bf6ed751 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 11 Jun 2018 11:02:39 +0200 Subject: [PATCH 034/163] Cherry pick bootstrapping merge. --- docker/grpc/docker-compose.yaml | 16 +++++++++++++ grpc-server/build.gradle | 1 + .../conductor/grpc/server/GRPCModule.java | 19 +++++++++------ .../grpc/server/GRPCServerProvider.java | 24 ++----------------- .../netflix/conductor/grpc/server/Main.java | 14 ++++++++--- 5 files changed, 42 insertions(+), 32 deletions(-) create mode 100644 docker/grpc/docker-compose.yaml diff --git a/docker/grpc/docker-compose.yaml b/docker/grpc/docker-compose.yaml new file mode 100644 index 0000000000..6b37444a3a --- /dev/null +++ b/docker/grpc/docker-compose.yaml @@ -0,0 +1,16 @@ +version: '3.4' + +services: + + mysql: + image: mysql:5.6 + restart: always + environment: + MYSQL_ROOT_PASSWORD: password + ports: + - 3306:3306 + + elasticsearch: + image: elasticsearch:2.4 + ports: + - 9200:9200 diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index faee1a45fc..a5cec8cdf2 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -5,6 +5,7 @@ dependencies { //FIXME Right now this brings a lot of stuff along for the ride. :-( compile project(':conductor-server') + compile project(':conductor-es5-persistence') compile "io.grpc:grpc-netty:${revGrpc}" compile "log4j:log4j:1.2.17" diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java index 3bf1f34a14..710d38353d 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java @@ -2,7 +2,12 @@ import com.google.inject.AbstractModule; import com.google.inject.Provides; + import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.core.config.CoreModule; +import com.netflix.conductor.core.config.SystemPropertiesConfiguration; +import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5; +import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; import com.netflix.conductor.grpc.EventServiceGrpc; import com.netflix.conductor.grpc.MetadataServiceGrpc; import com.netflix.conductor.grpc.TaskServiceGrpc; @@ -11,28 +16,28 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; +import javax.inject.Singleton; + public class GRPCModule extends AbstractModule { // FIXME Eventually this should be shared with the Jersey code and provided by the server module. 
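// Note (illustrative, not part of the original patch): maxThreads presumably sizes the
// executor built in configureExecutorService() below; per the FIXME above, that pool
// setup is meant to eventually be shared with the Jersey server rather than duplicated here.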
private final int maxThreads = 50; - private final Configuration configuration; private ExecutorService es; - public GRPCModule(Configuration configuration){ - this.configuration = configuration; - } @Override protected void configure() { configureExecutorService(); - bind(Configuration.class).toInstance(configuration); + install(new CoreModule()); + install(new ElasticSearchModuleV5()); + install(new MySQLWorkflowModule()); + bind(Configuration.class).to(SystemPropertiesConfiguration.class).in(Singleton.class); bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); bind(MetadataServiceGrpc.MetadataServiceImplBase.class).to(MetadataServiceImpl.class); bind(WorkflowServiceGrpc.WorkflowServiceImplBase.class).to(WorkflowServiceImpl.class); + bind(GRPCServer.class).toProvider(GRPCServerProvider.class).asEagerSingleton(); bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class); - - bind(GRPCServer.class).to(GRPCServer.class); } @Provides diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java index 4d5373b1ac..6ec307204d 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -1,40 +1,20 @@ package com.netflix.conductor.grpc.server; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.grpc.EventServiceGrpc; -import com.netflix.conductor.grpc.MetadataServiceGrpc; -import com.netflix.conductor.grpc.TaskServiceGrpc; -import com.netflix.conductor.grpc.WorkflowServiceGrpc; import javax.inject.Inject; import javax.inject.Provider; public class GRPCServerProvider implements Provider { - private final TaskServiceGrpc.TaskServiceImplBase taskServiceImplBase; - private final WorkflowServiceGrpc.WorkflowServiceImplBase workflowServiceImplBase; - private final MetadataServiceGrpc.MetadataServiceImplBase metadataServiceImplBase; - private final EventServiceGrpc.EventServiceImplBase eventServiceImplBase; private final Configuration configuration; @Inject - public GRPCServerProvider(TaskServiceGrpc.TaskServiceImplBase taskImpl, - WorkflowServiceGrpc.WorkflowServiceImplBase workflowImpl, - MetadataServiceGrpc.MetadataServiceImplBase metaImpl, - EventServiceGrpc.EventServiceImplBase eventImpl, - Configuration conf) { - this.taskServiceImplBase = taskImpl; - this.workflowServiceImplBase = workflowImpl; - this.metadataServiceImplBase = metaImpl; - this.eventServiceImplBase = eventImpl; + public GRPCServerProvider(Configuration conf) { this.configuration = conf; } @Override public GRPCServer get() { - return new GRPCServer(configuration, - taskServiceImplBase, - workflowServiceImplBase, - metadataServiceImplBase, - eventServiceImplBase); + return new GRPCServer(configuration); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java index 46bcd3472f..bfc1bcb74f 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java @@ -3,8 +3,6 @@ import com.google.inject.Guice; import com.google.inject.Injector; -import com.netflix.conductor.server.ConductorConfig; - import org.apache.log4j.PropertyConfigurator; import java.io.File; @@ -24,7 +22,7 @@ public static void main(String args[]) throws 
Exception { PropertyConfigurator.configure(new FileInputStream(new File(args[1]))); } - Injector injector = Guice.createInjector(new GRPCModule(new ConductorConfig())); + Injector injector = Guice.createInjector(new GRPCModule()); GRPCServer server = injector.getInstance(GRPCServer.class); Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @@ -33,6 +31,16 @@ public void run() { server.stop(); } })); + + System.out.println("\n\n\n"); + System.out.println(" _ _ "); + System.out.println(" ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ "); + System.out.println(" / __/ _ \\| '_ \\ / _` | | | |/ __| __/ _ \\| '__|"); + System.out.println("| (_| (_) | | | | (_| | |_| | (__| || (_) | | "); + System.out.println(" \\___\\___/|_| |_|\\__,_|\\__,_|\\___|\\__\\___/|_| "); + System.out.println("\n\n\n"); + + server.start(); } private static void loadConfigFile(String propertyFile) throws IOException { From dc7f7b2d0d76beb63a5357a0c1a0f3951f5fc635 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 24 May 2018 11:17:13 +0200 Subject: [PATCH 035/163] Persist the DB data between runs, but also provide a way to clean it up. --- docker/grpc/Makefile | 3 +++ docker/grpc/docker-compose.yaml | 12 +++++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) create mode 100644 docker/grpc/Makefile diff --git a/docker/grpc/Makefile b/docker/grpc/Makefile new file mode 100644 index 0000000000..c0e87d21f8 --- /dev/null +++ b/docker/grpc/Makefile @@ -0,0 +1,3 @@ + +clean-db: + docker volume rm grpc_conductor_mysql diff --git a/docker/grpc/docker-compose.yaml b/docker/grpc/docker-compose.yaml index 6b37444a3a..46e932d534 100644 --- a/docker/grpc/docker-compose.yaml +++ b/docker/grpc/docker-compose.yaml @@ -6,7 +6,14 @@ services: image: mysql:5.6 restart: always environment: - MYSQL_ROOT_PASSWORD: password + MYSQL_ROOT_PASSWORD: 12345 + MYSQL_DATABASE: conductor + MYSQL_USER: conductor + MYSQL_PASSWORD: password + volumes: + - type: volume + source: conductor_mysql + target: /var/lib/mysql ports: - 3306:3306 @@ -14,3 +21,6 @@ services: image: elasticsearch:2.4 ports: - 9200:9200 + +volumes: + conductor_mysql: From 7444cc076022c9dbce2ed0167d211f859f9a642e Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 24 May 2018 11:54:02 +0200 Subject: [PATCH 036/163] Cherry pick bootstrapping merge. --- grpc-server/build.gradle | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index a5cec8cdf2..08f8cdcbca 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -1,3 +1,8 @@ +plugins { + // FIXME This is temporary until the server module refactoring is completed. + id 'com.github.johnrengelman.shadow' version '1.2.3' +} + dependencies { compile project(':conductor-common') compile project(':conductor-core') @@ -10,3 +15,22 @@ dependencies { compile "io.grpc:grpc-netty:${revGrpc}" compile "log4j:log4j:1.2.17" } + +// FIXME This is temporary until the server module refactoring is completed. 
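+// Note (illustrative, not part of the original patch): mergeServiceFiles() in the
+// shadowJar block below merges META-INF/services descriptors from all dependencies,
+// which ServiceLoader-based discovery (e.g. gRPC locating its netty transport) relies
+// on once everything is packed into one fat jar.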
+build.dependsOn('shadowJar') + +shadowJar { + mergeServiceFiles() + configurations = [project.configurations.compile] + manifest { + attributes 'Description': 'Self contained Conductor server jar' + attributes 'Main-Class' : 'com.netflix.conductor.grpc.server.Main' + } +} + +task server(type: JavaExec) { + systemProperty 'workflow.elasticsearch.url', 'localhost:9300' + systemProperties System.properties + main = 'com.netflix.conductor.grpc.server.Main' + classpath = sourceSets.test.runtimeClasspath +} From f3ed5d442e8fb2585f8f6a527100e978ee2066cc Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 11 Jun 2018 14:00:43 +0200 Subject: [PATCH 037/163] * Update bootstrap files for gRPC. These are still not the final version, but are functional for testing. * Add docker compose configurations for running a complete stack, including gRPC on top of MySQL. --- docker/grpc/Dockerfile.grpc | 22 +++++++++ docker/grpc/Makefile | 15 ++++++ docker/grpc/config/config-local.properties | 39 +++++++++++++++ docker/grpc/config/config.properties | 34 ++++++++++++++ docker/grpc/docker-compose.apps.yaml | 47 +++++++++++++++++++ ....yaml => docker-compose.dependencies.yaml} | 13 ++++- docker/server/config/config-mysql.properties | 21 +++++++++ .../conductor/grpc/server/GRPCModule.java | 5 +- .../conductor/grpc/server/GRPCServer.java | 14 ++++-- 9 files changed, 202 insertions(+), 8 deletions(-) create mode 100644 docker/grpc/Dockerfile.grpc create mode 100755 docker/grpc/config/config-local.properties create mode 100755 docker/grpc/config/config.properties create mode 100644 docker/grpc/docker-compose.apps.yaml rename docker/grpc/{docker-compose.yaml => docker-compose.dependencies.yaml} (67%) create mode 100755 docker/server/config/config-mysql.properties diff --git a/docker/grpc/Dockerfile.grpc b/docker/grpc/Dockerfile.grpc new file mode 100644 index 0000000000..65427327b0 --- /dev/null +++ b/docker/grpc/Dockerfile.grpc @@ -0,0 +1,22 @@ +# +# conductor:server - Netflix conductor server +# +FROM openjdk:8-jre-slim + +MAINTAINER Netflix OSS + +# Make app folders +RUN mkdir -p /app/config /app/logs /app/libs + +# Copy the project directly onto the image +COPY ./docker/grpc/bin /app +COPY ./docker/grpc/config /app/config +COPY ./grpc-server/build/libs/conductor-grpc-server-*-all.jar /app/libs + +# Copy the files for the server into the app folders +RUN chmod +x /app/startup.sh + +EXPOSE 8090 + +CMD [ "/app/startup.sh" ] +ENTRYPOINT [ "/bin/sh"] diff --git a/docker/grpc/Makefile b/docker/grpc/Makefile index c0e87d21f8..6da40b1a19 100644 --- a/docker/grpc/Makefile +++ b/docker/grpc/Makefile @@ -1,3 +1,18 @@ clean-db: docker volume rm grpc_conductor_mysql + +compose-build: + docker-compose -f docker-compose.dependencies.yaml -f docker-compose.apps.yaml build + +dependencies-up: + docker-compose -f docker-compose.dependencies.yaml up -d + +dependencies-down: + docker-compose -f docker-compose.dependencies.yaml down + +stack-up: + docker-compose -f docker-compose.dependencies.yaml -f docker-compose.apps.yaml up + +stack-down: + docker-compose -f docker-compose.dependencies.yaml -f docker-compose.apps.yaml down diff --git a/docker/grpc/config/config-local.properties b/docker/grpc/config/config-local.properties new file mode 100755 index 0000000000..b9cced6448 --- /dev/null +++ b/docker/grpc/config/config-local.properties @@ -0,0 +1,39 @@ +# Database persistence model. Possible values are memory, redis, and dynomite. 
+# If ommitted, the persistence used is memory +# +# memory : The data is stored in memory and lost when the server dies. Useful for testing or demo +# redis : non-Dynomite based redis instance +# dynomite : Dynomite cluster. Use this for HA configuration. + +db=memory + +# Dynomite Cluster details. +# format is host:port:rack separated by semicolon +workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c + +# Namespace for the keys stored in Dynomite/Redis +workflow.namespace.prefix=conductor + +# Namespace prefix for the dyno queues +workflow.namespace.queue.prefix=conductor_queues + +# No. of threads allocated to dyno-queues (optional) +queues.dynomite.threads=10 + +# Non-quorum port used to connect to local redis. Used by dyno-queues. +# When using redis directly, set this to the same port as redis server +# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. +queues.dynomite.nonQuorum.port=22122 + + +# Transport address to elasticsearch +workflow.elasticsearch.url=localhost:9300 + +# Name of the elasticsearch cluster +workflow.elasticsearch.index.name=conductor + +# Additional modules (optional) +# conductor.additional.modules=class_extending_com.google.inject.AbstractModule + +# Load sample kitchen sink workflow +loadSample=true diff --git a/docker/grpc/config/config.properties b/docker/grpc/config/config.properties new file mode 100755 index 0000000000..a6e3902263 --- /dev/null +++ b/docker/grpc/config/config.properties @@ -0,0 +1,34 @@ +# Database persistence model. Possible values are memory, redis, and dynomite. +# If ommitted, the persistence used is memory +# +# memory : The data is stored in memory and lost when the server dies. Useful for testing or demo +# redis : non-Dynomite based redis instance +# dynomite : Dynomite cluster. Use this for HA configuration. + +db=mysql + +jdbc.url=jdbc:mysql://mysql:3306/conductor + +# Namespace prefix for the dyno queues +workflow.namespace.queue.prefix=conductor_queues + +# No. of threads allocated to dyno-queues (optional) +queues.dynomite.threads=10 + +# Non-quorum port used to connect to local redis. Used by dyno-queues. +# When using redis directly, set this to the same port as redis server +# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. 
+queues.dynomite.nonQuorum.port=22122 + + +# Transport address to elasticsearch +workflow.elasticsearch.url=elasticsearch:9300 + +# Name of the elasticsearch cluster +workflow.elasticsearch.index.name=conductor + +# Additional modules (optional) +# conductor.additional.modules=class_extending_com.google.inject.AbstractModule + +# Load sample kitchen sink workflow +loadSample=true diff --git a/docker/grpc/docker-compose.apps.yaml b/docker/grpc/docker-compose.apps.yaml new file mode 100644 index 0000000000..710395f895 --- /dev/null +++ b/docker/grpc/docker-compose.apps.yaml @@ -0,0 +1,47 @@ +version: '3.4' + +services: + + conductor-server-grpc: + environment: + - CONFIG_PROP=config.properties + image: conductor:grpc-server + build: + context: ../../ + dockerfile: docker/grpc/Dockerfile.grpc + ports: + - 8090:8090 + networks: + - internal + depends_on: + - mysql + - elasticsearch + + conductor-server-rest: + image: conductor:server + build: + context: ../../ + dockerfile: docker/server/Dockerfile + environment: + - CONFIG_PROP=config-mysql.properties + ports: + - 8080:8080 + networks: + - internal + depends_on: + - mysql + - elasticsearch + + conductor-ui: + environment: + - WF_SERVER=http://conductor-server-rest:8080/api/ + image: conductor:ui + build: + context: ../../ + dockerfile: docker/ui/Dockerfile + ports: + - 5000:5000 + networks: + - internal + depends_on: + - conductor-server-rest diff --git a/docker/grpc/docker-compose.yaml b/docker/grpc/docker-compose.dependencies.yaml similarity index 67% rename from docker/grpc/docker-compose.yaml rename to docker/grpc/docker-compose.dependencies.yaml index 46e932d534..60f4b95e60 100644 --- a/docker/grpc/docker-compose.yaml +++ b/docker/grpc/docker-compose.dependencies.yaml @@ -4,7 +4,6 @@ services: mysql: image: mysql:5.6 - restart: always environment: MYSQL_ROOT_PASSWORD: 12345 MYSQL_DATABASE: conductor @@ -14,13 +13,23 @@ services: - type: volume source: conductor_mysql target: /var/lib/mysql + networks: + - internal ports: - 3306:3306 elasticsearch: - image: elasticsearch:2.4 + image: elasticsearch:5.6-alpine + environment: + ES_JAVA_OPTS: "-Xms750m -Xmx750m" + networks: + - internal ports: - 9200:9200 + - 9300:9300 volumes: conductor_mysql: + +networks: + internal: diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties new file mode 100755 index 0000000000..575254578f --- /dev/null +++ b/docker/server/config/config-mysql.properties @@ -0,0 +1,21 @@ +# Database persistence model. Possible values are memory, redis, and dynomite. +# If ommitted, the persistence used is memory +# +# memory : The data is stored in memory and lost when the server dies. Useful for testing or demo +# redis : non-Dynomite based redis instance +# dynomite : Dynomite cluster. Use this for HA configuration. 
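+# mysql    : MySQL instance (the value this file actually sets below via db=mysql, pointed at the server with jdbc.url)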
+
+db=mysql
+
+jdbc.url=jdbc:mysql://mysql:3306/conductor
+
+# Transport address to elasticsearch
+workflow.elasticsearch.url=elasticsearch:9300
+
+# Name of the elasticsearch index
+workflow.elasticsearch.index.name=conductor
+
+# Additional modules (optional)
+# conductor.additional.modules=class_extending_com.google.inject.AbstractModule
+
+# Load sample kitchen sink workflow
+loadSample=true
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java
index 710d38353d..a6136ec00b 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java
@@ -32,12 +32,15 @@ protected void configure() {
         install(new CoreModule());
         install(new ElasticSearchModuleV5());
         install(new MySQLWorkflowModule());
+        bind(Configuration.class).to(SystemPropertiesConfiguration.class).in(Singleton.class);
+
         bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class);
         bind(MetadataServiceGrpc.MetadataServiceImplBase.class).to(MetadataServiceImpl.class);
         bind(WorkflowServiceGrpc.WorkflowServiceImplBase.class).to(WorkflowServiceImpl.class);
-        bind(GRPCServer.class).toProvider(GRPCServerProvider.class).asEagerSingleton();
         bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class);
+
+        bind(GRPCServer.class).toProvider(GRPCServerProvider.class).asEagerSingleton();
     }
 
     @Provides
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
index 0358d1199e..b92e9d3cbc 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java
@@ -1,17 +1,21 @@
 package com.netflix.conductor.grpc.server;
 
 import com.google.inject.Inject;
+
 import com.netflix.conductor.core.config.Configuration;
-import io.grpc.BindableService;
-import io.grpc.Server;
-import io.grpc.ServerBuilder;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import javax.inject.Singleton;
 import java.io.IOException;
 import java.util.Arrays;
 
+import javax.inject.Singleton;
+
+import io.grpc.BindableService;
+import io.grpc.Server;
+import io.grpc.ServerBuilder;
+
 @Singleton
 public class GRPCServer {
     private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class);
@@ -19,7 +23,7 @@ public class GRPCServer {
     private final Server server;
 
     public final static String CONFIG_PORT = "grpc.port";
-    public final static int CONFIG_PORT_DEFAULT = 8080;
+    public final static int CONFIG_PORT_DEFAULT = 8090;
 
     @Inject
     public GRPCServer(Configuration conf, BindableService... services) {
From d568659d7f17cec42bf7c12b2377091a82d268ac Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Tue, 12 Jun 2018 15:12:35 +0200
Subject: [PATCH 038/163] Remove ignore file entry that was ignoring valid config files.
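
The blanket 'bin' entry in .gitignore was also matching tracked files such as
the new docker/grpc/bin/startup.sh added here, so it is removed.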
---
 .gitignore                 |  1 -
 docker/grpc/bin/startup.sh | 21 +++++++++++++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)
 create mode 100755 docker/grpc/bin/startup.sh

diff --git a/.gitignore b/.gitignore
index 6d79af3c54..76ce3f5c4e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,7 +4,6 @@ ui/dist
 ui/package-lock.json
 .gradle
 .project
-bin
 build
 client/python/conductor.egg-info
 *.pyc
diff --git a/docker/grpc/bin/startup.sh b/docker/grpc/bin/startup.sh
new file mode 100755
index 0000000000..5ac1723c0a
--- /dev/null
+++ b/docker/grpc/bin/startup.sh
@@ -0,0 +1,21 @@
+#!/bin/sh
+# startup.sh - startup script for the server docker image
+
+echo "Starting Conductor server"
+
+# Start the server
+cd /app/libs
+echo "Property file: $CONFIG_PROP"
+echo $CONFIG_PROP
+export config_file=
+
+if [ -z "$CONFIG_PROP" ];
+  then
+    echo "Using an in-memory instance of conductor";
+    export config_file=/app/config/config-local.properties
+  else
+    echo "Using '$CONFIG_PROP'";
+    export config_file=/app/config/$CONFIG_PROP
+fi
+
+java -jar conductor-grpc-server-*-all.jar $config_file
From b4f18d84e814dc82952b8441b5f3fbab78b34724 Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Tue, 12 Jun 2018 16:44:22 +0200
Subject: [PATCH 039/163] Re-factor MySQL bootstrapping and configuration.

This brings the MySQL module more in line with the Redis module in how it is
configured and bootstrapped. It also cleans up the package structure along
the way to make it a bit more explicit.

* Add a DataSourceProvider for MySQL.
* Consolidate configuration property names and default values into an interface.
* Move most of the things that aren't actually DAO related out of the dao package.
* Move boolean property reading code into the base configuration interface.
---
 .../conductor/core/config/Configuration.java  |  6 ++
 .../conductor/dao/mysql/MySQLBaseDAO.java     |  4 ++
 .../dao/mysql/MySQLWorkflowModule.java        | 66 ------------------
 .../netflix/conductor/dao/mysql/Query.java    | 14 ++--
 .../conductor/mysql/MySQLConfiguration.java   | 43 ++++++++++++
 .../mysql/MySQLDataSourceProvider.java        | 54 +++++++++++++++
 .../conductor/mysql/MySQLWorkflowModule.java  | 29 ++++++++
 .../SystemPropertiesMySQLConfiguration.java   |  6 ++
 .../{dao/mysql => sql}/ExecuteFunction.java   |  4 +-
 .../{dao/mysql => sql}/QueryFunction.java     |  4 +-
 .../{dao/mysql => sql}/ResultSetHandler.java  |  4 +-
 .../mysql => sql}/TransactionalFunction.java  |  2 +-
 .../conductor/config/TestConfiguration.java   |  9 +--
 .../conductor/server/ServerModule.java        |  2 +-
 14 files changed, 166 insertions(+), 81 deletions(-)
 delete mode 100644 mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLWorkflowModule.java
 create mode 100644 mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java
 create mode 100644 mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java
 create mode 100644 mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLWorkflowModule.java
 create mode 100644 mysql-persistence/src/main/java/com/netflix/conductor/mysql/SystemPropertiesMySQLConfiguration.java
 rename mysql-persistence/src/main/java/com/netflix/conductor/{dao/mysql => sql}/ExecuteFunction.java (74%)
 rename mysql-persistence/src/main/java/com/netflix/conductor/{dao/mysql => sql}/QueryFunction.java (74%)
 rename mysql-persistence/src/main/java/com/netflix/conductor/{dao/mysql => sql}/ResultSetHandler.java (77%)
 rename mysql-persistence/src/main/java/com/netflix/conductor/{dao/mysql => sql}/TransactionalFunction.java (86%)

diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
index a436f72e68..a3cd09c6c7 100644
--- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
+++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
@@ -118,6 +118,12 @@
 
     public String getProperty(String name, String defaultValue);
 
+    default boolean getBoolProperty(String name, boolean defaultValue) {
+        String value = getProperty(name, null);
+        if(null == value || value.trim().length() == 0){ return defaultValue; }
+        return Boolean.valueOf(value.trim());
+    }
+
     /**
      *
      * @return Returns all the configurations in a map.
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLBaseDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLBaseDAO.java
index 2869073750..3cba07cd47 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLBaseDAO.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLBaseDAO.java
@@ -5,6 +5,10 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.netflix.conductor.core.execution.ApplicationException;
+import com.netflix.conductor.sql.ExecuteFunction;
+import com.netflix.conductor.sql.QueryFunction;
+import com.netflix.conductor.sql.TransactionalFunction;
+
 import java.io.IOException;
 import java.sql.Connection;
 import java.sql.SQLException;
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLWorkflowModule.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLWorkflowModule.java
deleted file mode 100644
index 75fbd45815..0000000000
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLWorkflowModule.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package com.netflix.conductor.dao.mysql;
-
-import com.google.inject.AbstractModule;
-import com.google.inject.Provides;
-import com.google.inject.Singleton;
-import com.netflix.conductor.core.config.Configuration;
-import com.netflix.conductor.dao.ExecutionDAO;
-import com.netflix.conductor.dao.MetadataDAO;
-import com.netflix.conductor.dao.QueueDAO;
-import com.zaxxer.hikari.HikariDataSource;
-import javax.sql.DataSource;
-import org.flywaydb.core.Flyway;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * @author mustafa
- */
-public class MySQLWorkflowModule extends AbstractModule {
-    protected final Logger logger = LoggerFactory.getLogger(getClass());
-
-    @Provides
-    @Singleton
-    public DataSource getDataSource(Configuration config) {
-        HikariDataSource dataSource = new HikariDataSource();
-        dataSource.setJdbcUrl(config.getProperty("jdbc.url", "jdbc:mysql://localhost:3306/conductor"));
-        dataSource.setUsername(config.getProperty("jdbc.username", "conductor"));
-        dataSource.setPassword(config.getProperty("jdbc.password", "password"));
-        dataSource.setAutoCommit(false);
-        flywayMigrate(config, dataSource);
-
-        return dataSource;
-    }
-
-    @Override
-    protected void configure() {
-        bind(MetadataDAO.class).to(MySQLMetadataDAO.class);
-        bind(ExecutionDAO.class).to(MySQLExecutionDAO.class);
-        bind(QueueDAO.class).to(MySQLQueueDAO.class);
-    }
-
-    private void flywayMigrate(Configuration config, DataSource dataSource) {
-        boolean enabled = getBool(config.getProperty("flyway.enabled", "true"), true);
-        if(!enabled) {
-            logger.debug("Flyway migrations are disabled");
-            return;
-        }
-
-        String migrationTable = config.getProperty("flyway.table", null);
-
-        Flyway flyway = new Flyway();
-        if(null != migrationTable) {
-            logger.debug("Using Flyway migration table '{}'", migrationTable);
-            flyway.setTable(migrationTable);
-        }
-
-        flyway.setDataSource(dataSource);
-        flyway.setPlaceholderReplacement(false);
-        flyway.migrate();
-    }
-
-    private boolean getBool(String value, boolean defaultValue) {
-        if(null == value || value.trim().length() == 0){ return defaultValue; }
-        return Boolean.valueOf(value.trim());
-    }
-}
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/Query.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/Query.java
index 086c5490bd..2eb43db6df 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/Query.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/Query.java
@@ -1,26 +1,28 @@
 package com.netflix.conductor.dao.mysql;
 
-import static com.netflix.conductor.core.execution.ApplicationException.Code;
-
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.netflix.conductor.core.execution.ApplicationException;
+import com.netflix.conductor.sql.ResultSetHandler;
+
+import org.apache.commons.lang3.math.NumberUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import java.io.IOException;
 import java.sql.Connection;
+import java.sql.Date;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Timestamp;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.sql.Date;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import org.apache.commons.lang3.math.NumberUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
+import static com.netflix.conductor.core.execution.ApplicationException.Code;
 
 /**
  * Represents a {@link PreparedStatement} that is wrapped with convenience methods and utilities.
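
For reference, the configuration surface consolidated in the files below can be consumed as in this minimal sketch, assuming the system-property-backed SystemPropertiesMySQLConfiguration that this patch introduces:

    MySQLConfiguration config = new SystemPropertiesMySQLConfiguration();
    // Falls back to "jdbc:mysql://localhost:3306/conductor" when -Djdbc.url is not set.
    String url = config.getJdbcUrl();
    // Delegates to the getBoolProperty() default added to Configuration above; defaults to true.
    boolean flywayEnabled = config.isFlywayEnabled();
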
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java
new file mode 100644
index 0000000000..42dcdfa19c
--- /dev/null
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java
@@ -0,0 +1,43 @@
+package com.netflix.conductor.mysql;
+
+import com.netflix.conductor.core.config.Configuration;
+
+import java.util.Optional;
+
+public interface MySQLConfiguration extends Configuration {
+
+    String JDBC_URL_PROPERTY_NAME = "jdbc.url";
+    String JDBC_URL_DEFAULT_VALUE = "jdbc:mysql://localhost:3306/conductor";
+
+    String JDBC_USER_NAME_PROPERTY_NAME = "jdbc.username";
+    String JDBC_USER_NAME_DEFAULT_VALUE = "conductor";
+
+    String JDBC_PASSWORD_PROPERTY_NAME = "jdbc.password";
+    String JDBC_PASSWORD_DEFAULT_VALUE = "password";
+
+    String FLYWAY_ENABLED_PROPERTY_NAME = "flyway.enabled";
+    boolean FLYWAY_ENABLED_DEFAULT_VALUE = true;
+
+    String FLYWAY_TABLE_PROPERTY_NAME = "flyway.table";
+    Optional<String> FLYWAY_TABLE_DEFAULT_VALUE = Optional.empty();
+
+    default String getJdbcUrl(){
+        return getProperty(JDBC_URL_PROPERTY_NAME, JDBC_URL_DEFAULT_VALUE);
+    }
+
+    default String getJdbcUserName(){
+        return getProperty(JDBC_USER_NAME_PROPERTY_NAME, JDBC_USER_NAME_DEFAULT_VALUE);
+    }
+
+    default String getJdbcPassword(){
+        return getProperty(JDBC_PASSWORD_PROPERTY_NAME, JDBC_PASSWORD_DEFAULT_VALUE);
+    }
+
+    default boolean isFlywayEnabled(){
+        return getBoolProperty(FLYWAY_ENABLED_PROPERTY_NAME, FLYWAY_ENABLED_DEFAULT_VALUE);
+    }
+
+    default Optional<String> getFlywayTable(){
+        return Optional.ofNullable(getProperty(FLYWAY_TABLE_PROPERTY_NAME, null));
+    }
+}
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java
new file mode 100644
index 0000000000..abd596b71c
--- /dev/null
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java
@@ -0,0 +1,54 @@
+package com.netflix.conductor.mysql;
+
+import com.zaxxer.hikari.HikariDataSource;
+
+import org.flywaydb.core.Flyway;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+import javax.sql.DataSource;
+
+public class MySQLDataSourceProvider implements Provider<DataSource> {
+    private static final Logger logger = LoggerFactory.getLogger(MySQLDataSourceProvider.class);
+
+    private final MySQLConfiguration configuration;
+
+    @Inject
+    public MySQLDataSourceProvider(MySQLConfiguration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public DataSource get() {
+        HikariDataSource dataSource = new HikariDataSource();
+        dataSource.setJdbcUrl(configuration.getJdbcUrl());
+        dataSource.setUsername(configuration.getJdbcUserName());
+        dataSource.setPassword(configuration.getJdbcPassword());
+        dataSource.setAutoCommit(false);
+        flywayMigrate(dataSource);
+
+        return dataSource;
+    }
+
+    // TODO Move this into a class that has complete lifecycle for the connection, i.e. startup and shutdown.
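+    // Until then, migrations run eagerly here: the first call to get() both builds the pool and migrates the schema.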
+    private void flywayMigrate(DataSource dataSource) {
+        boolean enabled = configuration.isFlywayEnabled();
+        if (!enabled) {
+            logger.debug("Flyway migrations are disabled");
+            return;
+        }
+
+
+        Flyway flyway = new Flyway();
+        configuration.getFlywayTable().ifPresent(tableName -> {
+            logger.debug("Using Flyway migration table '{}'", tableName);
+            flyway.setTable(tableName);
+        });
+
+        flyway.setDataSource(dataSource);
+        flyway.setPlaceholderReplacement(false);
+        flyway.migrate();
+    }
+}
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLWorkflowModule.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLWorkflowModule.java
new file mode 100644
index 0000000000..4bc03fe9de
--- /dev/null
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLWorkflowModule.java
@@ -0,0 +1,29 @@
+package com.netflix.conductor.mysql;
+
+import com.google.inject.AbstractModule;
+import com.google.inject.Scopes;
+
+import com.netflix.conductor.dao.ExecutionDAO;
+import com.netflix.conductor.dao.MetadataDAO;
+import com.netflix.conductor.dao.QueueDAO;
+import com.netflix.conductor.dao.mysql.MySQLExecutionDAO;
+import com.netflix.conductor.dao.mysql.MySQLMetadataDAO;
+import com.netflix.conductor.dao.mysql.MySQLQueueDAO;
+
+import javax.sql.DataSource;
+
+/**
+ * @author mustafa
+ */
+public class MySQLWorkflowModule extends AbstractModule {
+
+    @Override
+    protected void configure() {
+        bind(MySQLConfiguration.class).to(SystemPropertiesMySQLConfiguration.class);
+        bind(DataSource.class).toProvider(MySQLDataSourceProvider.class).in(Scopes.SINGLETON);
+        bind(MetadataDAO.class).to(MySQLMetadataDAO.class);
+        bind(ExecutionDAO.class).to(MySQLExecutionDAO.class);
+        bind(QueueDAO.class).to(MySQLQueueDAO.class);
+    }
+
+}
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/SystemPropertiesMySQLConfiguration.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/SystemPropertiesMySQLConfiguration.java
new file mode 100644
index 0000000000..1ffa1e0cff
--- /dev/null
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/SystemPropertiesMySQLConfiguration.java
@@ -0,0 +1,6 @@
+package com.netflix.conductor.mysql;
+
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
+
+public class SystemPropertiesMySQLConfiguration extends SystemPropertiesConfiguration implements MySQLConfiguration {
+}
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ExecuteFunction.java b/mysql-persistence/src/main/java/com/netflix/conductor/sql/ExecuteFunction.java
similarity index 74%
rename from mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ExecuteFunction.java
rename to mysql-persistence/src/main/java/com/netflix/conductor/sql/ExecuteFunction.java
index ad3b4d7f94..f1cabce830 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ExecuteFunction.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/sql/ExecuteFunction.java
@@ -1,4 +1,6 @@
-package com.netflix.conductor.dao.mysql;
+package com.netflix.conductor.sql;
+
+import com.netflix.conductor.dao.mysql.Query;
 
 import java.sql.SQLException;
 
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/QueryFunction.java b/mysql-persistence/src/main/java/com/netflix/conductor/sql/QueryFunction.java
similarity index 74%
rename from mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/QueryFunction.java
rename to mysql-persistence/src/main/java/com/netflix/conductor/sql/QueryFunction.java
index f2ada44783..6f6a304659 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/QueryFunction.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/sql/QueryFunction.java
@@ -1,4 +1,6 @@
-package com.netflix.conductor.dao.mysql;
+package com.netflix.conductor.sql;
+
+import com.netflix.conductor.dao.mysql.Query;
 
 import java.sql.SQLException;
 
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ResultSetHandler.java b/mysql-persistence/src/main/java/com/netflix/conductor/sql/ResultSetHandler.java
similarity index 77%
rename from mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ResultSetHandler.java
rename to mysql-persistence/src/main/java/com/netflix/conductor/sql/ResultSetHandler.java
index 28e80ce1af..ddaa145ad6 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/ResultSetHandler.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/sql/ResultSetHandler.java
@@ -1,4 +1,6 @@
-package com.netflix.conductor.dao.mysql;
+package com.netflix.conductor.sql;
+
+import com.netflix.conductor.dao.mysql.Query;
 
 import java.sql.ResultSet;
 import java.sql.SQLException;
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/TransactionalFunction.java b/mysql-persistence/src/main/java/com/netflix/conductor/sql/TransactionalFunction.java
similarity index 86%
rename from mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/TransactionalFunction.java
rename to mysql-persistence/src/main/java/com/netflix/conductor/sql/TransactionalFunction.java
index 4f8af2d372..00a6119316 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/TransactionalFunction.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/sql/TransactionalFunction.java
@@ -1,4 +1,4 @@
-package com.netflix.conductor.dao.mysql;
+package com.netflix.conductor.sql;
 
 import java.sql.Connection;
 import java.sql.SQLException;
diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java b/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java
index b78e5f8ce8..26d6845555 100644
--- a/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java
+++ b/mysql-persistence/src/test/java/com/netflix/conductor/config/TestConfiguration.java
@@ -15,17 +15,18 @@
  */
 package com.netflix.conductor.config;
 
-import java.util.Map;
-
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
-import com.netflix.conductor.core.config.Configuration;
+
+import com.netflix.conductor.mysql.MySQLConfiguration;
+
+import java.util.Map;
 
 /**
  * @author Viren
  *
 */
-public class TestConfiguration implements Configuration {
+public class TestConfiguration implements MySQLConfiguration {
 
     private Map<String, String> testProperties = Maps.newHashMap(ImmutableMap.of("test", "dummy"));
 
diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
index afbfedf3ba..c138413bcb 100644
--- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java
+++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
@@ -30,7 +30,7 @@
 import com.netflix.conductor.dao.RedisWorkflowModule;
 import com.netflix.conductor.dao.es.index.ElasticSearchModule;
 import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5;
-import com.netflix.conductor.dao.mysql.MySQLWorkflowModule;
+import com.netflix.conductor.mysql.MySQLWorkflowModule;
 import com.netflix.dyno.connectionpool.HostSupplier;
 
 import java.util.List;
From eb0b66982bb797349124970e34f3f813ba1310db Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 11 Jun 2018 13:34:35 +0200
Subject: [PATCH 040/163] protogen: Change ProtoMapper package and source location
---
 .../main/java/com/netflix/conductor/protogen/ProtoGen.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java
index 9b56548024..1165b03342 100644
--- a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java
+++ b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java
@@ -20,7 +20,7 @@ public class ProtoGen {
     public static String PROTO_JAVA_PACKAGE_NAME = "com.netflix.conductor.proto";
     public static String PROTO_GO_PACKAGE_NAME = "github.com/netflix/conductor/client/gogrpc/conductor/model";
 
-    public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc.server";
+    public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc";
     public static String GENERATOR_NAME = "com.netflix.conductor.protogen.ProtoGen";
 
     private List files = new ArrayList<>();
@@ -51,7 +51,7 @@ public static void main(String[] args) throws Exception {
         generator.process(com.netflix.conductor.common.run.WorkflowSummary.class);
 
         generator.writeProtos("grpc/src/main/proto");
-        generator.writeMapper("grpc-server/src/main/java/com/netflix/conductor/grpc/server");
+        generator.writeMapper("grpc/src/main/java/com/netflix/conductor/grpc/");
     }
 
     public ProtoGen() {
From c5c33911bb3c7e2b3c82d5465a23c721f32f579c Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 11 Jun 2018 13:36:25 +0200
Subject: [PATCH 041/163] grpc: Move ProtoMapper to the `grpc` module
---
 .../com/netflix/conductor/grpc/server/EventServiceImpl.java    | 2 +-
 .../com/netflix/conductor/grpc/server/MetadataServiceImpl.java | 1 +
 .../com/netflix/conductor/grpc/server/TaskServiceImpl.java     | 1 +
 .../com/netflix/conductor/grpc/server/WorkflowServiceImpl.java | 1 +
 grpc/build.gradle                                              | 3 +++
 .../java/com/netflix/conductor/grpc}/AbstractProtoMapper.java  | 2 +-
 .../src/main/java/com/netflix/conductor/grpc}/ProtoMapper.java | 2 +-
 7 files changed, 9 insertions(+), 3 deletions(-)
 rename {grpc-server/src/main/java/com/netflix/conductor/grpc/server => grpc/src/main/java/com/netflix/conductor/grpc}/AbstractProtoMapper.java (99%)
 rename {grpc-server/src/main/java/com/netflix/conductor/grpc/server => grpc/src/main/java/com/netflix/conductor/grpc}/ProtoMapper.java (98%)

diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java
index b92e556113..f5ff20f9e3 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java
@@ -1,11 +1,11 @@
 package com.netflix.conductor.grpc.server;
 
 import com.google.protobuf.Empty;
-import com.netflix.conductor.common.metadata.events.EventHandler;
 import com.netflix.conductor.core.events.EventProcessor;
 import com.netflix.conductor.core.events.EventQueues;
 import com.netflix.conductor.grpc.EventServiceGrpc;
 import com.netflix.conductor.grpc.EventServicePb;
+import com.netflix.conductor.grpc.ProtoMapper;
 import com.netflix.conductor.proto.EventHandlerPb;
 import com.netflix.conductor.service.MetadataService;
 import io.grpc.stub.StreamObserver;
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java
index 80a3d6fe18..a383c574e3 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java
@@ -5,6 +5,7 @@
 import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
 import com.netflix.conductor.grpc.MetadataServiceGrpc;
 import com.netflix.conductor.grpc.MetadataServicePb;
+import com.netflix.conductor.grpc.ProtoMapper;
 import com.netflix.conductor.proto.TaskDefPb;
 import com.netflix.conductor.proto.WorkflowDefPb;
 import com.netflix.conductor.service.MetadataService;
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
index d3f3bdd1ef..c3811e3021 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
@@ -5,6 +5,7 @@
 import com.google.protobuf.Empty;
 import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
 import com.netflix.conductor.common.metadata.tasks.TaskResult;
+import com.netflix.conductor.grpc.ProtoMapper;
 import com.netflix.conductor.proto.TaskPb;
 import com.netflix.conductor.grpc.TaskServiceGrpc;
 import com.netflix.conductor.grpc.TaskServicePb;
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
index 33dd30da7e..078f0cc6f4 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java
@@ -9,6 +9,7 @@
 import com.netflix.conductor.common.run.WorkflowSummary;
 import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.core.execution.WorkflowExecutor;
+import com.netflix.conductor.grpc.ProtoMapper;
 import com.netflix.conductor.grpc.SearchPb;
 import com.netflix.conductor.proto.RerunWorkflowRequestPb;
 import com.netflix.conductor.proto.StartWorkflowRequestPb;
diff --git a/grpc/build.gradle b/grpc/build.gradle
index ac0eb4cd48..c59abe8448 100644
--- a/grpc/build.gradle
+++ b/grpc/build.gradle
@@ -11,6 +11,9 @@ plugins {
 }
 
 dependencies {
+    compile project(':conductor-common')
+    compile project(':conductor-core')
+
     compile "com.google.api.grpc:proto-google-common-protos:1.0.0"
     compile "io.grpc:grpc-protobuf:${revGrpc}"
     compile "io.grpc:grpc-stub:${revGrpc}"
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java
similarity index 99%
rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java
rename to grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java
index db42569de8..94d2b574c8 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/AbstractProtoMapper.java
+++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java
@@ -1,4 +1,4 @@
-package com.netflix.conductor.grpc.server;
+package com.netflix.conductor.grpc;
 
 import com.google.protobuf.Value;
 import com.netflix.conductor.common.metadata.events.EventExecution;
diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
similarity index 98%
rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java
rename to grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
index 491d9fc18f..092b1f9465 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/ProtoMapper.java
+++ b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
@@ -1,4 +1,4 @@
-package com.netflix.conductor.grpc.server;
+package com.netflix.conductor.grpc;
 
 import com.google.protobuf.ListValue;
 import com.google.protobuf.NullValue;
From 6782518bb3deb7bd5bb8202dd0d696ad5c763be6 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 11 Jun 2018 17:23:03 +0200
Subject: [PATCH 042/163] grpc-server: Implement missing TaskService methods
---
 .../grpc/server/TaskServiceImpl.java          | 98 ++++++++++++++++++-
 grpc/src/main/proto/grpc/task_service.proto   | 73 ++++++++++++--
 2 files changed, 159 insertions(+), 12 deletions(-)

diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
index c3811e3021..b74148b7ee 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java
@@ -1,6 +1,10 @@
 package com.netflix.conductor.grpc.server;
 
+import java.util.Comparator;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
 
 import com.google.protobuf.Empty;
 import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
@@ -10,6 +14,7 @@
 import com.netflix.conductor.grpc.TaskServiceGrpc;
 import com.netflix.conductor.grpc.TaskServicePb;
 import com.netflix.conductor.proto.TaskResultPb;
+import io.grpc.Status;
 import io.grpc.stub.ServerCallStreamObserver;
 import io.grpc.stub.StreamObserver;
 
@@ -121,14 +126,14 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre
     }
 
     @Override
-    public void updateTask(TaskResultPb.TaskResult req, StreamObserver<TaskServicePb.TaskUpdateResponse> response) {
+    public void updateTask(TaskResultPb.TaskResult req, StreamObserver<TaskServicePb.TaskId> response) {
         try {
             TaskResult task = protoMapper.fromProto(req);
             taskService.updateTask(task);
 
             response.onNext(
-                TaskServicePb.TaskUpdateResponse.newBuilder()
-                    .setTaskId(task.getTaskId()).build()
+                TaskServicePb.TaskId.newBuilder()
+                    .setTaskId(task.getTaskId()).build()
             );
             response.onCompleted();
         } catch (Exception e) {
@@ -154,7 +159,7 @@ public void addLog(TaskServicePb.AddLogRequest req, StreamObserver<Empty> respon
     }
 
     @Override
-    public void getLogs(TaskServicePb.TaskId req, StreamObserver<TaskServicePb.GetLogsResponse> response) {
+    public void getTaskLogs(TaskServicePb.TaskId req, StreamObserver<TaskServicePb.GetLogsResponse> response) {
         List<TaskExecLog> logs = taskService.getTaskLogs(req.getTaskId());
         response.onNext(TaskServicePb.GetLogsResponse.newBuilder()
                 .addAllLogs(logs.stream().map(protoMapper::toProto)::iterator)
@@ -162,4 +167,89 @@ public void getLogs(TaskServicePb.TaskId req, StreamObserver<TaskServicePb.GetLogsResponse> response) {
+
+    @Override
+    public void getTask(TaskServicePb.TaskId req, StreamObserver<TaskPb.Task> response) {
+        try {
+            Task task = taskService.getTask(req.getTaskId());
+            if (task == null) {
+                response.onError(Status.NOT_FOUND
+                        .withDescription("No such task found by id="+req.getTaskId())
+                        .asRuntimeException()
+                );
+            } else {
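+                // Task found: convert the internal task model to its protobuf form and complete the unary call.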
+                response.onNext(protoMapper.toProto(task));
+                response.onCompleted();
+            }
+        } catch (Exception e) {
+            grpcHelper.onError(response, e);
+        }
+
+    }
+
+    @Override
+    public void removeTaskFromQueue(TaskServicePb.RemoveTaskRequest req, StreamObserver<Empty> response) {
+        taskService.removeTaskfromQueue(req.getTaskType(), req.getTaskId());
+        response.onCompleted();
+    }
+
+    @Override
+    public void getQueueSizesForTasks(TaskServicePb.QueueSizesRequest req, StreamObserver<TaskServicePb.QueueSizesResponse> response) {
+        Map<String, Integer> sizes = taskService.getTaskQueueSizes(req.getTaskTypesList());
+        response.onNext(
+            TaskServicePb.QueueSizesResponse.newBuilder()
+                .putAllQueueForTask(sizes)
+                .build()
+        );
+        response.onCompleted();
+    }
+
+    @Override
+    public void getQueueInfo(Empty req, StreamObserver<TaskServicePb.QueueInfoResponse> response) {
+        Map<String, Long> queueInfo = queues.queuesDetail().entrySet().stream()
+                .sorted(Comparator.comparing(Map.Entry::getKey))
+                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (v1, v2) -> v1, HashMap::new));
+
+        response.onNext(
+            TaskServicePb.QueueInfoResponse.newBuilder()
+                .putAllQueues(queueInfo)
+                .build()
+        );
+        response.onCompleted();
+    }
+
+    @Override
+    public void getQueueAllInfo(Empty req, StreamObserver<TaskServicePb.QueueAllInfoResponse> response) {
+        Map<String, Map<String, Map<String, Long>>> info = queues.queuesDetailVerbose();
+        TaskServicePb.QueueAllInfoResponse.Builder queuesBuilder = TaskServicePb.QueueAllInfoResponse.newBuilder();
+
+        for (Map.Entry<String, Map<String, Map<String, Long>>> queue : info.entrySet()) {
+            final String queueName = queue.getKey();
+            final Map<String, Map<String, Long>> queueShards = queue.getValue();
+
+            TaskServicePb.QueueAllInfoResponse.QueueInfo.Builder queueInfoBuilder =
+                TaskServicePb.QueueAllInfoResponse.QueueInfo.newBuilder();
+
+            for (Map.Entry<String, Map<String, Long>> shard : queueShards.entrySet()) {
+                final String shardName = shard.getKey();
+                final Map<String, Long> shardInfo = shard.getValue();
+
+                // FIXME: make shardInfo an actual type
+                // shardInfo is an immutable map with predefined keys, so we can always
+                // access 'size' and 'uacked'. It would be better if shardInfo
+                // were actually a POJO.
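+                // As used here, 'size' is the count of messages waiting in the shard and
+                // 'uacked' the count of messages delivered but not yet acknowledged.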
+                queueInfoBuilder.putShards(shardName,
+                    TaskServicePb.QueueAllInfoResponse.ShardInfo.newBuilder()
+                        .setSize(shardInfo.get("size"))
+                        .setUacked(shardInfo.get("uacked"))
+                        .build()
+                );
+            }
+
+            queuesBuilder.putQueues(queueName, queueInfoBuilder.build());
+        }
+
+        response.onNext(queuesBuilder.build());
+        response.onCompleted();
+    }
 }
diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto
index db853598f9..1dc98c54a0 100644
--- a/grpc/src/main/proto/grpc/task_service.proto
+++ b/grpc/src/main/proto/grpc/task_service.proto
@@ -11,15 +11,44 @@ option java_outer_classname = "TaskServicePb";
 option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc";
 
 service TaskService {
+    // GET /poll/{tasktype}
     rpc Poll(PollRequest) returns (conductor.proto.Task);
+
+    // GRPC-only
     rpc PollStream(stream StreamingPollRequest) returns (stream conductor.proto.Task);
+
+    // GET /in_progress/{tasktype}
     rpc GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse);
+
+    // GET /in_progress/{workflowId}/{taskRefName}
     rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (conductor.proto.Task);
-    rpc UpdateTask(conductor.proto.TaskResult) returns (TaskUpdateResponse);
+
+    // POST /
+    rpc UpdateTask(conductor.proto.TaskResult) returns (TaskId);
+
+    // POST /{taskId}/ack
    rpc AckTask(AckTaskRequest) returns (AckTaskResponse);
 
+    // POST /{taskId}/log
     rpc AddLog(AddLogRequest) returns (google.protobuf.Empty);
-    rpc GetLogs(TaskId) returns (GetLogsResponse);
+
+    // GET /{taskId}/log
+    rpc GetTaskLogs(TaskId) returns (GetLogsResponse);
+
+    // GET /{taskId}
+    rpc GetTask(TaskId) returns (conductor.proto.Task);
+
+    // DELETE /queue/{taskType}/{taskId}
+    rpc RemoveTaskFromQueue(RemoveTaskRequest) returns (google.protobuf.Empty);
+
+    // GET /queue/sizes
+    rpc GetQueueSizesForTasks(QueueSizesRequest) returns (QueueSizesResponse);
+
+    // GET /queue/all
+    rpc GetQueueInfo(google.protobuf.Empty) returns (QueueInfoResponse);
+
+    // GET /queue/all/verbose
+    rpc GetQueueAllInfo(google.protobuf.Empty) returns (QueueAllInfoResponse);
 }
 
 message PollRequest {
@@ -51,10 +80,6 @@ message PendingTaskRequest {
     string task_ref_name = 2;
 }
 
-message TaskUpdateResponse {
-    string task_id = 1;
-}
-
 message AckTaskRequest {
     string task_id = 1;
     string worker_id = 2;
@@ -69,10 +94,42 @@ message AddLogRequest {
     string log = 2;
 }
 
+message GetLogsResponse {
+    repeated conductor.proto.TaskExecLog logs = 1;
+}
+
 message TaskId {
     string task_id = 1;
 }
 
-message GetLogsResponse {
-    repeated conductor.proto.TaskExecLog logs = 1;
+message TaskType {
+    string task_type = 1;
+}
+
+message RemoveTaskRequest {
+    string task_type = 1;
+    string task_id = 2;
 }
+
+message QueueSizesRequest {
+    repeated string task_types = 1;
+}
+
+message QueueSizesResponse {
+    map<string, int32> queue_for_task = 1;
+}
+
+message QueueInfoResponse {
+    map<string, int64> queues = 1;
+}
+
+message QueueAllInfoResponse {
+    message ShardInfo {
+        int64 size = 1;
+        int64 uacked = 2;
+    }
+    message QueueInfo {
+        map<string, ShardInfo> shards = 1;
+    }
+    map<string, QueueInfo> queues = 1;
+}
\ No newline at end of file
From 60c769433a6c6ea1a2b18db58049a070b9c6a814 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 11 Jun 2018 13:37:40 +0200
Subject: [PATCH 043/163] grpc-client: Implementation (WIP)
---
 grpc-client/build.gradle                      | 21 ++++++++++++
 .../conductor/client/grpc/ClientBase.java     | 29 +++++++++++++++
 .../conductor/client/grpc/TaskClient.java     | 26 +++++++++++++++
 settings.gradle                               |  2 +-
 4 files changed, 77 insertions(+), 1 deletion(-)
 create mode 100644 grpc-client/build.gradle
 create mode 100644 grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java
 create mode 100644 grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java

diff --git a/grpc-client/build.gradle b/grpc-client/build.gradle
new file mode 100644
index 0000000000..4714e44cfa
--- /dev/null
+++ b/grpc-client/build.gradle
@@ -0,0 +1,21 @@
+plugins {
+    id 'java'
+}
+
+group 'com.netflix'
+
+sourceCompatibility = 1.8
+
+repositories {
+    mavenCentral()
+}
+
+dependencies {
+    compile project(':conductor-common')
+    compile project(':conductor-core')
+    compile project(':conductor-grpc')
+
+    compile "io.grpc:grpc-netty:${revGrpc}"
+    compile "log4j:log4j:1.2.17"
+    testCompile group: 'junit', name: 'junit', version: '4.12'
+}
diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java
new file mode 100644
index 0000000000..26a161936f
--- /dev/null
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/ClientBase.java
@@ -0,0 +1,29 @@
+package com.netflix.conductor.client.grpc;
+
+import com.netflix.conductor.grpc.ProtoMapper;
+import io.grpc.ManagedChannel;
+import io.grpc.ManagedChannelBuilder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.concurrent.TimeUnit;
+
+public abstract class ClientBase {
+    private static Logger logger = LoggerFactory.getLogger(ClientBase.class);
+    protected static ProtoMapper protoMapper = ProtoMapper.INSTANCE;
+
+    protected final ManagedChannel channel;
+
+    public ClientBase(String address, int port) {
+        this(ManagedChannelBuilder.forAddress(address, port).usePlaintext());
+    }
+
+    public ClientBase(ManagedChannelBuilder<?> builder) {
+        channel = builder.build();
+    }
+
+    public void shutdown() throws InterruptedException {
+        channel.shutdown().awaitTermination(5, TimeUnit.SECONDS);
+    }
+
+}
diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
new file mode 100644
index 0000000000..650854dcf0
--- /dev/null
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
@@ -0,0 +1,26 @@
+package com.netflix.conductor.client.grpc;
+
+import com.netflix.conductor.common.metadata.tasks.Task;
+import com.netflix.conductor.grpc.TaskServiceGrpc;
+import com.netflix.conductor.grpc.TaskServicePb;
+import com.netflix.conductor.proto.TaskPb;
+
+public class TaskClient extends ClientBase {
+    private TaskServiceGrpc.TaskServiceBlockingStub stub;
+
+    public TaskClient(String address, int port) {
+        super(address, port);
+        this.stub = TaskServiceGrpc.newBlockingStub(this.channel);
+    }
+
+    public Task pollTask(String taskType, String workerId, String domain) {
+        TaskPb.Task task = stub.poll(
+            TaskServicePb.PollRequest.newBuilder()
+                .setTaskType(taskType)
+                .setWorkerId(workerId)
+                .setDomain(domain)
+                .build()
+        );
+        return protoMapper.fromProto(task);
+    }
+}
diff --git a/settings.gradle b/settings.gradle
index c0b9c4a1a6..ea19e2b69b 100644
--- a/settings.gradle
+++ b/settings.gradle
@@ -3,6 +3,6 @@ rootProject.name='conductor'
 include 'client','common','contribs','core','es2-persistence','es5-persistence','jersey','mysql-persistence'
 include 'redis-persistence','server','test-harness','ui'
 include 'protogen'
-include 'grpc', 'grpc-server'
+include 'grpc', 'grpc-server', 'grpc-client'
 
 rootProject.children.each {it.name="conductor-${it.name}"}
From 5bd3428e67c6c1974982ce95d710860bb2685be0 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 11 Jun 2018 18:26:05 +0200
Subject: [PATCH 044/163] grpc-client: TaskClient WIP
---
 .../conductor/client/grpc/TaskClient.java     | 155 ++++++++++++++++++
 1 file changed, 155 insertions(+)

diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
index 650854dcf0..b9b155cd7c 100644
--- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
@@ -1,9 +1,17 @@
 package com.netflix.conductor.client.grpc;
 
+import com.google.common.base.Preconditions;
 import com.netflix.conductor.common.metadata.tasks.Task;
+import com.netflix.conductor.common.metadata.tasks.TaskExecLog;
+import com.netflix.conductor.common.metadata.tasks.TaskResult;
 import com.netflix.conductor.grpc.TaskServiceGrpc;
 import com.netflix.conductor.grpc.TaskServicePb;
 import com.netflix.conductor.proto.TaskPb;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
 
 public class TaskClient extends ClientBase {
     private TaskServiceGrpc.TaskServiceBlockingStub stub;
@@ -13,7 +21,19 @@ public TaskClient(String address, int port) {
         this.stub = TaskServiceGrpc.newBlockingStub(this.channel);
     }
 
+    /**
+     * Perform a poll for a task of a specific task type.
+     *
+     * @param taskType The taskType to poll for
+     * @param domain The domain of the task type
+     * @param workerId Name of the client worker. Used for logging.
+     * @return Task waiting to be executed.
+     */
     public Task pollTask(String taskType, String workerId, String domain) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(domain), "Domain cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(workerId), "Worker id cannot be blank");
+
         TaskPb.Task task = stub.poll(
             TaskServicePb.PollRequest.newBuilder()
                 .setTaskType(taskType)
@@ -23,4 +43,139 @@ public Task pollTask(String taskType, String workerId, String domain) {
         );
         return protoMapper.fromProto(task);
     }
+
+    /**
+     * Retrieve pending tasks by type
+     *
+     * @param taskType Type of task
+     * @param startKey id of the task from where to return the results. NULL to start from the beginning.
+     * @param count number of tasks to retrieve
+     * @return Returns the list of PENDING tasks by type, starting with a given task Id.
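+     *         NOTE: not yet wired to a gRPC call; this currently returns null (see the TODO in the body).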
+     */
+    public List<Task> getPendingTasksByType(String taskType, Optional<String> startKey, Optional<Integer> count) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
+        // TODO
+        return null;
+    }
+
+    /**
+     * Retrieve pending task identified by reference name for a workflow
+     *
+     * @param workflowId Workflow instance id
+     * @param taskReferenceName reference name of the task
+     * @return Returns the pending workflow task identified by the reference name
+     */
+    public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceName) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "Workflow id cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskReferenceName), "Task reference name cannot be blank");
+
+        TaskPb.Task task = stub.getPendingTaskForWorkflow(
+            TaskServicePb.PendingTaskRequest.newBuilder()
+                .setWorkflowId(workflowId)
+                .setTaskRefName(taskReferenceName)
+                .build()
+        );
+        return protoMapper.fromProto(task);
+    }
+
+    /**
+     * Updates the result of a task execution.
+     *
+     * @param taskResult TaskResults to be updated.
+     */
+    public void updateTask(TaskResult taskResult) {
+        Preconditions.checkNotNull(taskResult, "Task result cannot be null");
+        stub.updateTask(protoMapper.toProto(taskResult));
+    }
+
+    /**
+     * Ack for the task poll.
+     *
+     * @param taskId Id of the task to be polled
+     * @param workerId user identified worker.
+     * @return true if the task was found with the given ID and acknowledged. False otherwise. If the server returns false, the client should NOT attempt to ack again.
+     */
+    public boolean ack(String taskId, String workerId) {
+        // TODO: Optional
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
+        return stub.ackTask(
+            TaskServicePb.AckTaskRequest.newBuilder()
+                .setTaskId(taskId)
+                .setWorkerId(workerId)
+                .build()
+        ).getAck();
+    }
+
+    /**
+     * Log execution messages for a task.
+     *
+     * @param taskId id of the task
+     * @param logMessage the message to be logged
+     */
+    public void logMessageForTask(String taskId, String logMessage) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
+        stub.addLog(
+            TaskServicePb.AddLogRequest.newBuilder()
+                .setTaskId(taskId)
+                .setLog(logMessage)
+                .build()
+        );
+    }
+
+    /**
+     * Fetch execution logs for a task.
+     *
+     * @param taskId id of the task.
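+     * @return the execution logs recorded for the given task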
+     */
+    public List<TaskExecLog> getTaskLogs(String taskId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
+        return stub.getTaskLogs(
+            TaskServicePb.TaskId.newBuilder().setTaskId(taskId).build()
+        ).getLogsList()
+            .stream()
+            .map(protoMapper::fromProto)
+            .collect(Collectors.toList());
+    }
+
+    /**
+     * Retrieve information about the task
+     *
+     * @param taskId ID of the task
+     * @return Task details
+     */
+    public Task getTaskDetails(String taskId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
+        return protoMapper.fromProto(
+            stub.getTask(TaskServicePb.TaskId.newBuilder().setTaskId(taskId).build())
+        );
+    }
+
+    /**
+     * Removes a task from a taskType queue
+     *
+     * @param taskType the taskType to identify the queue
+     * @param taskId the id of the task to be removed
+     */
+    public void removeTaskFromQueue(String taskType, String taskId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
+        stub.removeTaskFromQueue(
+            TaskServicePb.RemoveTaskRequest.newBuilder()
+                .setTaskType(taskType)
+                .setTaskId(taskId)
+                .build()
+        );
+    }
+
+    public int getQueueSizeForTask(String taskType) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
+
+        TaskServicePb.QueueSizesResponse sizes = stub.getQueueSizesForTasks(
+            TaskServicePb.QueueSizesRequest.newBuilder()
+                .addTaskTypes(taskType)
+                .build()
+        );
+
+        return sizes.getQueueForTaskOrDefault(taskType, 0);
+    }
 }
From 3d08e6e14a66df167cb6820640e549693ee428c8 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Wed, 13 Jun 2018 13:00:10 +0200
Subject: [PATCH 045/163] grpc-client: Implement WorkflowClient
---
 .../workflow/RerunWorkflowRequest.java        |   2 +-
 .../workflow/StartWorkflowRequest.java        |   2 +-
 .../conductor/client/grpc/WorkflowClient.java | 259 ++++++++++++++++++
 .../conductor/grpc/AbstractProtoMapper.java   |  26 ++
 4 files changed, 287 insertions(+), 2 deletions(-)
 create mode 100644 grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java

diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java
index 2636c3a7e5..bcab01a620 100644
--- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java
+++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java
@@ -20,7 +20,7 @@
 
 import java.util.Map;
 
-@ProtoMessage(toProto = false)
+@ProtoMessage
 public class RerunWorkflowRequest {
 
     @ProtoField(id = 1)
diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java
index 3e2e77cc6b..39d86c8981 100644
--- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java
+++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java
@@ -6,7 +6,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
-@ProtoMessage(toProto = false)
+@ProtoMessage
 public class StartWorkflowRequest {
     @ProtoField(id = 1)
     private String name;
diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java
new file mode 100644
index 0000000000..2274e81c1f
--- /dev/null
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java
@@ -0,0 +1,259 @@
+package com.netflix.conductor.client.grpc;
+
+import com.google.common.base.Preconditions;
+import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
+import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
+import com.netflix.conductor.common.run.Workflow;
+import com.netflix.conductor.grpc.WorkflowServiceGrpc;
+import com.netflix.conductor.grpc.WorkflowServicePb;
+import com.netflix.conductor.proto.WorkflowPb;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class WorkflowClient extends ClientBase {
+    private WorkflowServiceGrpc.WorkflowServiceBlockingStub stub;
+
+    public WorkflowClient(String address, int port) {
+        super(address, port);
+        this.stub = WorkflowServiceGrpc.newBlockingStub(this.channel);
+    }
+
+    /**
+     * Starts a workflow
+     *
+     * @param startWorkflowRequest the {@link StartWorkflowRequest} object to start the workflow
+     * @return the id of the workflow instance that can be used for tracking
+     */
+    public String startWorkflow(StartWorkflowRequest startWorkflowRequest) {
+        Preconditions.checkNotNull(startWorkflowRequest, "StartWorkflowRequest cannot be null");
+        return stub.startWorkflow(
+            protoMapper.toProto(startWorkflowRequest)
+        ).getWorkflowId();
+    }
+
+    /**
+     * Retrieve a workflow by workflow id
+     *
+     * @param workflowId the id of the workflow
+     * @param includeTasks specify if the tasks in the workflow need to be returned
+     * @return the requested workflow
+     */
+    public Workflow getWorkflow(String workflowId, boolean includeTasks) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
+        WorkflowPb.Workflow workflow = stub.getWorkflowStatus(
+            WorkflowServicePb.GetWorkflowStatusRequest.newBuilder()
+                .setWorkflowId(workflowId)
+                .setIncludeTasks(includeTasks)
+                .build()
+        );
+        return protoMapper.fromProto(workflow);
+    }
+
+    /**
+     * Retrieve all workflows for a given correlation id and name
+     *
+     * @param name the name of the workflow
+     * @param correlationId the correlation id
+     * @param includeClosed specify if all workflows are to be returned or only running workflows
+     * @param includeTasks specify if the tasks in the workflow need to be returned
+     * @return list of workflows for the given correlation id and name
+     */
+    public List<Workflow> getWorkflows(String name, String correlationId, boolean includeClosed, boolean includeTasks) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(correlationId), "correlationId cannot be blank");
+
+        WorkflowServicePb.GetWorkflowsResponse workflows = stub.getWorkflows(
+            WorkflowServicePb.GetWorkflowsRequest.newBuilder()
+                .setName(name)
+                .addCorrelationId(correlationId)
+                .setIncludeClosed(includeClosed)
+                .setIncludeTasks(includeTasks)
+                .build()
+        );
+
+        if (!workflows.containsWorkflowsById(correlationId)) {
+            return Collections.emptyList();
+        }
+
+        return workflows.getWorkflowsByIdOrThrow(correlationId)
+            .getWorkflowsList().stream()
+            .map(protoMapper::fromProto)
+            .collect(Collectors.toList());
+    }
+
+    /**
+     * Removes a workflow from the system
+     *
+     * @param workflowId the id of the workflow to be deleted
+     * @param archiveWorkflow flag to indicate if the workflow should be archived before deletion
+     */
+    public void deleteWorkflow(String workflowId, boolean archiveWorkflow) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "Workflow id cannot be blank");
+        stub.removeWorkflow(
+            WorkflowServicePb.RemoveWorkflowRequest.newBuilder()
+                .setWorkflowId(workflowId)
+                .setArchiveWorkflow(archiveWorkflow)
+                .build()
+        );
+    }
+
+    /**
+     * Retrieve all running workflow instances for a given name and version
+     *
+     * @param workflowName the name of the workflow
+     * @param version the version of the workflow definition. Defaults to 1.
+     * @return the list of running workflow instances
+     */
+    public List<String> getRunningWorkflow(String workflowName, Optional<Integer> version) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank");
+
+        WorkflowServicePb.GetRunningWorkflowsResponse workflows = stub.getRunningWorkflows(
+            WorkflowServicePb.GetRunningWorkflowsRequest.newBuilder()
+                .setName(workflowName)
+                .setVersion(version.orElse(1))
+                .build()
+        );
+        return workflows.getWorkflowIdsList();
+    }
+
+    /**
+     * Retrieve all workflow instances for a given workflow name between a specific time period
+     *
+     * @param workflowName the name of the workflow
+     * @param version the version of the workflow definition. Defaults to 1.
+     * @param startTime the start time of the period
+     * @param endTime the end time of the period
+     * @return returns a list of workflows created during the specified time period
+     */
+    public List<Workflow> getWorkflowsByTimePeriod(String workflowName, int version, Long startTime, Long endTime) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank");
+        Preconditions.checkNotNull(startTime, "Start time cannot be null");
+        Preconditions.checkNotNull(endTime, "End time cannot be null");
+        // TODO
+        return null;
+    }
+
+    /**
+     * Starts the decision task for the given workflow instance
+     *
+     * @param workflowId the id of the workflow instance
+     */
+    public void runDecider(String workflowId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
+        stub.decideWorkflow(WorkflowServicePb.WorkflowId.newBuilder()
+            .setWorkflowId(workflowId)
+            .build()
+        );
+    }
+
+    /**
+     * Pause a workflow by workflow id
+     *
+     * @param workflowId the workflow id of the workflow to be paused
+     */
+    public void pauseWorkflow(String workflowId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
+        stub.pauseWorkflow(WorkflowServicePb.WorkflowId.newBuilder()
+            .setWorkflowId(workflowId)
+            .build()
+        );
+    }
+
+    /**
+     * Resume a paused workflow by workflow id
+     *
+     * @param workflowId the workflow id of the paused workflow
+     */
+    public void resumeWorkflow(String workflowId) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
+        stub.resumeWorkflow(WorkflowServicePb.WorkflowId.newBuilder()
+            .setWorkflowId(workflowId)
+            .build()
+        );
+    }
+
+    /**
+     * Skips a given task from a current RUNNING workflow
+     *
+     * @param workflowId the id of the workflow instance
+     * @param taskReferenceName the reference name of the task to be skipped
+     */
+    public void skipTaskFromWorkflow(String workflowId, String taskReferenceName) {
+        Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank");
+        Preconditions.checkArgument(StringUtils.isNotBlank(taskReferenceName), "Task reference name cannot be blank");
+        stub.skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest.newBuilder()
.setWorkflowId(workflowId) + .setTaskReferenceName(taskReferenceName) + .build() + ); + } + + /** + * Reruns the workflow from a specific task + * + * @param rerunWorkflowRequest the request containing the task to rerun from + * @return the id of the workflow + */ + public String rerunWorkflow(RerunWorkflowRequest rerunWorkflowRequest) { + Preconditions.checkNotNull(rerunWorkflowRequest, "RerunWorkflowRequest cannot be null"); + return stub.rerunWorkflow( + protoMapper.toProto(rerunWorkflowRequest) + ).getWorkflowId(); + } + + /** + * Restart a completed workflow + * + * @param workflowId the workflow id of the workflow to be restarted + */ + public void restart(String workflowId) { + Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); + stub.restartWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + .setWorkflowId(workflowId) + .build() + ); + } + + /** + * Retries the last failed task in a workflow + * + * @param workflowId the workflow id of the workflow with the failed task + */ + public void retryLastFailedTask(String workflowId) { + Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); + stub.retryWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + .setWorkflowId(workflowId) + .build() + ); + } + + + /** + * Resets the callback times of all IN PROGRESS tasks to 0 for the given workflow + * + * @param workflowId the id of the workflow + */ + public void resetCallbacksForInProgressTasks(String workflowId) { + Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); + stub.resetWorkflowCallbacks(WorkflowServicePb.WorkflowId.newBuilder() + .setWorkflowId(workflowId) + .build() + ); + } + + /** + * Terminates the execution of the given workflow instance + * + * @param workflowId the id of the workflow to be terminated + * @param reason the reason to be logged and displayed + */ + public void terminateWorkflow(String workflowId, String reason) { + Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); + stub.terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest.newBuilder() + .setWorkflowId(workflowId) + .setReason(reason) + .build() + ); + } +} diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 94d2b574c8..19287c8080 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -544,6 +544,20 @@ public DynamicForkJoinTaskList fromProto( return to; } + public RerunWorkflowRequestPb.RerunWorkflowRequest toProto(RerunWorkflowRequest from) { + RerunWorkflowRequestPb.RerunWorkflowRequest.Builder to = RerunWorkflowRequestPb.RerunWorkflowRequest.newBuilder(); + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + for (Map.Entry pair : from.getWorkflowInput().entrySet()) { + to.putWorkflowInput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setReRunFromTaskId( from.getReRunFromTaskId() ); + for (Map.Entry pair : from.getTaskInput().entrySet()) { + to.putTaskInput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.setCorrelationId( from.getCorrelationId() ); + return to.build(); + } + public RerunWorkflowRequest fromProto(RerunWorkflowRequestPb.RerunWorkflowRequest from) { RerunWorkflowRequest to = new RerunWorkflowRequest(); to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); @@ -577,6 
+591,18 @@ public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { return to; } + public StartWorkflowRequestPb.StartWorkflowRequest toProto(StartWorkflowRequest from) { + StartWorkflowRequestPb.StartWorkflowRequest.Builder to = StartWorkflowRequestPb.StartWorkflowRequest.newBuilder(); + to.setName( from.getName() ); + to.setVersion( from.getVersion() ); + to.setCorrelationId( from.getCorrelationId() ); + for (Map.Entry pair : from.getInput().entrySet()) { + to.putInput( pair.getKey(), toProto( pair.getValue() ) ); + } + to.putAllTaskToDomain( from.getTaskToDomain() ); + return to.build(); + } + public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowRequest from) { StartWorkflowRequest to = new StartWorkflowRequest(); to.setName( from.getName() ); From d1cc718bb6848c35e31e377182c19cd2cc46f6bd Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 13 Jun 2018 17:23:40 +0200 Subject: [PATCH 046/163] grpc: Remove the 'All' methods from MetadataService These methods are deprecated on the REST API. Let's not port them over! --- .../conductor/grpc/server/MetadataServiceImpl.java | 12 ------------ grpc/src/main/proto/grpc/metadata_service.proto | 6 ------ 2 files changed, 18 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java index a383c574e3..c123831b1c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java @@ -68,12 +68,6 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver } } - @Override - public void getAllWorkflows(Empty _request, StreamObserver response) { - service.getWorkflowDefs().stream().map(protoMapper::toProto).forEach(response::onNext); - response.onCompleted(); - } - @Override public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { service.registerTaskDef( @@ -88,12 +82,6 @@ public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { response.onCompleted(); } - @Override - public void getAllTasks(Empty _request, StreamObserver response) { - service.getTaskDefs().stream().map(protoMapper::toProto).forEach(response::onNext); - response.onCompleted(); - } - @Override public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { TaskDef def = service.getTaskDef(req.getTaskType()); diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto index e92a85a860..6a5029326a 100644 --- a/grpc/src/main/proto/grpc/metadata_service.proto +++ b/grpc/src/main/proto/grpc/metadata_service.proto @@ -19,9 +19,6 @@ service MetadataService { // GET /workflow/{name} rpc GetWorkflow(GetWorkflowRequest) returns (conductor.proto.WorkflowDef); - // GET /workflow - rpc GetAllWorkflows(google.protobuf.Empty) returns (stream conductor.proto.WorkflowDef); - // POST /taskdefs rpc CreateTasks(CreateTasksRequest) returns (google.protobuf.Empty); @@ -31,9 +28,6 @@ service MetadataService { // GET /taskdefs/{tasktype} rpc GetTask(GetTaskRequest) returns (conductor.proto.TaskDef); - // GET /taskdefs - rpc GetAllTasks(google.protobuf.Empty) returns (stream conductor.proto.TaskDef); - // DELETE /taskdefs/{tasktype} rpc DeleteTask(GetTaskRequest) returns (google.protobuf.Empty); } From 43c1c3273071a8e59647ae43e49704bd8a027139 Mon Sep 17 00:00:00 2001 From: 
Vicent Marti Date: Wed, 13 Jun 2018 17:24:08 +0200 Subject: [PATCH 047/163] grpc-client: Implement MetadataService --- .../conductor/client/grpc/MetadataClient.java | 121 ++++++++++++ 1 file changed, 121 insertions(+) create mode 100644 grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java new file mode 100644 index 0000000000..7ec87b7222 --- /dev/null +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java @@ -0,0 +1,121 @@ +package com.netflix.conductor.client.grpc; + +import com.google.common.base.Preconditions; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.grpc.MetadataServiceGrpc; +import com.netflix.conductor.grpc.MetadataServicePb; +import com.netflix.conductor.proto.WorkflowDefPb; +import org.apache.commons.lang3.StringUtils; + +import java.util.List; + +public class MetadataClient extends ClientBase { + private MetadataServiceGrpc.MetadataServiceBlockingStub stub; + + public MetadataClient(String address, int port) { + super(address, port); + this.stub = MetadataServiceGrpc.newBlockingStub(this.channel); // assumption: ClientBase exposes the managed channel; without this init every call below would NPE + } + + /** + * Register a workflow definition with the server + * + * @param workflowDef the workflow definition + */ + public void registerWorkflowDef(WorkflowDef workflowDef) { + Preconditions.checkNotNull(workflowDef, "Workflow definition cannot be null"); + stub.createWorkflow( + protoMapper.toProto(workflowDef) + ); + } + + /** + * Updates a list of existing workflow definitions + * + * @param workflowDefs List of workflow definitions to be updated + */ + public void updateWorkflowDefs(List<WorkflowDef> workflowDefs) { + Preconditions.checkNotNull(workflowDefs, "Workflow defs list cannot be null"); + stub.updateWorkflows( + MetadataServicePb.UpdateWorkflowsRequest.newBuilder() + .addAllDefs( + workflowDefs.stream().map(protoMapper::toProto)::iterator + ) + .build() + ); + } + + /** + * Retrieve the workflow definition + * + * @param name the name of the workflow + * @param version the version of the workflow def + * @return Workflow definition for the given workflow and version + */ + public WorkflowDef getWorkflowDef(String name, Integer version) { + Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be blank"); + WorkflowDefPb.WorkflowDef workflowDef = stub.getWorkflow( + MetadataServicePb.GetWorkflowRequest.newBuilder() + .setName(name) + .setVersion(version) + .build() + ); + return protoMapper.fromProto(workflowDef); + } + + /** + * Registers a list of task types with the conductor server + * + * @param taskDefs List of task types to be registered.
+ */ + public void registerTaskDefs(List<TaskDef> taskDefs) { + Preconditions.checkNotNull(taskDefs, "Task defs list cannot be null"); + stub.createTasks(MetadataServicePb.CreateTasksRequest.newBuilder() + .addAllDefs( + taskDefs.stream().map(protoMapper::toProto)::iterator + ) + .build() + ); + } + + /** + * Updates an existing task definition + * + * @param taskDef the task definition to be updated + */ + public void updateTaskDef(TaskDef taskDef) { + Preconditions.checkNotNull(taskDef, "Task definition cannot be null"); + stub.updateTask(protoMapper.toProto(taskDef)); + } + + /** + * Retrieve the task definition of a given task type + * + * @param taskType type of task for which to retrieve the definition + * @return Task Definition for the given task type + */ + public TaskDef getTaskDef(String taskType) { + Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); + return protoMapper.fromProto( + stub.getTask(MetadataServicePb.GetTaskRequest.newBuilder() + .setTaskType(taskType) + .build() + ) + ); + } + + /** + * Removes the task definition of a task type from the conductor server. + * Use with caution. + * + * @param taskType Task type to be unregistered. + */ + public void unregisterTaskDef(String taskType) { + Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); + stub.deleteTask(MetadataServicePb.GetTaskRequest.newBuilder() + .setTaskType(taskType) + .build() + ); + } +} From 7a3ccc7d82bc56d31d02e3444b31244596eba3f7 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 15 Jun 2018 15:01:43 +0200 Subject: [PATCH 048/163] Completely re-wire the server bootstrap process. This re-wires things to be both more Guice-friendly and to allow users to load either the Jetty server, the gRPC server, or both, based on configuration parameters.
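For illustration, a minimal sketch of what this lets an embedder do (the class name and the enabled-flag values are illustrative only; the property names, the two-stage injector, and the start order come from the code in this patch, and both servers default to disabled here):

    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.netflix.conductor.bootstrap.BootstrapModule;
    import com.netflix.conductor.bootstrap.ModulesProvider;
    import com.netflix.conductor.grpc.server.GRPCServer;
    import com.netflix.conductor.grpc.server.GRPCServerProvider;
    import com.netflix.conductor.jetty.server.JettyServer;
    import com.netflix.conductor.jetty.server.JettyServerProvider;

    import java.util.Optional;

    // Hypothetical embedding of the new bootstrap; not part of this patch.
    public class StartConductor {
        public static void main(String[] args) throws Exception {
            // Both transports are off by default; opt in via system properties.
            System.setProperty("conductor.grpc.server.enabled", "true");
            System.setProperty("conductor.jetty.server.enabled", "true");

            // Stage 1: a small injector that only reads configuration and
            // decides which modules to load.
            Injector bootstrap = Guice.createInjector(new BootstrapModule());
            // Stage 2: the real server injector, built from those modules.
            Injector server = Guice.createInjector(
                    bootstrap.getInstance(ModulesProvider.class).get());

            // Each provider yields an Optional; a disabled server is simply
            // absent, so users get Jetty, gRPC, both, or neither.
            Optional<GRPCServer> grpc = server.getInstance(GRPCServerProvider.class).get();
            if (grpc.isPresent()) grpc.get().start();

            Optional<JettyServer> jetty = server.getInstance(JettyServerProvider.class).get();
            if (jetty.isPresent()) jetty.get().start();
        }
    }

Jetty is started last because its join flag defaults to true, so start() may block on server.join(); this mirrors the ordering in the new bootstrap Main below.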
--- .../netflix/conductor/service/Lifecycle.java | 18 ++ grpc-server/build.gradle | 23 -- .../conductor/grpc/server/GRPCModule.java | 48 +--- .../conductor/grpc/server/GRPCServer.java | 18 +- .../grpc/server/GRPCServerConfiguration.java | 19 ++ .../grpc/server/GRPCServerProvider.java | 43 +++- .../server/GRPCServerSystemConfiguration.java | 6 + .../netflix/conductor/grpc/server/Main.java | 53 ----- .../{ => service}/EventServiceImpl.java | 9 +- .../grpc/server/{ => service}/GRPCHelper.java | 2 +- .../{ => service}/MetadataServiceImpl.java | 2 +- .../server/{ => service}/TaskServiceImpl.java | 2 +- .../{ => service}/WorkflowServiceImpl.java | 2 +- server/build.gradle | 1 + .../conductor/bootstrap/BootstrapModule.java | 14 ++ .../com/netflix/conductor/bootstrap/Main.java | 82 +++++++ .../conductor/bootstrap/ModulesProvider.java | 127 ++++++++++ .../conductor/jetty/server/JettyModule.java | 11 + .../conductor/jetty/server/JettyServer.java | 123 ++++++++++ .../server/JettyServerConfiguration.java | 26 +++ .../jetty/server/JettyServerProvider.java | 26 +++ .../JettyServerSystemConfiguration.java | 6 + .../conductor/server/ConductorServer.java | 218 ------------------ .../com/netflix/conductor/server/Main.java | 67 ------ .../conductor/server/ServerModule.java | 4 + .../server/ServletContextListner.java | 4 +- .../tests/integration/End2EndTests.java | 35 +-- 27 files changed, 547 insertions(+), 442 deletions(-) create mode 100644 core/src/main/java/com/netflix/conductor/service/Lifecycle.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerSystemConfiguration.java delete mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{ => service}/EventServiceImpl.java (98%) rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{ => service}/GRPCHelper.java (98%) rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{ => service}/MetadataServiceImpl.java (98%) rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{ => service}/TaskServiceImpl.java (99%) rename grpc-server/src/main/java/com/netflix/conductor/grpc/server/{ => service}/WorkflowServiceImpl.java (99%) create mode 100644 server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java create mode 100644 server/src/main/java/com/netflix/conductor/bootstrap/Main.java create mode 100644 server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java create mode 100644 server/src/main/java/com/netflix/conductor/jetty/server/JettyModule.java create mode 100644 server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java create mode 100644 server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java create mode 100644 server/src/main/java/com/netflix/conductor/jetty/server/JettyServerProvider.java create mode 100644 server/src/main/java/com/netflix/conductor/jetty/server/JettyServerSystemConfiguration.java delete mode 100644 server/src/main/java/com/netflix/conductor/server/ConductorServer.java delete mode 100644 server/src/main/java/com/netflix/conductor/server/Main.java diff --git a/core/src/main/java/com/netflix/conductor/service/Lifecycle.java b/core/src/main/java/com/netflix/conductor/service/Lifecycle.java new file mode 100644 index 0000000000..e84a0fca57 --- /dev/null +++ 
b/core/src/main/java/com/netflix/conductor/service/Lifecycle.java @@ -0,0 +1,18 @@ +package com.netflix.conductor.service; + +public interface Lifecycle { + + default void start() throws Exception { + registerShutdownHook(); + } + + void stop() throws Exception; + + default void registerShutdownHook() { + Runtime.getRuntime().addShutdownHook(new Thread(() -> { + try { + stop(); + } catch (Exception e) {} + })); + } +} diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index 08f8cdcbca..4efa7138ee 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -8,29 +8,6 @@ dependencies { compile project(':conductor-core') compile project(':conductor-grpc') - //FIXME Right now this brings a lot of stuff along for the ride. :-( - compile project(':conductor-server') - compile project(':conductor-es5-persistence') - compile "io.grpc:grpc-netty:${revGrpc}" compile "log4j:log4j:1.2.17" } - -// FIXME This is temporary until the server module refactoring is completed. -build.dependsOn('shadowJar') - -shadowJar { - mergeServiceFiles() - configurations = [project.configurations.compile] - manifest { - attributes 'Description': 'Self contained Conductor server jar' - attributes 'Main-Class' : 'com.netflix.conductor.grpc.server.Main' - } -} - -task server(type: JavaExec) { - systemProperty 'workflow.elasticsearch.url', 'localhost:9300' - systemProperties System.properties - main = 'com.netflix.conductor.grpc.server.Main' - classpath = sourceSets.test.runtimeClasspath -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java index a6136ec00b..acd53a1d9d 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java @@ -1,59 +1,27 @@ package com.netflix.conductor.grpc.server; import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.config.CoreModule; -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5; -import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; import com.netflix.conductor.grpc.EventServiceGrpc; import com.netflix.conductor.grpc.MetadataServiceGrpc; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; - -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; - -import javax.inject.Singleton; +import com.netflix.conductor.grpc.server.service.EventServiceImpl; +import com.netflix.conductor.grpc.server.service.MetadataServiceImpl; +import com.netflix.conductor.grpc.server.service.TaskServiceImpl; +import com.netflix.conductor.grpc.server.service.WorkflowServiceImpl; public class GRPCModule extends AbstractModule { - // FIXME Eventually this should be shared with the Jersey code and provided by the server module. 
- private final int maxThreads = 50; - private ExecutorService es; - - @Override protected void configure() { - configureExecutorService(); - install(new CoreModule()); - install(new ElasticSearchModuleV5()); - install(new MySQLWorkflowModule()); - - bind(Configuration.class).to(SystemPropertiesConfiguration.class).in(Singleton.class); - - bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); + bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class); bind(MetadataServiceGrpc.MetadataServiceImplBase.class).to(MetadataServiceImpl.class); + bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); bind(WorkflowServiceGrpc.WorkflowServiceImplBase.class).to(WorkflowServiceImpl.class); - bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class); - - bind(GRPCServer.class).toProvider(GRPCServerProvider.class).asEagerSingleton(); - } - - @Provides - public ExecutorService getExecutorService(){ - return this.es; - } - private void configureExecutorService(){ - AtomicInteger count = new AtomicInteger(0); - this.es = java.util.concurrent.Executors.newFixedThreadPool(maxThreads, runnable -> { - Thread conductorWorkerThread = new Thread(runnable); - conductorWorkerThread.setName("conductor-worker-" + count.getAndIncrement()); - return conductorWorkerThread; - }); + bind(GRPCServerConfiguration.class).to(GRPCServerSystemConfiguration.class); + bind(GRPCServerProvider.class); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index b92e9d3cbc..c3b9c6f92c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -1,8 +1,6 @@ package com.netflix.conductor.grpc.server; -import com.google.inject.Inject; - -import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.service.Lifecycle; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,28 +15,26 @@ import io.grpc.ServerBuilder; @Singleton -public class GRPCServer { +public class GRPCServer implements Lifecycle { + private static final Logger logger = LoggerFactory.getLogger(GRPCServer.class); private final Server server; - public final static String CONFIG_PORT = "grpc.port"; - public final static int CONFIG_PORT_DEFAULT = 8090; - - @Inject - public GRPCServer(Configuration conf, BindableService... services) { - final int port = conf.getIntProperty(CONFIG_PORT, CONFIG_PORT_DEFAULT); - + public GRPCServer(int port, BindableService... 
services) { ServerBuilder builder = ServerBuilder.forPort(port); Arrays.stream(services).forEach(builder::addService); server = builder.build(); } + @Override public void start() throws IOException { + registerShutdownHook(); server.start(); logger.info("grpc: Server started, listening on " + server.getPort()); } + @Override public void stop() { if (server != null) { logger.info("grpc: server shutting down"); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java new file mode 100644 index 0000000000..6122d8be1b --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java @@ -0,0 +1,19 @@ +package com.netflix.conductor.grpc.server; + +import com.netflix.conductor.core.config.Configuration; + +public interface GRPCServerConfiguration extends Configuration { + String ENABLED_PROPERTY_NAME = "conductor.grpc.server.enabled"; + boolean ENABLED_DEFAULT_VALUE = false; + + String PORT_PROPERTY_NAME = "conductor.grpc.server.port"; + int PORT_DEFAULT_VALUE = 8090; + + default boolean isEnabled(){ + return getBooleanProperty(ENABLED_PROPERTY_NAME, ENABLED_DEFAULT_VALUE); + } + + default int getPort(){ + return getIntProperty(PORT_PROPERTY_NAME, PORT_DEFAULT_VALUE); + } +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java index 6ec307204d..83a80a0cee 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -1,20 +1,51 @@ package com.netflix.conductor.grpc.server; -import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.grpc.EventServiceGrpc; +import com.netflix.conductor.grpc.MetadataServiceGrpc; +import com.netflix.conductor.grpc.TaskServiceGrpc; +import com.netflix.conductor.grpc.WorkflowServiceGrpc; + +import java.util.Optional; import javax.inject.Inject; import javax.inject.Provider; -public class GRPCServerProvider implements Provider { - private final Configuration configuration; +import io.grpc.BindableService; + +public class GRPCServerProvider implements Provider> { + + private final GRPCServerConfiguration configuration; + private final BindableService eventServiceImpl; + private final BindableService metadataSercieImpl; + private final BindableService taskServiceImpl; + private final BindableService workflowServiceImpl; @Inject - public GRPCServerProvider(Configuration conf) { + public GRPCServerProvider( + GRPCServerConfiguration conf, + EventServiceGrpc.EventServiceImplBase eventServiceImpl, + MetadataServiceGrpc.MetadataServiceImplBase metadataServiceImpl, + TaskServiceGrpc.TaskServiceImplBase taskServiceImpl, + WorkflowServiceGrpc.WorkflowServiceImplBase workflowServiceImpl + ) { this.configuration = conf; + this.eventServiceImpl = eventServiceImpl; + this.metadataSercieImpl = metadataServiceImpl; + this.taskServiceImpl = taskServiceImpl; + this.workflowServiceImpl = workflowServiceImpl; } @Override - public GRPCServer get() { - return new GRPCServer(configuration); + public Optional get() { + return configuration.isEnabled() ? 
+ Optional.of( + new GRPCServer( + configuration.getPort(), + eventServiceImpl, + metadataSercieImpl, + taskServiceImpl, + workflowServiceImpl + )) + : Optional.empty(); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerSystemConfiguration.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerSystemConfiguration.java new file mode 100644 index 0000000000..a9ff4900ad --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerSystemConfiguration.java @@ -0,0 +1,6 @@ +package com.netflix.conductor.grpc.server; + +import com.netflix.conductor.core.config.SystemPropertiesConfiguration; + +public class GRPCServerSystemConfiguration extends SystemPropertiesConfiguration implements GRPCServerConfiguration { +} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java deleted file mode 100644 index bfc1bcb74f..0000000000 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/Main.java +++ /dev/null @@ -1,53 +0,0 @@ -package com.netflix.conductor.grpc.server; - -import com.google.inject.Guice; -import com.google.inject.Injector; - -import org.apache.log4j.PropertyConfigurator; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Properties; - -public class Main { - - public static void main(String args[]) throws Exception { - - //FIXME This was copy pasted and seems like a bad way to load a config, given that is has side affects. - loadConfigFile(args.length > 0 ? args[0] : System.getenv("CONDUCTOR_CONFIG_FILE")); - - if (args.length == 2) { - System.out.println("Using log4j config " + args[1]); - PropertyConfigurator.configure(new FileInputStream(new File(args[1]))); - } - - Injector injector = Guice.createInjector(new GRPCModule()); - GRPCServer server = injector.getInstance(GRPCServer.class); - - Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { - @Override - public void run() { - server.stop(); - } - })); - - System.out.println("\n\n\n"); - System.out.println(" _ _ "); - System.out.println(" ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ "); - System.out.println(" / __/ _ \\| '_ \\ / _` | | | |/ __| __/ _ \\| '__|"); - System.out.println("| (_| (_) | | | | (_| | |_| | (__| || (_) | | "); - System.out.println(" \\___\\___/|_| |_|\\__,_|\\__,_|\\___|\\__\\___/|_| "); - System.out.println("\n\n\n"); - - server.start(); - } - - private static void loadConfigFile(String propertyFile) throws IOException { - if (propertyFile == null) return; - System.out.println("Using config file" + propertyFile); - Properties props = new Properties(System.getProperties()); - props.load(new FileInputStream(propertyFile)); - System.setProperties(props); - } -} diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java similarity index 98% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java index f5ff20f9e3..8dd93564bf 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java @@ -1,6 +1,7 @@ -package com.netflix.conductor.grpc.server; +package 
com.netflix.conductor.grpc.server.service; import com.google.protobuf.Empty; + import com.netflix.conductor.core.events.EventProcessor; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.grpc.EventServiceGrpc; @@ -8,11 +9,13 @@ import com.netflix.conductor.grpc.ProtoMapper; import com.netflix.conductor.proto.EventHandlerPb; import com.netflix.conductor.service.MetadataService; -import io.grpc.stub.StreamObserver; -import javax.inject.Inject; import java.util.Map; +import javax.inject.Inject; + +import io.grpc.stub.StreamObserver; + public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java similarity index 98% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java index a2b19c9cb6..0aa5eae490 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCHelper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java @@ -1,4 +1,4 @@ -package com.netflix.conductor.grpc.server; +package com.netflix.conductor.grpc.server.service; import com.google.rpc.DebugInfo; import io.grpc.Metadata; diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java similarity index 98% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index c123831b1c..39a79a1e37 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -1,4 +1,4 @@ -package com.netflix.conductor.grpc.server; +package com.netflix.conductor.grpc.server.service; import com.google.protobuf.Empty; import com.netflix.conductor.common.metadata.tasks.TaskDef; diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java similarity index 99% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index b74148b7ee..3ae78d3d0c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -1,4 +1,4 @@ -package com.netflix.conductor.grpc.server; +package com.netflix.conductor.grpc.server.service; import java.util.Comparator; import java.util.HashMap; diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java similarity index 99% rename from grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java rename to grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 078f0cc6f4..fe09d9d635 100644 
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -1,4 +1,4 @@ -package com.netflix.conductor.grpc.server; +package com.netflix.conductor.grpc.server.service; import com.google.protobuf.Empty; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; diff --git a/server/build.gradle b/server/build.gradle index 64ecf24f2f..018f9eb151 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -27,6 +27,7 @@ dependencies { compile project(':conductor-contribs') compile project(':conductor-es2-persistence') compile project(':conductor-es5-persistence') + compile project(':conductor-grpc-server') //Jetty compile "org.eclipse.jetty:jetty-server:${revJetteyServer}" diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java b/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java new file mode 100644 index 0000000000..0a1f9066b4 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java @@ -0,0 +1,14 @@ +package com.netflix.conductor.bootstrap; + +import com.google.inject.AbstractModule; + +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.core.config.SystemPropertiesConfiguration; + +public class BootstrapModule extends AbstractModule { + @Override + protected void configure() { + bind(Configuration.class).to(SystemPropertiesConfiguration.class); + bind(ModulesProvider.class); + } +} diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java new file mode 100644 index 0000000000..a517b38385 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java @@ -0,0 +1,82 @@ +/** + * Copyright 2017 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +/** + * + */ +package com.netflix.conductor.bootstrap; + +import com.google.inject.Guice; +import com.google.inject.Injector; + +import com.netflix.conductor.grpc.server.GRPCServerProvider; +import com.netflix.conductor.jetty.server.JettyServerProvider; + +import org.apache.log4j.PropertyConfigurator; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.util.Properties; + +/** + * @author Viren Entry point for the server + */ +public class Main { + + public static void main(String[] args) throws Exception { + + loadConfigFile(args.length > 0 ? 
args[0] : System.getenv("CONDUCTOR_CONFIG_FILE")); + + if (args.length == 2) { + System.out.println("Using log4j config " + args[1]); + PropertyConfigurator.configure(new FileInputStream(new File(args[1]))); + } + + Injector bootstrapInjector = Guice.createInjector(new BootstrapModule()); + ModulesProvider modulesProvider = bootstrapInjector.getInstance(ModulesProvider.class); + Injector serverInjector = Guice.createInjector(modulesProvider.get()); + + System.out.println("\n\n\n"); + System.out.println(" _ _ "); + System.out.println(" ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ "); + System.out.println(" / __/ _ \\| '_ \\ / _` | | | |/ __| __/ _ \\| '__|"); + System.out.println("| (_| (_) | | | | (_| | |_| | (__| || (_) | | "); + System.out.println(" \\___\\___/|_| |_|\\__,_|\\__,_|\\___|\\__\\___/|_| "); + System.out.println("\n\n\n"); + + serverInjector.getInstance(GRPCServerProvider.class).get().ifPresent(server -> { + try { + server.start(); + } catch (IOException ioe) { + System.exit(3); + } + }); + + serverInjector.getInstance(JettyServerProvider.class).get().ifPresent(server -> { + try { + server.start(); + } catch (Exception ioe) { + System.exit(3); + } + }); + + } + + private static void loadConfigFile(String propertyFile) throws IOException { + if (propertyFile == null) return; + System.out.println("Using config file" + propertyFile); + Properties props = new Properties(System.getProperties()); + props.load(new FileInputStream(propertyFile)); + System.setProperties(props); + } +} diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java new file mode 100644 index 0000000000..0f60f3e853 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java @@ -0,0 +1,127 @@ +package com.netflix.conductor.bootstrap; + +import com.google.inject.AbstractModule; +import com.google.inject.ProvisionException; + +import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.dao.RedisWorkflowModule; +import com.netflix.conductor.dao.es.EmbeddedElasticSearch; +import com.netflix.conductor.dao.es.index.ElasticSearchModule; +import com.netflix.conductor.dao.es5.EmbeddedElasticSearchV5; +import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5; +import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; +import com.netflix.conductor.server.DynomiteClusterModule; +import com.netflix.conductor.server.JerseyModule; +import com.netflix.conductor.server.LocalRedisModule; +import com.netflix.conductor.server.RedisClusterModule; +import com.netflix.conductor.server.ServerModule; +import com.netflix.conductor.server.SwaggerModule; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import javax.inject.Inject; +import javax.inject.Provider; + +// TODO Investigate whether this should really be a ThrowingProvider. 
+public class ModulesProvider implements Provider> { + private static final Logger logger = LoggerFactory.getLogger(ModulesProvider.class); + + private final Configuration configuration; + + @Inject + public ModulesProvider(Configuration configuration){ + this.configuration = configuration; + } + + @Override + public List get() { + List modulesToLoad = new ArrayList<>(); + + modulesToLoad.addAll(selectModulesToLoad()); + modulesToLoad.addAll(configuration.getAdditionalModules()); + + return modulesToLoad; + } + + private List selectModulesToLoad() { + Configuration.DB database = null; + List modules = new ArrayList<>(); + + try { + database = configuration.getDB(); + } catch (IllegalArgumentException ie) { + final String message = "Invalid db name: " + configuration.getDBString() + + ", supported values are: " + Arrays.toString(Configuration.DB.values()); + logger.error(message); + throw new ProvisionException(message,ie); + } + + switch (database) { + case REDIS: + case DYNOMITE: + modules.add(new DynomiteClusterModule()); + modules.add(new RedisWorkflowModule()); + logger.info("Starting conductor server using dynomite/redis cluster."); + break; + + case MYSQL: + modules.add(new MySQLWorkflowModule()); + modules.add(new MySQLWorkflowModule()); + logger.info("Starting conductor server using MySQL data store", database); + break; + case MEMORY: + // TODO This ES logic should probably live elsewhere. + try { + if ( + configuration.getIntProperty( + "workflow.elasticsearch.version", + 2 + ) == 5) { + EmbeddedElasticSearchV5.start(); + } else { + // Use ES2 as default. + EmbeddedElasticSearch.start(); + } + if (System.getProperty("workflow.elasticsearch.url") == null) { + System.setProperty("workflow.elasticsearch.url", "localhost:9300"); + } + if (System.getProperty("workflow.elasticsearch.index.name") == null) { + System.setProperty("workflow.elasticsearch.index.name", "conductor"); + } + } catch (Exception e) { + logger.error("Error starting embedded elasticsearch. 
Search functionality will be impacted: " + e.getMessage(), e); + } + + modules.add(new LocalRedisModule()); + modules.add(new RedisWorkflowModule()); + logger.info("Starting conductor server using in memory data store"); + break; + + case REDIS_CLUSTER: + modules.add(new RedisClusterModule()); + modules.add(new RedisWorkflowModule()); + logger.info("Starting conductor server using redis_cluster."); + break; + } + + if (configuration.getIntProperty("workflow.elasticsearch.version", 2) == 5) { + modules.add(new ElasticSearchModuleV5()); + } else { + modules.add(new ElasticSearchModule()); + } + + if (configuration.getJerseyEnabled()) { + modules.add(new JerseyModule()); + modules.add(new SwaggerModule()); + } + + modules.add(new ServerModule()); + + return modules; + } +} diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyModule.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyModule.java new file mode 100644 index 0000000000..e20a625e99 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyModule.java @@ -0,0 +1,11 @@ +package com.netflix.conductor.jetty.server; + +import com.google.inject.AbstractModule; + +public class JettyModule extends AbstractModule { + @Override + protected void configure() { + bind(JettyServerConfiguration.class).to(JettyServerSystemConfiguration.class); + bind(JettyServerProvider.class); + } +} diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java new file mode 100644 index 0000000000..74b56073b1 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java @@ -0,0 +1,123 @@ +/** + * Copyright 2017 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
+ */ +/** + * + */ +package com.netflix.conductor.jetty.server; + +import com.google.inject.servlet.GuiceFilter; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.netflix.conductor.bootstrap.Main; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.service.Lifecycle; +import com.sun.jersey.api.client.Client; + +import org.eclipse.jetty.server.Server; +import org.eclipse.jetty.servlet.ServletContextHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InputStream; +import java.util.EnumSet; +import java.util.LinkedList; +import java.util.List; + +import javax.servlet.DispatcherType; +import javax.ws.rs.core.MediaType; + +/** + * @author Viren + */ +public class JettyServer implements Lifecycle { + + private static Logger logger = LoggerFactory.getLogger(JettyServer.class); + + private final int port; + private final boolean join; + + private Server server; + + + public JettyServer(int port, boolean join) { + this.port = port; + this.join = join; + } + + + @Override + public synchronized void start() throws Exception { + + if (server != null) { + throw new IllegalStateException("Server is already running"); + } + + this.server = new Server(port); + + ServletContextHandler context = new ServletContextHandler(); + context.addFilter(GuiceFilter.class, "/*", EnumSet.allOf(DispatcherType.class)); + context.setWelcomeFiles(new String[]{"index.html"}); + + server.setHandler(context); + + server.start(); + System.out.println("Started server on http://localhost:" + port + "/"); + try { + boolean create = Boolean.getBoolean("loadSample"); + if (create) { + System.out.println("Creating kitchensink workflow"); + createKitchenSink(port); + } + } catch (Exception e) { + logger.error(e.getMessage(), e); + } + + if (join) { + server.join(); + } + + } + + public synchronized void stop() throws Exception { + if (server == null) { + throw new IllegalStateException("Server is not running. 
call #start() method to start the server"); + } + server.stop(); + server = null; + } + + + private static void createKitchenSink(int port) throws Exception { + + List taskDefs = new LinkedList<>(); + for (int i = 0; i < 40; i++) { + taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0)); + } + taskDefs.add(new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0)); + + Client client = Client.create(); + ObjectMapper om = new ObjectMapper(); + client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs)); + + InputStream stream = Main.class.getResourceAsStream("/kitchensink.json"); + client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); + + stream = Main.class.getResourceAsStream("/sub_flow_1.json"); + client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); + + String input = "{\"task2Name\":\"task_5\"}"; + client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(input); + + logger.info("Kitchen sink workflows are created!"); + } +} diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java new file mode 100644 index 0000000000..23f52e3fca --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java @@ -0,0 +1,26 @@ +package com.netflix.conductor.jetty.server; + +import com.netflix.conductor.core.config.Configuration; + +public interface JettyServerConfiguration extends Configuration { + String ENABLED_PROPERTY_NAME = "conductor.jetty.server.enabled"; + boolean ENABLED_DEFAULT_VALUE = false; + + String PORT_PROPERTY_NAME = "conductor.jetty.server.port"; + int PORT_DEFAULT_VALUE = 8080; + + String JOIN_PROPERTY_NAME = "conductor.jetty.server.join"; + boolean JOIN_DEFAULT_VALUE = true; + + default boolean isEnabled(){ + return getBooleanProperty(ENABLED_PROPERTY_NAME, ENABLED_DEFAULT_VALUE); + } + + default int getPort() { + return getIntProperty(PORT_PROPERTY_NAME, PORT_DEFAULT_VALUE); + } + + default boolean isJoin(){ + return getBooleanProperty(JOIN_PROPERTY_NAME, JOIN_DEFAULT_VALUE); + } +} diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerProvider.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerProvider.java new file mode 100644 index 0000000000..ed1ccc75e8 --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerProvider.java @@ -0,0 +1,26 @@ +package com.netflix.conductor.jetty.server; + +import java.util.Optional; + +import javax.inject.Inject; +import javax.inject.Provider; + +public class JettyServerProvider implements Provider> { + private final JettyServerConfiguration configuration; + + @Inject + public JettyServerProvider(JettyServerConfiguration configuration) { + this.configuration = configuration; + } + + @Override + public Optional get() { + return configuration.isEnabled() ? 
+ Optional.of( + new JettyServer( + configuration.getPort(), + configuration.isJoin() + )) + : Optional.empty(); + } +} diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerSystemConfiguration.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerSystemConfiguration.java new file mode 100644 index 0000000000..869c850c3c --- /dev/null +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerSystemConfiguration.java @@ -0,0 +1,6 @@ +package com.netflix.conductor.jetty.server; + +import com.netflix.conductor.core.config.SystemPropertiesConfiguration; + +public class JettyServerSystemConfiguration extends SystemPropertiesConfiguration implements JettyServerConfiguration { +} diff --git a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java b/server/src/main/java/com/netflix/conductor/server/ConductorServer.java deleted file mode 100644 index ab1f6f81e6..0000000000 --- a/server/src/main/java/com/netflix/conductor/server/ConductorServer.java +++ /dev/null @@ -1,218 +0,0 @@ -/** - * Copyright 2017 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.server; - -import com.google.inject.AbstractModule; -import com.google.inject.Guice; -import com.google.inject.servlet.GuiceFilter; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import com.netflix.conductor.dao.RedisWorkflowModule; -import com.netflix.conductor.dao.es.EmbeddedElasticSearch; -import com.netflix.conductor.dao.es.index.ElasticSearchModule; -import com.netflix.conductor.dao.es5.EmbeddedElasticSearchV5; -import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5; -import com.netflix.conductor.dao.mysql.MySQLWorkflowModule; -import com.sun.jersey.api.client.Client; - -import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.InputStream; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.EnumSet; -import java.util.LinkedList; -import java.util.List; - -import javax.servlet.DispatcherType; -import javax.ws.rs.core.MediaType; - -/** - * @author Viren - */ -public class ConductorServer { - - private static Logger logger = LoggerFactory.getLogger(ConductorServer.class); - - private Server server; - - private SystemPropertiesConfiguration systemPropertiesConfiguration; - - public ConductorServer(SystemPropertiesConfiguration systemPropertiesConfiguration) { - this.systemPropertiesConfiguration = systemPropertiesConfiguration; - } - - private List selectModulesToLoad() { - Configuration.DB database = null; - List modules = new ArrayList<>(); - - try { - database = systemPropertiesConfiguration.getDB(); - } catch (IllegalArgumentException ie) { - logger.error("Invalid db 
name: " + systemPropertiesConfiguration.getDBString() - + ", supported values are: " + Arrays.toString(Configuration.DB.values())); - System.exit(1); - } - - switch (database) { - case REDIS: - case DYNOMITE: - modules.add(new DynomiteClusterModule()); - modules.add(new RedisWorkflowModule()); - logger.info("Starting conductor server using dynomite/redis cluster."); - break; - - case MYSQL: - modules.add(new MySQLWorkflowModule()); - modules.add(new MySQLWorkflowModule()); - logger.info("Starting conductor server using MySQL data store", database); - break; - case MEMORY: - // TODO This ES logic should probably live elsewhere. - try { - if ( - systemPropertiesConfiguration.getIntProperty( - "workflow.elasticsearch.version", - 2 - ) == 5) { - EmbeddedElasticSearchV5.start(); - } else { - // Use ES2 as default. - EmbeddedElasticSearch.start(); - } - if (System.getProperty("workflow.elasticsearch.url") == null) { - System.setProperty("workflow.elasticsearch.url", "localhost:9300"); - } - if (System.getProperty("workflow.elasticsearch.index.name") == null) { - System.setProperty("workflow.elasticsearch.index.name", "conductor"); - } - } catch (Exception e) { - logger.error("Error starting embedded elasticsearch. Search functionality will be impacted: " + e.getMessage(), e); - } - - modules.add(new LocalRedisModule()); - modules.add(new RedisWorkflowModule()); - logger.info("Starting conductor server using in memory data store"); - break; - - case REDIS_CLUSTER: - modules.add(new RedisClusterModule()); - modules.add(new RedisWorkflowModule()); - logger.info("Starting conductor server using redis_cluster."); - break; - } - - if (systemPropertiesConfiguration.getIntProperty("workflow.elasticsearch.version", 2) == 5) { - modules.add(new ElasticSearchModuleV5()); - } else { - modules.add(new ElasticSearchModule()); - } - - if (systemPropertiesConfiguration.getJerseyEnabled()) { - modules.add(new JerseyModule()); - modules.add(new SwaggerModule()); - } - - modules.add(new ServerModule()); - - return modules; - } - - public synchronized void start(int port, boolean join) throws Exception { - - if (server != null) { - throw new IllegalStateException("Server is already running"); - } - - Guice.createInjector(getModulesToLoad()); - - this.server = new Server(port); - - ServletContextHandler context = new ServletContextHandler(); - context.addFilter(GuiceFilter.class, "/*", EnumSet.allOf(DispatcherType.class)); - context.setWelcomeFiles(new String[]{"index.html"}); - - server.setHandler(context); - - server.start(); - System.out.println("Started server on http://localhost:" + port + "/"); - try { - boolean create = Boolean.getBoolean("loadSample"); - if (create) { - System.out.println("Creating kitchensink workflow"); - createKitchenSink(port); - } - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - if (join) { - server.join(); - } - - } - - public synchronized void stop() throws Exception { - if (server == null) { - throw new IllegalStateException("Server is not running. 
call #start() method to start the server"); - } - server.stop(); - server = null; - } - - private List getAdditionalModules() { - return systemPropertiesConfiguration.getAdditionalModules(); - } - - public List getModulesToLoad() { - List modulesToLoad = new ArrayList<>(); - - modulesToLoad.addAll(selectModulesToLoad()); - modulesToLoad.addAll(getAdditionalModules()); - - return modulesToLoad; - } - - private static void createKitchenSink(int port) throws Exception { - - List taskDefs = new LinkedList<>(); - for (int i = 0; i < 40; i++) { - taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0)); - } - taskDefs.add(new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0)); - - Client client = Client.create(); - ObjectMapper om = new ObjectMapper(); - client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs)); - - InputStream stream = Main.class.getResourceAsStream("/kitchensink.json"); - client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); - - stream = Main.class.getResourceAsStream("/sub_flow_1.json"); - client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); - - String input = "{\"task2Name\":\"task_5\"}"; - client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(input); - - logger.info("Kitchen sink workflows are created!"); - } -} diff --git a/server/src/main/java/com/netflix/conductor/server/Main.java b/server/src/main/java/com/netflix/conductor/server/Main.java deleted file mode 100644 index c4b515c5c5..0000000000 --- a/server/src/main/java/com/netflix/conductor/server/Main.java +++ /dev/null @@ -1,67 +0,0 @@ -/** - * Copyright 2017 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.server; - -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; - -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.util.Properties; - -import org.apache.log4j.PropertyConfigurator; - -/** - * @author Viren - * Entry point for the server - */ -public class Main { - - public static void main(String[] args) throws Exception { - - loadConfigFile(args.length > 0 ? 
args[0] : System.getenv("CONDUCTOR_CONFIG_FILE")); - - if(args.length == 2) { - System.out.println("Using log4j config " + args[1]); - PropertyConfigurator.configure(new FileInputStream(new File(args[1]))); - } - - SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); - ConductorServer server = new ConductorServer(config); - - System.out.println("\n\n\n"); - System.out.println(" _ _ "); - System.out.println(" ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ "); - System.out.println(" / __/ _ \\| '_ \\ / _` | | | |/ __| __/ _ \\| '__|"); - System.out.println("| (_| (_) | | | | (_| | |_| | (__| || (_) | | "); - System.out.println(" \\___\\___/|_| |_|\\__,_|\\__,_|\\___|\\__\\___/|_| "); - System.out.println("\n\n\n"); - - server.start(config.getIntProperty("port", 8080), true); - - } - - private static void loadConfigFile(String propertyFile) throws IOException { - if (propertyFile == null) return; - System.out.println("Using config file" + propertyFile); - Properties props = new Properties(System.getProperties()); - props.load(new FileInputStream(propertyFile)); - System.setProperties(props); - } -} diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java index 307d4ec8f2..f51ce72985 100644 --- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java +++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java @@ -21,6 +21,8 @@ import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; +import com.netflix.conductor.grpc.server.GRPCModule; +import com.netflix.conductor.jetty.server.JettyModule; import java.util.concurrent.ExecutorService; @@ -33,6 +35,8 @@ public class ServerModule extends AbstractModule { @Override protected void configure() { install(new CoreModule()); + install(new JettyModule()); + install(new GRPCModule()); bind(Configuration.class).to(SystemPropertiesDynomiteConfiguration.class); bind(ExecutorService.class).toProvider(ExecutorServiceProvider.class).in(Scopes.SINGLETON); diff --git a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java index 2b9d4f1f78..8ba68821e8 100644 --- a/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java +++ b/server/src/main/java/com/netflix/conductor/server/ServletContextListner.java @@ -19,6 +19,7 @@ import com.google.inject.Injector; import com.google.inject.servlet.GuiceServletContextListener; +import com.netflix.conductor.bootstrap.ModulesProvider; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; import org.apache.log4j.PropertyConfigurator; @@ -39,9 +40,8 @@ protected Injector getInjector() { loadProperties(); SystemPropertiesConfiguration config = new SystemPropertiesConfiguration(); - ConductorServer server = new ConductorServer(config); - return Guice.createInjector(server.getModulesToLoad()); + return Guice.createInjector(new ModulesProvider(config).get()); } private void loadProperties() { diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index b5a83c5fd2..8a798c32e3 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ 
b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -18,17 +18,11 @@ */ package com.netflix.conductor.tests.integration; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; - -import org.junit.BeforeClass; -import org.junit.Test; +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.netflix.conductor.bootstrap.BootstrapModule; +import com.netflix.conductor.bootstrap.ModulesProvider; import com.netflix.conductor.client.http.TaskClient; import com.netflix.conductor.client.http.WorkflowClient; import com.netflix.conductor.common.metadata.tasks.Task; @@ -43,8 +37,18 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import com.netflix.conductor.server.ConductorServer; +import com.netflix.conductor.jetty.server.JettyServer; + +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; /** * @author Viren @@ -69,9 +73,10 @@ public class End2EndTests { @BeforeClass public static void setup() throws Exception { - - ConductorServer server = new ConductorServer(new SystemPropertiesConfiguration()); - server.start(8080, false); + Injector bootInjector = Guice.createInjector(new BootstrapModule()); + Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); + JettyServer server = new JettyServer(8080, false); + server.start(); tc = new TaskClient(); tc.setRootURI("http://localhost:8080/api/"); From 8528b44be0bbc09c7397dad76f64148aaee7c3ac Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 18 Jun 2018 10:56:33 +0200 Subject: [PATCH 049/163] Fix Main class and docker configurations. This gets the uber jar and docker working again, including resolving a mysql initialization bug. 
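Note for reviewers: the uber jar's entry point moves from
com.netflix.conductor.server.Main to com.netflix.conductor.bootstrap.Main
(see the Main-Class manifest change in server/build.gradle below), and the
"mysql initialization bug" is the duplicated MySQLWorkflowModule
registration removed from ModulesProvider in this patch. A minimal sketch
of the new boot sequence, mirroring the End2EndTests setup updated earlier
in this series; the wrapper class and the hard-coded port are illustrative
only, not part of this patch:

    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.netflix.conductor.bootstrap.BootstrapModule;
    import com.netflix.conductor.bootstrap.ModulesProvider;
    import com.netflix.conductor.jetty.server.JettyServer;

    public class BootSequenceSketch {
        public static void main(String[] args) throws Exception {
            // Stage 1: a small bootstrap injector that reads the
            // configuration and decides which modules to load.
            Injector bootInjector = Guice.createInjector(new BootstrapModule());

            // Stage 2: the application injector, assembled from whatever
            // modules ModulesProvider selects for the configured data store.
            Injector serverInjector = Guice.createInjector(
                    bootInjector.getInstance(ModulesProvider.class).get());

            // Jetty is started explicitly; 8080 matches PORT_DEFAULT_VALUE
            // in JettyServerConfiguration.
            new JettyServer(8080, false).start();
        }
    }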
--- docker/grpc/Dockerfile.grpc | 22 ----------- docker/grpc/Makefile | 2 + docker/grpc/bin/startup.sh | 21 ---------- docker/grpc/config/config-local.properties | 39 ------------------- docker/grpc/docker-compose.apps.yaml | 24 +++--------- docker/grpc/docker-compose.dependencies.yaml | 3 ++ docker/server/Dockerfile | 1 + docker/server/config/config-local.properties | 4 ++ .../config/config-mysql-grpc.properties} | 21 +++------- docker/server/config/config-mysql.properties | 4 ++ docker/server/config/config.properties | 4 ++ server/build.gradle | 2 +- .../conductor/bootstrap/ModulesProvider.java | 1 - .../server/JettyServerConfiguration.java | 2 +- 14 files changed, 30 insertions(+), 120 deletions(-) delete mode 100644 docker/grpc/Dockerfile.grpc delete mode 100755 docker/grpc/bin/startup.sh delete mode 100755 docker/grpc/config/config-local.properties rename docker/{grpc/config/config.properties => server/config/config-mysql-grpc.properties} (57%) diff --git a/docker/grpc/Dockerfile.grpc b/docker/grpc/Dockerfile.grpc deleted file mode 100644 index 65427327b0..0000000000 --- a/docker/grpc/Dockerfile.grpc +++ /dev/null @@ -1,22 +0,0 @@ -# -# conductor:server - Netflix conductor server -# -FROM openjdk:8-jre-slim - -MAINTAINER Netflix OSS - -# Make app folders -RUN mkdir -p /app/config /app/logs /app/libs - -# Copy the project directly onto the image -COPY ./docker/grpc/bin /app -COPY ./docker/grpc/config /app/config -COPY ./grpc-server/build/libs/conductor-grpc-server-*-all.jar /app/libs - -# Copy the files for the server into the app folders -RUN chmod +x /app/startup.sh - -EXPOSE 8090 - -CMD [ "/app/startup.sh" ] -ENTRYPOINT [ "/bin/sh"] diff --git a/docker/grpc/Makefile b/docker/grpc/Makefile index 6da40b1a19..142b96b4a9 100644 --- a/docker/grpc/Makefile +++ b/docker/grpc/Makefile @@ -12,6 +12,8 @@ dependencies-down: docker-compose -f docker-compose.dependencies.yaml down stack-up: + docker-compose -f docker-compose.dependencies.yaml up -d + sleep 30 docker-compose -f docker-compose.dependencies.yaml -f docker-compose.apps.yaml up stack-down: diff --git a/docker/grpc/bin/startup.sh b/docker/grpc/bin/startup.sh deleted file mode 100755 index 5ac1723c0a..0000000000 --- a/docker/grpc/bin/startup.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -# startup.sh - startup script for the server docker image - -echo "Starting Conductor server" - -# Start the server -cd /app/libs -echo "Property file: $CONFIG_PROP" -echo $CONFIG_PROP -export config_file= - -if [ -z "$CONFIG_PROP" ]; - then - echo "Using an in-memory instance of conductor"; - export config_file=/app/config/config-local.properties - else - echo "Using '$CONFIG_PROP'"; - export config_file=/app/config/$CONFIG_PROP -fi - -java -jar conductor-grpc-server-*-all.jar $config_file diff --git a/docker/grpc/config/config-local.properties b/docker/grpc/config/config-local.properties deleted file mode 100755 index b9cced6448..0000000000 --- a/docker/grpc/config/config-local.properties +++ /dev/null @@ -1,39 +0,0 @@ -# Database persistence model. Possible values are memory, redis, and dynomite. -# If ommitted, the persistence used is memory -# -# memory : The data is stored in memory and lost when the server dies. Useful for testing or demo -# redis : non-Dynomite based redis instance -# dynomite : Dynomite cluster. Use this for HA configuration. - -db=memory - -# Dynomite Cluster details. 
-# format is host:port:rack separated by semicolon -workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c - -# Namespace for the keys stored in Dynomite/Redis -workflow.namespace.prefix=conductor - -# Namespace prefix for the dyno queues -workflow.namespace.queue.prefix=conductor_queues - -# No. of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# Non-quorum port used to connect to local redis. Used by dyno-queues. -# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. -queues.dynomite.nonQuorum.port=22122 - - -# Transport address to elasticsearch -workflow.elasticsearch.url=localhost:9300 - -# Name of the elasticsearch cluster -workflow.elasticsearch.index.name=conductor - -# Additional modules (optional) -# conductor.additional.modules=class_extending_com.google.inject.AbstractModule - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/grpc/docker-compose.apps.yaml b/docker/grpc/docker-compose.apps.yaml index 710395f895..d9cc32622e 100644 --- a/docker/grpc/docker-compose.apps.yaml +++ b/docker/grpc/docker-compose.apps.yaml @@ -2,30 +2,16 @@ version: '3.4' services: - conductor-server-grpc: + conductor-server: environment: - - CONFIG_PROP=config.properties - image: conductor:grpc-server - build: - context: ../../ - dockerfile: docker/grpc/Dockerfile.grpc - ports: - - 8090:8090 - networks: - - internal - depends_on: - - mysql - - elasticsearch - - conductor-server-rest: + - CONFIG_PROP=config-mysql-grpc.properties image: conductor:server build: context: ../../ dockerfile: docker/server/Dockerfile - environment: - - CONFIG_PROP=config-mysql.properties ports: - 8080:8080 + - 8090:8090 networks: - internal depends_on: @@ -34,7 +20,7 @@ services: conductor-ui: environment: - - WF_SERVER=http://conductor-server-rest:8080/api/ + - WF_SERVER=http://conductor-server:8080/api/ image: conductor:ui build: context: ../../ @@ -44,4 +30,4 @@ services: networks: - internal depends_on: - - conductor-server-rest + - conductor-server diff --git a/docker/grpc/docker-compose.dependencies.yaml b/docker/grpc/docker-compose.dependencies.yaml index 60f4b95e60..214ef7352e 100644 --- a/docker/grpc/docker-compose.dependencies.yaml +++ b/docker/grpc/docker-compose.dependencies.yaml @@ -22,6 +22,9 @@ services: image: elasticsearch:5.6-alpine environment: ES_JAVA_OPTS: "-Xms750m -Xmx750m" + transport.host: 0.0.0.0 + discovery.type: single-node + cluster.name: conductor networks: - internal ports: diff --git a/docker/server/Dockerfile b/docker/server/Dockerfile index ab6e8313c3..a3d3efdd5d 100644 --- a/docker/server/Dockerfile +++ b/docker/server/Dockerfile @@ -17,6 +17,7 @@ COPY ./server/build/libs/conductor-server-*-all.jar /app/libs RUN chmod +x /app/startup.sh EXPOSE 8080 +EXPOSE 8090 CMD [ "/app/startup.sh" ] ENTRYPOINT [ "/bin/sh"] diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties index b9cced6448..b59ea05a26 100755 --- a/docker/server/config/config-local.properties +++ b/docker/server/config/config-local.properties @@ -1,3 +1,7 @@ +# Servers. +conductor.jetty.server.enabled=true +conductor.grpc.server.enabled=false + # Database persistence model. Possible values are memory, redis, and dynomite. 
# If ommitted, the persistence used is memory # diff --git a/docker/grpc/config/config.properties b/docker/server/config/config-mysql-grpc.properties similarity index 57% rename from docker/grpc/config/config.properties rename to docker/server/config/config-mysql-grpc.properties index a6e3902263..dc193c28bb 100755 --- a/docker/grpc/config/config.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -1,3 +1,7 @@ +# Servers. +conductor.jetty.server.enabled=true +conductor.grpc.server.enabled=true + # Database persistence model. Possible values are memory, redis, and dynomite. # If ommitted, the persistence used is memory # @@ -8,20 +12,8 @@ db=mysql jdbc.url=jdbc:mysql://mysql:3306/conductor - -# Namespace prefix for the dyno queues -workflow.namespace.queue.prefix=conductor_queues - -# No. of threads allocated to dyno-queues (optional) -queues.dynomite.threads=10 - -# Non-quorum port used to connect to local redis. Used by dyno-queues. -# When using redis directly, set this to the same port as redis server -# For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. -queues.dynomite.nonQuorum.port=22122 - - # Transport address to elasticsearch +workflow.elasticsearch.version=5 workflow.elasticsearch.url=elasticsearch:9300 # Name of the elasticsearch cluster @@ -29,6 +21,3 @@ workflow.elasticsearch.index.name=conductor # Additional modules (optional) # conductor.additional.modules=class_extending_com.google.inject.AbstractModule - -# Load sample kitchen sink workflow -loadSample=true diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties index 575254578f..52f2eb4b3c 100755 --- a/docker/server/config/config-mysql.properties +++ b/docker/server/config/config-mysql.properties @@ -1,3 +1,7 @@ +# Servers. +conductor.jetty.server.enabled=true +conductor.grpc.server.enabled=false + # Database persistence model. Possible values are memory, redis, and dynomite. # If ommitted, the persistence used is memory # diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties index a03f97afa4..c289e65964 100755 --- a/docker/server/config/config.properties +++ b/docker/server/config/config.properties @@ -1,3 +1,7 @@ +# Servers. +conductor.jetty.server.enabled=true +conductor.grpc.server.enabled=false + # Database persistence model. Possible values are memory, redis, and dynomite. 
# If ommitted, the persistence used is memory # diff --git a/server/build.gradle b/server/build.gradle index 018f9eb151..6121bac609 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -47,7 +47,7 @@ shadowJar { configurations = [project.configurations.compile] manifest { attributes 'Description': 'Self contained Conductor server jar' - attributes 'Main-Class' : 'com.netflix.conductor.server.Main' + attributes 'Main-Class' : 'com.netflix.conductor.bootstrap.Main' } } publishing { diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java index 0f60f3e853..27b0602277 100644 --- a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java +++ b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java @@ -70,7 +70,6 @@ private List selectModulesToLoad() { break; case MYSQL: - modules.add(new MySQLWorkflowModule()); modules.add(new MySQLWorkflowModule()); logger.info("Starting conductor server using MySQL data store", database); break; diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java index 23f52e3fca..9b3a1e9919 100644 --- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServerConfiguration.java @@ -4,7 +4,7 @@ public interface JettyServerConfiguration extends Configuration { String ENABLED_PROPERTY_NAME = "conductor.jetty.server.enabled"; - boolean ENABLED_DEFAULT_VALUE = false; + boolean ENABLED_DEFAULT_VALUE = true; String PORT_PROPERTY_NAME = "conductor.jetty.server.port"; int PORT_DEFAULT_VALUE = 8080; From 6f7e8d67978237ab1e656825fb12446f61f1c6cd Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 18 Jun 2018 12:38:29 +0200 Subject: [PATCH 050/163] Review feedback. 
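The notable functional change below is in ExecutorServiceProvider: the
hand-rolled ThreadFactory with an AtomicInteger name counter is replaced
by Guava's ThreadFactoryBuilder, and worker threads become daemons. A
minimal sketch of the pattern, assuming only Guava on the classpath; the
pool size is illustrative, since the provider still hard-codes MAX_THREADS
behind a TODO:

    import com.google.common.util.concurrent.ThreadFactoryBuilder;

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ThreadFactory;
    import java.util.concurrent.TimeUnit;

    public class WorkerPoolSketch {
        public static void main(String[] args) throws InterruptedException {
            // Threads are named conductor-worker-0, conductor-worker-1, ...
            // setDaemon(true) is the behavioural change: idle workers no
            // longer keep the JVM alive on shutdown.
            ThreadFactory factory = new ThreadFactoryBuilder()
                    .setNameFormat("conductor-worker-%d")
                    .setDaemon(true)
                    .build();
            ExecutorService pool = Executors.newFixedThreadPool(50, factory);
            pool.submit(() -> System.out.println(Thread.currentThread().getName()));
            pool.shutdown();
            // Wait briefly so the daemon worker gets to run before main exits.
            pool.awaitTermination(5, TimeUnit.SECONDS);
        }
    }

The other recurring fix is replacing the repeated "DynoReadClient" string
literal with RedisQueuesProvider.READ_CLIENT_INJECTION_NAME, so the Guice
binding sites and the @Named injection point cannot drift apart.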
--- .../dao/dynomite/queue/DynoQueueDAO.java | 1 - .../dyno/DynoShardSupplierProvider.java | 4 ++-- .../conductor/dyno/RedisQueuesProvider.java | 4 +++- .../server/DynomiteClusterModule.java | 4 +++- .../server/ExecutorServiceProvider.java | 23 +++++++++++-------- .../conductor/server/LocalRedisModule.java | 3 ++- 6 files changed, 24 insertions(+), 15 deletions(-) diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java index cb1ee72a37..9ce1a29513 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/queue/DynoQueueDAO.java @@ -204,7 +204,6 @@ public Map>> queuesDetailVerbose() { public void processUnacks(String queueName) { ((RedisDynoQueue) queues.get(queueName)).processUnacks(); - ; } @Override diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java index 9e309859b6..79a630b6c1 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/DynoShardSupplierProvider.java @@ -22,12 +22,12 @@ public DynoShardSupplierProvider(HostSupplier hostSupplier, DynomiteConfiguratio @Override public ShardSupplier get() { - if(configuration.getAvailabilityZone() == null) + if(configuration.getAvailabilityZone() == null) { throw new ProvisionException( "Availability zone is not defined. Ensure Configuration.getAvailabilityZone() returns a non-null " + "and non-empty value." 
); - + } String localDC = configuration.getAvailabilityZone().replaceAll(configuration.getRegion(), ""); diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java index 4b037a19f4..c9521cdf64 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dyno/RedisQueuesProvider.java @@ -14,6 +14,8 @@ public class RedisQueuesProvider implements Provider { + public static final String READ_CLIENT_INJECTION_NAME = "DynoReadClient"; + private static final Logger logger = LoggerFactory.getLogger(RedisQueuesProvider.class); private final JedisCommands dynoClient; @@ -24,7 +26,7 @@ public class RedisQueuesProvider implements Provider { @Inject public RedisQueuesProvider( JedisCommands dynoClient, - @Named("DynoReadClient") JedisCommands dynoClientRead, + @Named(READ_CLIENT_INJECTION_NAME) JedisCommands dynoClientRead, ShardSupplier ss, DynomiteConfiguration config ) { diff --git a/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java b/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java index aa4d75e3f7..56ac5c5f2d 100644 --- a/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java +++ b/server/src/main/java/com/netflix/conductor/server/DynomiteClusterModule.java @@ -5,6 +5,7 @@ import com.netflix.conductor.dyno.DynoShardSupplierProvider; import com.netflix.conductor.dyno.DynomiteConfiguration; +import com.netflix.conductor.dyno.RedisQueuesProvider; import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; import com.netflix.conductor.jedis.ConfigurationHostSupplierProvider; import com.netflix.conductor.jedis.DynomiteJedisProvider; @@ -16,13 +17,14 @@ import redis.clients.jedis.JedisCommands; public class DynomiteClusterModule extends AbstractModule { + @Override protected void configure() { bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class); bind(JedisCommands.class).toProvider(DynomiteJedisProvider.class).asEagerSingleton(); bind(JedisCommands.class) - .annotatedWith(Names.named("DynoReadClient")) + .annotatedWith(Names.named(RedisQueuesProvider.READ_CLIENT_INJECTION_NAME)) .toProvider(DynomiteJedisProvider.class) .asEagerSingleton(); bind(HostSupplier.class).toProvider(ConfigurationHostSupplierProvider.class); diff --git a/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java b/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java index 3513c14342..869d7a5aad 100644 --- a/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java +++ b/server/src/main/java/com/netflix/conductor/server/ExecutorServiceProvider.java @@ -1,9 +1,11 @@ package com.netflix.conductor.server; +import com.google.common.util.concurrent.ThreadFactoryBuilder; + import com.netflix.conductor.core.config.Configuration; import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.ThreadFactory; import javax.inject.Inject; import javax.inject.Provider; @@ -15,18 +17,21 @@ public class ExecutorServiceProvider implements Provider { private final ExecutorService executorService; @Inject - public ExecutorServiceProvider(Configuration configuration){ + public ExecutorServiceProvider(Configuration configuration) { this.configuration = configuration; - - AtomicInteger count 
= new AtomicInteger(0); - this.executorService = java.util.concurrent.Executors.newFixedThreadPool(MAX_THREADS, runnable -> { - Thread conductorWorkerThread = new Thread(runnable); - conductorWorkerThread.setName("conductor-worker-" + count.getAndIncrement()); - return conductorWorkerThread; - }); + // TODO Use configuration to set max threads. + this.executorService = java.util.concurrent.Executors.newFixedThreadPool(MAX_THREADS, buildThreadFactory()); } + @Override public ExecutorService get() { return executorService; } + + private ThreadFactory buildThreadFactory() { + return new ThreadFactoryBuilder() + .setNameFormat("conductor-worker-%d") + .setDaemon(true) + .build(); + } } diff --git a/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java b/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java index 9adfc46150..ee4d7cf83d 100644 --- a/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java +++ b/server/src/main/java/com/netflix/conductor/server/LocalRedisModule.java @@ -5,6 +5,7 @@ import com.netflix.conductor.dyno.DynoShardSupplierProvider; import com.netflix.conductor.dyno.DynomiteConfiguration; +import com.netflix.conductor.dyno.RedisQueuesProvider; import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; import com.netflix.conductor.jedis.InMemoryJedisProvider; import com.netflix.conductor.jedis.LocalHostSupplierProvider; @@ -20,7 +21,7 @@ protected void configure() { bind(DynomiteConfiguration.class).to(SystemPropertiesDynomiteConfiguration.class); bind(JedisCommands.class).toProvider(InMemoryJedisProvider.class); bind(JedisCommands.class) - .annotatedWith(Names.named("DynoReadClient")) + .annotatedWith(Names.named(RedisQueuesProvider.READ_CLIENT_INJECTION_NAME)) .toProvider(InMemoryJedisProvider.class); bind(HostSupplier.class).toProvider(LocalHostSupplierProvider.class); bind(ShardSupplier.class).toProvider(DynoShardSupplierProvider.class); From 45dd4eaf1432656f15a8ea4e65e9be2e1ee2aef9 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 18 Jun 2018 13:36:38 +0200 Subject: [PATCH 051/163] test-harness: Link against grpc-server and client --- test-harness/build.gradle | 2 ++ .../com/netflix/conductor/tests/integration/End2EndTests.java | 4 ++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/test-harness/build.gradle b/test-harness/build.gradle index 5bcc968280..06cddb16e8 100644 --- a/test-harness/build.gradle +++ b/test-harness/build.gradle @@ -10,6 +10,8 @@ dependencies { testCompile project(':conductor-redis-persistence').sourceSets.test.output testCompile project(':conductor-client') testCompile project(':conductor-server') + testCompile project(':conductor-grpc-client') + testCompile project(':conductor-grpc-server') testCompile "org.elasticsearch:elasticsearch:${revElasticSearch5}" testCompile "org.eclipse.jetty:jetty-server:${revJetteyServer}" testCompile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}" diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 8a798c32e3..e5a93315c5 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -73,8 +73,8 @@ public class End2EndTests { @BeforeClass public static void setup() throws Exception { - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - 
Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); + Injector bootInjector = Guice.createInjector(new BootstrapModule()); + Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); JettyServer server = new JettyServer(8080, false); server.start(); From 528a33377c8f336c6cae8b8b35b8803afcd3d2ec Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 18 Jun 2018 18:28:24 +0200 Subject: [PATCH 052/163] protogen: Add support for nullable fields --- .../conductor/protogen/types/MessageType.java | 6 ++-- .../conductor/protogen/types/ScalarType.java | 34 ++++++++++++++++--- 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java index d8f0257890..fab158f9fa 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java @@ -35,8 +35,10 @@ public File getProtoFile() { @Override public void mapToProto(String field, MethodSpec.Builder method) { - method.addStatement("to.$L( toProto( from.$L() ) )", - fieldMethod("set", field), fieldMethod("get", field)); + final String getter = fieldMethod("get", field); + method.beginControlFlow("if (from.$L() != null)", getter); + method.addStatement("to.$L( toProto( from.$L() ) )", fieldMethod("set", field), getter); + method.endControlFlow(); } @Override diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java index 556cb8bec4..62d68fe01b 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java @@ -3,6 +3,7 @@ import com.netflix.conductor.protogen.types.AbstractType; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; +import org.apache.commons.lang3.ClassUtils; import java.lang.reflect.Type; import java.util.Set; @@ -32,14 +33,39 @@ private void mapCode(String field, MethodSpec.Builder method, String getter) { @Override public void mapFromProto(String field, MethodSpec.Builder method) { - mapCode(field, method, "get"); + method.addStatement("to.$L( from.$L() )", + fieldMethod("set", field), fieldMethod("get", field)); + } + + private boolean isNullableType() { + final Type jt = getJavaType(); + return jt.equals(Boolean.class) || + jt.equals(Byte.class) || + jt.equals(Character.class) || + jt.equals(Short.class) || + jt.equals(Integer.class) || + jt.equals(Long.class) || + jt.equals(Double.class) || + jt.equals(Float.class) || + jt.equals(String.class); } @Override public void mapToProto(String field, MethodSpec.Builder method) { - String getter = (getJavaType().equals(boolean.class) || - getJavaType().equals(Boolean.class)) ? "is" : "get"; - mapCode(field, method, getter); + final boolean nullable = isNullableType(); + String getter = ( + getJavaType().equals(boolean.class) || + getJavaType().equals(Boolean.class)) ? 
+ fieldMethod("is", field) : + fieldMethod("get", field); + + if (nullable) + method.beginControlFlow("if (from.$L() != null)", getter); + + method.addStatement("to.$L( from.$L() )", fieldMethod("set", field), getter); + + if (nullable) + method.endControlFlow(); } @Override From 172774cd65336c4ec849dcf26c992946e4ce0b88 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 18 Jun 2018 18:29:11 +0200 Subject: [PATCH 053/163] grpc: Handle nullable fields in AbstractProtoMapper --- .../metadata/workflow/WorkflowTask.java | 14 +- .../conductor/grpc/AbstractProtoMapper.java | 432 +++++++++++++----- 2 files changed, 327 insertions(+), 119 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 99219d2874..e3a8d34f6c 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -388,21 +388,13 @@ public void setSink(String sink) { } /** - * + * * @return If the task is optional. When set to true, the workflow execution continues even when the task is in failed status. */ - public Boolean getOptional() { + public Boolean isOptional() { return optional; } - - /** - * - * @return true if the task is optional. False otherwise. - */ - public boolean isOptional() { - return (optional != null && optional.booleanValue()); - } - + /** * * @param optional when set to true, the task is marked as optional diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 19287c8080..c1166111a9 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -53,13 +53,25 @@ public abstract class AbstractProtoMapper { public EventExecutionPb.EventExecution toProto(EventExecution from) { EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); - to.setId( from.getId() ); - to.setMessageId( from.getMessageId() ); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); + if (from.getId() != null) { + to.setId( from.getId() ); + } + if (from.getMessageId() != null) { + to.setMessageId( from.getMessageId() ); + } + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getEvent() != null) { + to.setEvent( from.getEvent() ); + } to.setCreated( from.getCreated() ); - to.setStatus( toProto( from.getStatus() ) ); - to.setAction( toProto( from.getAction() ) ); + if (from.getStatus() != null) { + to.setStatus( toProto( from.getStatus() ) ); + } + if (from.getAction() != null) { + to.setAction( toProto( from.getAction() ) ); + } for (Map.Entry pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } @@ -109,9 +121,15 @@ public EventExecution.Status fromProto(EventExecutionPb.EventExecution.Status fr public EventHandlerPb.EventHandler toProto(EventHandler from) { EventHandlerPb.EventHandler.Builder to = EventHandlerPb.EventHandler.newBuilder(); - to.setName( from.getName() ); - to.setEvent( from.getEvent() ); - to.setCondition( from.getCondition() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getEvent() != null) { + to.setEvent( from.getEvent() ); + } + if (from.getCondition() != null) { + to.setCondition( 
from.getCondition() ); + } for (EventHandler.Action elem : from.getActions()) { to.addActions( toProto(elem) ); } @@ -131,9 +149,15 @@ public EventHandler fromProto(EventHandlerPb.EventHandler from) { public EventHandlerPb.EventHandler.StartWorkflow toProto(EventHandler.StartWorkflow from) { EventHandlerPb.EventHandler.StartWorkflow.Builder to = EventHandlerPb.EventHandler.StartWorkflow.newBuilder(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getVersion() != null) { + to.setVersion( from.getVersion() ); + } + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } for (Map.Entry pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } @@ -155,8 +179,12 @@ public EventHandler.StartWorkflow fromProto(EventHandlerPb.EventHandler.StartWor public EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails from) { EventHandlerPb.EventHandler.TaskDetails.Builder to = EventHandlerPb.EventHandler.TaskDetails.newBuilder(); - to.setWorkflowId( from.getWorkflowId() ); - to.setTaskRefName( from.getTaskRefName() ); + if (from.getWorkflowId() != null) { + to.setWorkflowId( from.getWorkflowId() ); + } + if (from.getTaskRefName() != null) { + to.setTaskRefName( from.getTaskRefName() ); + } for (Map.Entry pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } @@ -177,10 +205,18 @@ public EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetail public EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { EventHandlerPb.EventHandler.Action.Builder to = EventHandlerPb.EventHandler.Action.newBuilder(); - to.setAction( toProto( from.getAction() ) ); - to.setStartWorkflow( toProto( from.getStartWorkflow() ) ); - to.setCompleteTask( toProto( from.getCompleteTask() ) ); - to.setFailTask( toProto( from.getFailTask() ) ); + if (from.getAction() != null) { + to.setAction( toProto( from.getAction() ) ); + } + if (from.getStartWorkflow() != null) { + to.setStartWorkflow( toProto( from.getStartWorkflow() ) ); + } + if (from.getCompleteTask() != null) { + to.setCompleteTask( toProto( from.getCompleteTask() ) ); + } + if (from.getFailTask() != null) { + to.setFailTask( toProto( from.getFailTask() ) ); + } to.setExpandInlineJson( from.isExpandInlineJson() ); return to.build(); } @@ -219,9 +255,15 @@ public EventHandler.Action.Type fromProto(EventHandlerPb.EventHandler.Action.Typ public PollDataPb.PollData toProto(PollData from) { PollDataPb.PollData.Builder to = PollDataPb.PollData.newBuilder(); - to.setQueueName( from.getQueueName() ); - to.setDomain( from.getDomain() ); - to.setWorkerId( from.getWorkerId() ); + if (from.getQueueName() != null) { + to.setQueueName( from.getQueueName() ); + } + if (from.getDomain() != null) { + to.setDomain( from.getDomain() ); + } + if (from.getWorkerId() != null) { + to.setWorkerId( from.getWorkerId() ); + } to.setLastPollTime( from.getLastPollTime() ); return to.build(); } @@ -237,38 +279,64 @@ public PollData fromProto(PollDataPb.PollData from) { public TaskPb.Task toProto(Task from) { TaskPb.Task.Builder to = TaskPb.Task.newBuilder(); - to.setTaskType( from.getTaskType() ); - to.setStatus( toProto( from.getStatus() ) ); + if (from.getTaskType() != null) { + to.setTaskType( from.getTaskType() ); + } + if (from.getStatus() != null) { + to.setStatus( toProto( 
from.getStatus() ) ); + } for (Map.Entry pair : from.getInputData().entrySet()) { to.putInputData( pair.getKey(), toProto( pair.getValue() ) ); } - to.setReferenceTaskName( from.getReferenceTaskName() ); + if (from.getReferenceTaskName() != null) { + to.setReferenceTaskName( from.getReferenceTaskName() ); + } to.setRetryCount( from.getRetryCount() ); to.setSeq( from.getSeq() ); - to.setCorrelationId( from.getCorrelationId() ); + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } to.setPollCount( from.getPollCount() ); - to.setTaskDefName( from.getTaskDefName() ); + if (from.getTaskDefName() != null) { + to.setTaskDefName( from.getTaskDefName() ); + } to.setScheduledTime( from.getScheduledTime() ); to.setStartTime( from.getStartTime() ); to.setEndTime( from.getEndTime() ); to.setUpdateTime( from.getUpdateTime() ); to.setStartDelayInSeconds( from.getStartDelayInSeconds() ); - to.setRetriedTaskId( from.getRetriedTaskId() ); + if (from.getRetriedTaskId() != null) { + to.setRetriedTaskId( from.getRetriedTaskId() ); + } to.setRetried( from.isRetried() ); to.setExecuted( from.isExecuted() ); to.setCallbackFromWorker( from.isCallbackFromWorker() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); + if (from.getWorkflowInstanceId() != null) { + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + } + if (from.getWorkflowType() != null) { + to.setWorkflowType( from.getWorkflowType() ); + } + if (from.getTaskId() != null) { + to.setTaskId( from.getTaskId() ); + } + if (from.getReasonForIncompletion() != null) { + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + } to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); + if (from.getWorkerId() != null) { + to.setWorkerId( from.getWorkerId() ); + } for (Map.Entry pair : from.getOutputData().entrySet()) { to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); } - to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); - to.setDomain( from.getDomain() ); + if (from.getWorkflowTask() != null) { + to.setWorkflowTask( toProto( from.getWorkflowTask() ) ); + } + if (from.getDomain() != null) { + to.setDomain( from.getDomain() ); + } return to.build(); } @@ -351,17 +419,27 @@ public Task.Status fromProto(TaskPb.Task.Status from) { public TaskDefPb.TaskDef toProto(TaskDef from) { TaskDefPb.TaskDef.Builder to = TaskDefPb.TaskDef.newBuilder(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getDescription() != null) { + to.setDescription( from.getDescription() ); + } to.setRetryCount( from.getRetryCount() ); to.setTimeoutSeconds( from.getTimeoutSeconds() ); to.addAllInputKeys( from.getInputKeys() ); to.addAllOutputKeys( from.getOutputKeys() ); - to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); - to.setRetryLogic( toProto( from.getRetryLogic() ) ); + if (from.getTimeoutPolicy() != null) { + to.setTimeoutPolicy( toProto( from.getTimeoutPolicy() ) ); + } + if (from.getRetryLogic() != null) { + to.setRetryLogic( toProto( from.getRetryLogic() ) ); + } to.setRetryDelaySeconds( from.getRetryDelaySeconds() ); to.setResponseTimeoutSeconds( from.getResponseTimeoutSeconds() ); - to.setConcurrentExecLimit( 
from.getConcurrentExecLimit() ); + if (from.getConcurrentExecLimit() != null) { + to.setConcurrentExecLimit( from.getConcurrentExecLimit() ); + } for (Map.Entry pair : from.getInputTemplate().entrySet()) { to.putInputTemplate( pair.getKey(), toProto( pair.getValue() ) ); } @@ -433,8 +511,12 @@ public TaskDef.TimeoutPolicy fromProto(TaskDefPb.TaskDef.TimeoutPolicy from) { public TaskExecLogPb.TaskExecLog toProto(TaskExecLog from) { TaskExecLogPb.TaskExecLog.Builder to = TaskExecLogPb.TaskExecLog.newBuilder(); - to.setLog( from.getLog() ); - to.setTaskId( from.getTaskId() ); + if (from.getLog() != null) { + to.setLog( from.getLog() ); + } + if (from.getTaskId() != null) { + to.setTaskId( from.getTaskId() ); + } to.setCreatedTime( from.getCreatedTime() ); return to.build(); } @@ -449,12 +531,22 @@ public TaskExecLog fromProto(TaskExecLogPb.TaskExecLog from) { public TaskResultPb.TaskResult toProto(TaskResult from) { TaskResultPb.TaskResult.Builder to = TaskResultPb.TaskResult.newBuilder(); - to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); - to.setTaskId( from.getTaskId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); + if (from.getWorkflowInstanceId() != null) { + to.setWorkflowInstanceId( from.getWorkflowInstanceId() ); + } + if (from.getTaskId() != null) { + to.setTaskId( from.getTaskId() ); + } + if (from.getReasonForIncompletion() != null) { + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + } to.setCallbackAfterSeconds( from.getCallbackAfterSeconds() ); - to.setWorkerId( from.getWorkerId() ); - to.setStatus( toProto( from.getStatus() ) ); + if (from.getWorkerId() != null) { + to.setWorkerId( from.getWorkerId() ); + } + if (from.getStatus() != null) { + to.setStatus( toProto( from.getStatus() ) ); + } for (Map.Entry pair : from.getOutputData().entrySet()) { to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); } @@ -505,13 +597,21 @@ public TaskResult.Status fromProto(TaskResultPb.TaskResult.Status from) { public DynamicForkJoinTaskPb.DynamicForkJoinTask toProto(DynamicForkJoinTask from) { DynamicForkJoinTaskPb.DynamicForkJoinTask.Builder to = DynamicForkJoinTaskPb.DynamicForkJoinTask.newBuilder(); - to.setTaskName( from.getTaskName() ); - to.setWorkflowName( from.getWorkflowName() ); - to.setReferenceName( from.getReferenceName() ); + if (from.getTaskName() != null) { + to.setTaskName( from.getTaskName() ); + } + if (from.getWorkflowName() != null) { + to.setWorkflowName( from.getWorkflowName() ); + } + if (from.getReferenceName() != null) { + to.setReferenceName( from.getReferenceName() ); + } for (Map.Entry pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } - to.setType( from.getType() ); + if (from.getType() != null) { + to.setType( from.getType() ); + } return to.build(); } @@ -546,15 +646,21 @@ public DynamicForkJoinTaskList fromProto( public RerunWorkflowRequestPb.RerunWorkflowRequest toProto(RerunWorkflowRequest from) { RerunWorkflowRequestPb.RerunWorkflowRequest.Builder to = RerunWorkflowRequestPb.RerunWorkflowRequest.newBuilder(); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + if (from.getReRunFromWorkflowId() != null) { + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + } for (Map.Entry pair : from.getWorkflowInput().entrySet()) { to.putWorkflowInput( pair.getKey(), toProto( pair.getValue() ) ); } - to.setReRunFromTaskId( from.getReRunFromTaskId() ); + if (from.getReRunFromTaskId() != null) { + to.setReRunFromTaskId( from.getReRunFromTaskId() ); 
+ } for (Map.Entry pair : from.getTaskInput().entrySet()) { to.putTaskInput( pair.getKey(), toProto( pair.getValue() ) ); } - to.setCorrelationId( from.getCorrelationId() ); + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } return to.build(); } @@ -593,9 +699,15 @@ public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { public StartWorkflowRequestPb.StartWorkflowRequest toProto(StartWorkflowRequest from) { StartWorkflowRequestPb.StartWorkflowRequest.Builder to = StartWorkflowRequestPb.StartWorkflowRequest.newBuilder(); - to.setName( from.getName() ); - to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getVersion() != null) { + to.setVersion( from.getVersion() ); + } + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } for (Map.Entry pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } @@ -619,8 +731,12 @@ public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowReques public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { SubWorkflowParamsPb.SubWorkflowParams.Builder to = SubWorkflowParamsPb.SubWorkflowParams.newBuilder(); - to.setName( from.getName() ); - to.setVersion( toProto( from.getVersion() ) ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getVersion() != null) { + to.setVersion( toProto( from.getVersion() ) ); + } return to.build(); } @@ -633,8 +749,12 @@ public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { public WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { WorkflowDefPb.WorkflowDef.Builder to = WorkflowDefPb.WorkflowDef.newBuilder(); - to.setName( from.getName() ); - to.setDescription( from.getDescription() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getDescription() != null) { + to.setDescription( from.getDescription() ); + } to.setVersion( from.getVersion() ); for (WorkflowTask elem : from.getTasks()) { to.addTasks( toProto(elem) ); @@ -643,7 +763,9 @@ public WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { for (Map.Entry pair : from.getOutputParameters().entrySet()) { to.putOutputParameters( pair.getKey(), toProto( pair.getValue() ) ); } - to.setFailureWorkflow( from.getFailureWorkflow() ); + if (from.getFailureWorkflow() != null) { + to.setFailureWorkflow( from.getFailureWorkflow() ); + } to.setSchemaVersion( from.getSchemaVersion() ); return to.build(); } @@ -667,21 +789,39 @@ public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { WorkflowTaskPb.WorkflowTask.Builder to = WorkflowTaskPb.WorkflowTask.newBuilder(); - to.setName( from.getName() ); - to.setTaskReferenceName( from.getTaskReferenceName() ); - to.setDescription( from.getDescription() ); + if (from.getName() != null) { + to.setName( from.getName() ); + } + if (from.getTaskReferenceName() != null) { + to.setTaskReferenceName( from.getTaskReferenceName() ); + } + if (from.getDescription() != null) { + to.setDescription( from.getDescription() ); + } for (Map.Entry pair : from.getInputParameters().entrySet()) { to.putInputParameters( pair.getKey(), toProto( pair.getValue() ) ); } - to.setType( from.getType() ); - to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); - to.setCaseValueParam( from.getCaseValueParam() ); - 
to.setCaseExpression( from.getCaseExpression() ); + if (from.getType() != null) { + to.setType( from.getType() ); + } + if (from.getDynamicTaskNameParam() != null) { + to.setDynamicTaskNameParam( from.getDynamicTaskNameParam() ); + } + if (from.getCaseValueParam() != null) { + to.setCaseValueParam( from.getCaseValueParam() ); + } + if (from.getCaseExpression() != null) { + to.setCaseExpression( from.getCaseExpression() ); + } for (Map.Entry> pair : from.getDecisionCases().entrySet()) { to.putDecisionCases( pair.getKey(), toProto( pair.getValue() ) ); } - to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); - to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + if (from.getDynamicForkTasksParam() != null) { + to.setDynamicForkTasksParam( from.getDynamicForkTasksParam() ); + } + if (from.getDynamicForkTasksInputParamName() != null) { + to.setDynamicForkTasksInputParamName( from.getDynamicForkTasksInputParamName() ); + } for (WorkflowTask elem : from.getDefaultCase()) { to.addDefaultCase( toProto(elem) ); } @@ -689,10 +829,16 @@ public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { to.addForkTasks( toProto(elem) ); } to.setStartDelay( from.getStartDelay() ); - to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); + if (from.getSubWorkflowParam() != null) { + to.setSubWorkflowParam( toProto( from.getSubWorkflowParam() ) ); + } to.addAllJoinOn( from.getJoinOn() ); - to.setSink( from.getSink() ); - to.setOptional( from.isOptional() ); + if (from.getSink() != null) { + to.setSink( from.getSink() ); + } + if (from.isOptional() != null) { + to.setOptional( from.isOptional() ); + } return to.build(); } @@ -765,32 +911,68 @@ public WorkflowTask.Type fromProto(WorkflowTaskPb.WorkflowTask.Type from) { public TaskSummaryPb.TaskSummary toProto(TaskSummary from) { TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); - to.setWorkflowId( from.getWorkflowId() ); - to.setWorkflowType( from.getWorkflowType() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setScheduledTime( from.getScheduledTime() ); - to.setStartTime( from.getStartTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setEndTime( from.getEndTime() ); - to.setStatus( toProto( from.getStatus() ) ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); + if (from.getWorkflowId() != null) { + to.setWorkflowId( from.getWorkflowId() ); + } + if (from.getWorkflowType() != null) { + to.setWorkflowType( from.getWorkflowType() ); + } + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } + if (from.getScheduledTime() != null) { + to.setScheduledTime( from.getScheduledTime() ); + } + if (from.getStartTime() != null) { + to.setStartTime( from.getStartTime() ); + } + if (from.getUpdateTime() != null) { + to.setUpdateTime( from.getUpdateTime() ); + } + if (from.getEndTime() != null) { + to.setEndTime( from.getEndTime() ); + } + if (from.getStatus() != null) { + to.setStatus( toProto( from.getStatus() ) ); + } + if (from.getReasonForIncompletion() != null) { + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + } to.setExecutionTime( from.getExecutionTime() ); to.setQueueWaitTime( from.getQueueWaitTime() ); - to.setTaskDefName( from.getTaskDefName() ); - to.setTaskType( from.getTaskType() ); - to.setInput( from.getInput() ); - to.setOutput( from.getOutput() ); - to.setTaskId( from.getTaskId() ); + if (from.getTaskDefName() != null) { + to.setTaskDefName( from.getTaskDefName() ); + } + if 
(from.getTaskType() != null) { + to.setTaskType( from.getTaskType() ); + } + if (from.getInput() != null) { + to.setInput( from.getInput() ); + } + if (from.getOutput() != null) { + to.setOutput( from.getOutput() ); + } + if (from.getTaskId() != null) { + to.setTaskId( from.getTaskId() ); + } return to.build(); } public WorkflowPb.Workflow toProto(Workflow from) { WorkflowPb.Workflow.Builder to = WorkflowPb.Workflow.newBuilder(); - to.setStatus( toProto( from.getStatus() ) ); + if (from.getStatus() != null) { + to.setStatus( toProto( from.getStatus() ) ); + } to.setEndTime( from.getEndTime() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setParentWorkflowId( from.getParentWorkflowId() ); - to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + if (from.getWorkflowId() != null) { + to.setWorkflowId( from.getWorkflowId() ); + } + if (from.getParentWorkflowId() != null) { + to.setParentWorkflowId( from.getParentWorkflowId() ); + } + if (from.getParentWorkflowTaskId() != null) { + to.setParentWorkflowTaskId( from.getParentWorkflowTaskId() ); + } for (Task elem : from.getTasks()) { to.addTasks( toProto(elem) ); } @@ -800,13 +982,23 @@ public WorkflowPb.Workflow toProto(Workflow from) { for (Map.Entry pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } - to.setWorkflowType( from.getWorkflowType() ); + if (from.getWorkflowType() != null) { + to.setWorkflowType( from.getWorkflowType() ); + } to.setVersion( from.getVersion() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } + if (from.getReRunFromWorkflowId() != null) { + to.setReRunFromWorkflowId( from.getReRunFromWorkflowId() ); + } + if (from.getReasonForIncompletion() != null) { + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + } to.setSchemaVersion( from.getSchemaVersion() ); - to.setEvent( from.getEvent() ); + if (from.getEvent() != null) { + to.setEvent( from.getEvent() ); + } to.putAllTaskToDomain( from.getTaskToDomain() ); to.addAllFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); return to.build(); @@ -872,20 +1064,44 @@ public Workflow.WorkflowStatus fromProto(WorkflowPb.Workflow.WorkflowStatus from public WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { WorkflowSummaryPb.WorkflowSummary.Builder to = WorkflowSummaryPb.WorkflowSummary.newBuilder(); - to.setWorkflowType( from.getWorkflowType() ); + if (from.getWorkflowType() != null) { + to.setWorkflowType( from.getWorkflowType() ); + } to.setVersion( from.getVersion() ); - to.setWorkflowId( from.getWorkflowId() ); - to.setCorrelationId( from.getCorrelationId() ); - to.setStartTime( from.getStartTime() ); - to.setUpdateTime( from.getUpdateTime() ); - to.setEndTime( from.getEndTime() ); - to.setStatus( toProto( from.getStatus() ) ); - to.setInput( from.getInput() ); - to.setOutput( from.getOutput() ); - to.setReasonForIncompletion( from.getReasonForIncompletion() ); + if (from.getWorkflowId() != null) { + to.setWorkflowId( from.getWorkflowId() ); + } + if (from.getCorrelationId() != null) { + to.setCorrelationId( from.getCorrelationId() ); + } + if (from.getStartTime() != null) { + to.setStartTime( from.getStartTime() ); + } + if (from.getUpdateTime() != null) { + to.setUpdateTime( from.getUpdateTime() ); + } + if (from.getEndTime() != null) { + to.setEndTime( 
from.getEndTime() ); + } + if (from.getStatus() != null) { + to.setStatus( toProto( from.getStatus() ) ); + } + if (from.getInput() != null) { + to.setInput( from.getInput() ); + } + if (from.getOutput() != null) { + to.setOutput( from.getOutput() ); + } + if (from.getReasonForIncompletion() != null) { + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + } to.setExecutionTime( from.getExecutionTime() ); - to.setEvent( from.getEvent() ); - to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + if (from.getEvent() != null) { + to.setEvent( from.getEvent() ); + } + if (from.getFailedReferenceTaskNames() != null) { + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + } return to.build(); } From 179a2a451e103313ae4fb3efa464a67c046cc245 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 18 Jun 2018 18:30:31 +0200 Subject: [PATCH 054/163] grpc-server: Implement batchPoll --- .../conductor/client/grpc/MetadataClient.java | 1 + .../conductor/client/grpc/TaskClient.java | 45 +++++++++++++++- .../grpc/server/service/TaskServiceImpl.java | 52 +++++++------------ grpc/src/main/proto/grpc/task_service.proto | 10 ++-- 4 files changed, 67 insertions(+), 41 deletions(-) diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java index 7ec87b7222..d912d12c15 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java @@ -15,6 +15,7 @@ public class MetadataClient extends ClientBase { public MetadataClient(String address, int port) { super(address, port); + this.stub = MetadataServiceGrpc.newBlockingStub(this.channel); } /** diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java index b9b155cd7c..d9b1707040 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java @@ -1,6 +1,8 @@ package com.netflix.conductor.client.grpc; import com.google.common.base.Preconditions; +import com.google.common.collect.Iterators; +import com.google.common.collect.Lists; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.metadata.tasks.TaskResult; @@ -9,8 +11,7 @@ import com.netflix.conductor.proto.TaskPb; import org.apache.commons.lang3.StringUtils; -import java.util.List; -import java.util.Optional; +import java.util.*; import java.util.stream.Collectors; public class TaskClient extends ClientBase { @@ -44,6 +45,46 @@ public Task pollTask(String taskType, String workerId, String domain) { return protoMapper.fromProto(task); } + /** + * Perform a batch poll for tasks by task type. Batch size is configurable by count. + * + * @param taskType Type of task to poll for + * @param workerId Name of the client worker. Used for logging. + * @param count Maximum number of tasks to be returned. Actual number of tasks returned can be less than this number. + * @param timeoutInMillisecond Long poll wait timeout. + * @return List of tasks awaiting to be executed. 
+ */ + public List batchPollTasksByTaskType(String taskType, String workerId, int count, int timeoutInMillisecond) { + return Lists.newArrayList(batchPollTasksByTaskTypeAsync(taskType, workerId, count, timeoutInMillisecond)); + } + + /** + * Perform a batch poll for tasks by task type. Batch size is configurable by count. + * Returns an iterator that streams tasks as they become available through GRPC. + * + * @param taskType Type of task to poll for + * @param workerId Name of the client worker. Used for logging. + * @param count Maximum number of tasks to be returned. Actual number of tasks returned can be less than this number. + * @param timeoutInMillisecond Long poll wait timeout. + * @return Iterator of tasks awaiting to be executed. + */ + public Iterator batchPollTasksByTaskTypeAsync(String taskType, String workerId, int count, int timeoutInMillisecond) { + Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank"); + Preconditions.checkArgument(StringUtils.isNotBlank(workerId), "Worker id cannot be blank"); + Preconditions.checkArgument(count > 0, "Count must be greater than 0"); + + Iterator it = stub.batchPoll( + TaskServicePb.BatchPollRequest.newBuilder() + .setTaskType(taskType) + .setWorkerId(workerId) + .setCount(count) + .setTimeout(timeoutInMillisecond) + .build() + ); + + return Iterators.transform(it, protoMapper::fromProto); + } + /** * Retrieve pending tasks by type * diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index 3ae78d3d0c..f32e996a12 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -35,6 +35,7 @@ public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { private static final int MAX_TASK_COUNT = 100; private static final int POLL_TIMEOUT_MS = 100; + private static final int MAX_POLL_TIMEOUT_MS = 5000; private final ExecutionService taskService; private final QueueDAO queues; @@ -60,41 +61,24 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp } @Override - public StreamObserver pollStream(StreamObserver observer) { - final ServerCallStreamObserver responseObserver = - (ServerCallStreamObserver) observer; - - return new StreamObserver() { - @Override - public void onNext(TaskServicePb.StreamingPollRequest req) { - try { - for (TaskResultPb.TaskResult result : req.getCompletedList()) { - TaskResult task = protoMapper.fromProto(result); - taskService.updateTask(task); - } - - List newTasks = taskService.poll( - req.getTaskType(), req.getWorkerId(), req.getDomain(), - req.getCapacity(), POLL_TIMEOUT_MS); - - for (Task task : newTasks) { - responseObserver.onNext(protoMapper.toProto(task)); - } - } catch (Exception e) { - grpcHelper.onError(observer, e); - } - } - - @Override - public void onError(Throwable t) { - responseObserver.onError(t); - } + public void batchPoll(TaskServicePb.BatchPollRequest req, StreamObserver response) { + final int count = (req.getCount() == 0) ? 1 : req.getCount(); + final int timeout = (req.getTimeout() == 0) ? 
POLL_TIMEOUT_MS : req.getTimeout();
+
+        if (timeout > MAX_POLL_TIMEOUT_MS) {
+            response.onError(Status.INVALID_ARGUMENT
+                    .withDescription("longpoll timeout cannot be longer than " + MAX_POLL_TIMEOUT_MS + "ms")
+                    .asRuntimeException()
+            );
+            return;
+        }
 
-            @Override
-            public void onCompleted() {
-                responseObserver.onCompleted();
-            }
-        };
+        try {
+            List<Task> polledTasks = taskService.poll(req.getTaskType(), req.getWorkerId(), req.getDomain(), count, timeout);
+            polledTasks.stream().map(protoMapper::toProto).forEach(response::onNext);
+        } catch (Exception e) {
+            grpcHelper.onError(response, e);
+        }
     }
 
     @Override
diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto
index 1dc98c54a0..1caef0e10c 100644
--- a/grpc/src/main/proto/grpc/task_service.proto
+++ b/grpc/src/main/proto/grpc/task_service.proto
@@ -14,8 +14,8 @@ service TaskService {
     // GET /poll/{tasktype}
     rpc Poll(PollRequest) returns (conductor.proto.Task);
 
-    // GRPC-only
-    rpc PollStream(stream StreamingPollRequest) returns (stream conductor.proto.Task);
+    // /poll/batch/{tasktype}
+    rpc BatchPoll(BatchPollRequest) returns (stream conductor.proto.Task);
 
     // GET /in_progress/{tasktype}
     rpc GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse);
@@ -57,12 +57,12 @@ message PollRequest {
     string domain = 3;
 }
 
-message StreamingPollRequest {
+message BatchPollRequest {
     string task_type = 1;
     string worker_id = 2;
     string domain = 3;
-    int32 capacity = 4;
-    repeated conductor.proto.TaskResult completed = 5;
+    int32 count = 4;
+    int32 timeout = 5;
 }
 
 message TasksInProgressRequest {

From 90a1092a2abecb77bb94d27794b4a07a85f3f1f8 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 18 Jun 2018 18:31:23 +0200
Subject: [PATCH 055/163] grpc-server: Properly handle empty responses

Unlike what the GRPC Java docs would lead us to believe, a unary call
always needs to call `onNext` at least once, even for APIs that return
an Empty message.
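As a minimal sketch of the resulting pattern (the `PingService` types here
are hypothetical stand-ins for any generated unary endpoint that returns
`google.protobuf.Empty`; only `Empty` and `StreamObserver` are real
grpc-java classes):

    import com.google.protobuf.Empty;
    import io.grpc.stub.StreamObserver;

    public class PingServiceImpl extends PingServiceGrpc.PingServiceImplBase {
        @Override
        public void ping(PingServicePb.PingRequest req, StreamObserver<Empty> response) {
            // A unary call must emit exactly one message: completing the
            // stream without a value makes a blocking client stub fail the
            // call instead of returning normally.
            response.onNext(Empty.getDefaultInstance());
            response.onCompleted();
        }
    }

This is the behavior the `GRPCHelper.emptyResponse()` helper below
encapsulates for every void-returning endpoint.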
--- .../grpc/server/service/EventServiceImpl.java | 9 +++++++-- .../grpc/server/service/GRPCHelper.java | 6 ++++++ .../server/service/MetadataServiceImpl.java | 10 +++++----- .../grpc/server/service/TaskServiceImpl.java | 2 +- .../server/service/WorkflowServiceImpl.java | 18 +++++++++--------- 5 files changed, 28 insertions(+), 17 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java index 8dd93564bf..8b4af7b1ef 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java @@ -15,9 +15,13 @@ import javax.inject.Inject; import io.grpc.stub.StreamObserver; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { + private static final Logger logger = LoggerFactory.getLogger(EventServiceImpl.class); private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; + private static final GRPCHelper grpcHelper = new GRPCHelper(logger); private final MetadataService service; private final EventProcessor ep; @@ -31,18 +35,19 @@ public EventServiceImpl(MetadataService service, EventProcessor ep) { @Override public void addEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { service.addEventHandler(protoMapper.fromProto(req)); - response.onCompleted(); + grpcHelper.emptyResponse(response); } @Override public void updateEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { service.updateEventHandler(protoMapper.fromProto(req)); - response.onCompleted(); + grpcHelper.emptyResponse(response); } @Override public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, StreamObserver response) { service.removeEventHandlerStatus(req.getName()); + grpcHelper.emptyResponse(response); } @Override diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java index 0aa5eae490..0777eaba87 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java @@ -1,5 +1,6 @@ package com.netflix.conductor.grpc.server.service; +import com.google.protobuf.Empty; import com.google.rpc.DebugInfo; import io.grpc.Metadata; import io.grpc.Status; @@ -89,4 +90,9 @@ void onError(StreamObserver response, Throwable t) { logger.error("internal exception during GRPC request", t); response.onError(throwableToStatusException(t)); } + + void emptyResponse(StreamObserver response) { + response.onNext(Empty.getDefaultInstance()); + response.onCompleted(); + } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index 39a79a1e37..167df56172 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -34,7 +34,7 @@ public MetadataServiceImpl(MetadataService service) { public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver response) { try { 
service.registerWorkflowDef(protoMapper.fromProto(req)); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -47,7 +47,7 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream try { service.updateWorkflowDef(workflows); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -73,13 +73,13 @@ public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver service.registerTaskDef( req.getDefsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) ); - response.onCompleted(); + grpcHelper.emptyResponse(response); } @Override public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { service.updateTaskDef(protoMapper.fromProto(req)); - response.onCompleted(); + grpcHelper.emptyResponse(response); } @Override @@ -99,6 +99,6 @@ public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { service.unregisterTaskDef(req.getTaskType()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index f32e996a12..1d34307530 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -174,7 +174,7 @@ public void getTask(TaskServicePb.TaskId req, StreamObserver respon @Override public void removeTaskFromQueue(TaskServicePb.RemoveTaskRequest req, StreamObserver response) { taskService.removeTaskfromQueue(req.getTaskType(), req.getTaskId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } @Override diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index fe09d9d635..659cb7c6ed 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -112,7 +112,7 @@ public void getWorkflowStatus(WorkflowServicePb.GetWorkflowStatusRequest req, St public void removeWorkflow(WorkflowServicePb.RemoveWorkflowRequest req, StreamObserver response) { try { service.removeWorkflow(req.getWorkflodId(), req.getArchiveWorkflow()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -144,7 +144,7 @@ public void getRunningWorkflows(WorkflowServicePb.GetRunningWorkflowsRequest req public void decideWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.decide(req.getWorkflowId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -154,7 +154,7 @@ public void decideWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.pauseWorkflow(req.getWorkflowId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -164,7 +164,7 @@ public void pauseWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.resumeWorkflow(req.getWorkflowId()); - 
response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -175,7 +175,7 @@ public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamOb try { SkipTaskRequest skipTask = protoMapper.fromProto(req.getRequest()); executor.skipTaskFromWorkflow(req.getWorkflowId(), req.getTaskReferenceName(), skipTask); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -196,7 +196,7 @@ public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, Strea public void restartWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.rewind(req.getWorkflowId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -206,7 +206,7 @@ public void restartWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.retry(req.getWorkflowId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -216,7 +216,7 @@ public void retryWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { try { executor.resetCallbacksForInProgressTasks(req.getWorkflowId()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -226,7 +226,7 @@ public void resetWorkflowCallbacks(WorkflowServicePb.WorkflowId req, StreamObser public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, StreamObserver response) { try { executor.terminateWorkflow(req.getWorkflowId(), req.getReason()); - response.onCompleted(); + grpcHelper.emptyResponse(response); } catch (Exception e) { grpcHelper.onError(response, e); } From 325b99a896c13593b88a166d88a75294ede794c2 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 18 Jun 2018 18:33:41 +0200 Subject: [PATCH 056/163] test-harness: Implement GRPC integration tests --- .../tests/integration/End2EndGrpcTests.java | 216 ++++++++++++++++++ .../tests/integration/End2EndTests.java | 24 +- .../tests/utils/TestEnvironment.java | 23 ++ 3 files changed, 249 insertions(+), 14 deletions(-) create mode 100644 test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java create mode 100644 test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java new file mode 100644 index 0000000000..e9a32cfac5 --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -0,0 +1,216 @@ +/** + * Copyright 2016 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +/** + * + */ +package com.netflix.conductor.tests.integration; + +import com.google.inject.Guice; +import com.google.inject.Injector; +import com.netflix.conductor.bootstrap.BootstrapModule; +import com.netflix.conductor.bootstrap.ModulesProvider; +import com.netflix.conductor.client.grpc.MetadataClient; +import com.netflix.conductor.client.grpc.TaskClient; +import com.netflix.conductor.client.grpc.WorkflowClient; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.Workflow.WorkflowStatus; +import com.netflix.conductor.grpc.server.GRPCServer; +import com.netflix.conductor.grpc.server.GRPCServerProvider; +import com.netflix.conductor.tests.utils.TestEnvironment; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; + +import static org.junit.Assert.*; + +/** + * @author Viren + * + */ +public class End2EndGrpcTests { + private static TaskClient tc; + private static WorkflowClient wc; + private static MetadataClient mc; + + @BeforeClass + public static void setup() throws Exception { + TestEnvironment.setup(); + System.setProperty("conductor.grpc.server.enabled", "true"); + + Injector bootInjector = Guice.createInjector(new BootstrapModule()); + Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); + Optional server = serverInjector.getInstance(GRPCServerProvider.class).get(); + assertTrue("failed to instantiate GRPCServer", server.isPresent()); + server.get().start(); + + tc = new TaskClient("localhost", 8090); + wc = new WorkflowClient("localhost", 8090); + mc = new MetadataClient("localhost", 8090); + } + + @AfterClass + public static void teardown() { + TestEnvironment.teardown(); + } + + @Test + public void testAll() throws Exception { + assertNotNull(tc); + List defs = new LinkedList<>(); + for(int i = 0; i < 5; i++) { + TaskDef def = new TaskDef("t" + i, "task " + i); + def.setTimeoutPolicy(TimeoutPolicy.RETRY); + defs.add(def); + } + mc.registerTaskDefs(defs); + + for(int i = 0; i < 5; i++) { + final String taskName = "t" + i; + TaskDef def = mc.getTaskDef(taskName); + assertNotNull(def); + assertEquals(taskName, def.getName()); + } + + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + WorkflowTask t0 = new WorkflowTask(); + t0.setName("t0"); + t0.setWorkflowTaskType(Type.SIMPLE); + t0.setTaskReferenceName("t0"); + + WorkflowTask t1 = new WorkflowTask(); + t1.setName("t1"); + t1.setWorkflowTaskType(Type.SIMPLE); + t1.setTaskReferenceName("t1"); + + + def.getTasks().add(t0); + def.getTasks().add(t1); + + mc.registerWorkflowDef(def); + WorkflowDef foundd = mc.getWorkflowDef(def.getName(), null); + assertNotNull(foundd); + assertEquals(def.getName(), foundd.getName()); + assertEquals(def.getVersion(), foundd.getVersion()); + + String correlationId = "test_corr_id"; + 
StartWorkflowRequest startWf = new StartWorkflowRequest(); + startWf.setName(def.getName()); + startWf.setCorrelationId(correlationId); + + String workflowId = wc.startWorkflow(startWf); + assertNotNull(workflowId); + System.out.println("Started workflow id="+workflowId); + + Workflow wf = wc.getWorkflow(workflowId, false); + assertEquals(0, wf.getTasks().size()); + assertEquals(workflowId, wf.getWorkflowId()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(workflowId, wf.getWorkflowId()); + + List runningIds = wc.getRunningWorkflow(def.getName(), Optional.of(def.getVersion())); + assertNotNull(runningIds); + assertEquals(1, runningIds.size()); + assertEquals(workflowId, runningIds.get(0)); + + List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); + assertNotNull(polled); + assertEquals(0, polled.size()); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertEquals(1, polled.size()); + assertEquals(t0.getName(), polled.get(0).getTaskDefName()); + Task task = polled.get(0); + + Boolean acked = tc.ack(task.getTaskId(), "test"); + assertNotNull(acked); + assertTrue(acked.booleanValue()); + + task.getOutputData().put("key1", "value1"); + task.setStatus(Status.COMPLETED); + tc.updateTask(new TaskResult(task)); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertTrue(polled.toString(), polled.isEmpty()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(2, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); + assertEquals(Status.COMPLETED, wf.getTasks().get(0).getStatus()); + assertEquals(Status.SCHEDULED, wf.getTasks().get(1).getStatus()); + + Task taskById = tc.getTaskDetails(task.getTaskId()); + assertNotNull(taskById); + assertEquals(task.getTaskId(), taskById.getTaskId()); + + + List getTasks = tc.getPendingTasksByType(t0.getName(), null, Optional.of(1)); + assertNotNull(getTasks); + assertEquals(0, getTasks.size()); //getTasks only gives pending tasks + + + getTasks = tc.getPendingTasksByType(t1.getName(), null, Optional.of(1)); + assertNotNull(getTasks); + assertEquals(1, getTasks.size()); + + + Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); + assertNotNull(pending); + assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); + assertEquals(workflowId, pending.getWorkflowInstanceId()); + + /* + Thread.sleep(1000); + SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); + assertNotNull(searchResult); + assertEquals(1, searchResult.getTotalHits()); + + wc.terminateWorkflow(workflowId, "terminate reason"); + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); + + wc.restart(workflowId); + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + */ + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java 
b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index e5a93315c5..07383f7316 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -39,6 +39,8 @@ import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.jetty.server.JettyServer; +import com.netflix.conductor.tests.utils.TestEnvironment; +import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -55,24 +57,13 @@ * */ public class End2EndTests { - - static { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - System.setProperty("workflow.elasticsearch.url", "localhost:9300"); - System.setProperty("workflow.elasticsearch.index.name", "conductor"); - System.setProperty("workflow.namespace.prefix", "integration-test"); - System.setProperty("db", "memory"); - System.setProperty("workflow.elasticsearch.version", "5"); - } - private static TaskClient tc; - private static WorkflowClient wc; - - + @BeforeClass public static void setup() throws Exception { + TestEnvironment.setup(); + Injector bootInjector = Guice.createInjector(new BootstrapModule()); Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); JettyServer server = new JettyServer(8080, false); @@ -84,6 +75,11 @@ public static void setup() throws Exception { wc = new WorkflowClient(); wc.setRootURI("http://localhost:8080/api/"); } + + @AfterClass + public static void teardown() { + TestEnvironment.teardown(); + } @Test public void testAll() throws Exception { diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java new file mode 100644 index 0000000000..cb0dbe94ac --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java @@ -0,0 +1,23 @@ +package com.netflix.conductor.tests.utils; + +public class TestEnvironment { + private TestEnvironment() {} + + private static void setupSystemProperties() { + System.setProperty("EC2_REGION", "us-east-1"); + System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); + System.setProperty("workflow.elasticsearch.url", "localhost:9300"); + System.setProperty("workflow.elasticsearch.index.name", "conductor"); + System.setProperty("workflow.namespace.prefix", "integration-test"); + System.setProperty("db", "memory"); + System.setProperty("workflow.elasticsearch.version", "5"); + } + + public static void setup() { + setupSystemProperties(); + } + + public static void teardown() { + System.setProperties(null); + } +} From bfe24a0cc57c4faebe2d027d52dacb92cd360646 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Tue, 19 Jun 2018 11:37:23 +0200 Subject: [PATCH 057/163] Fix review feedback about formatting. 
--- .../conductor/core/config/Configuration.java | 57 +++++++------------ 1 file changed, 22 insertions(+), 35 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index ade4128b32..920aeef36c 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -22,7 +22,6 @@ /** * @author Viren - * */ public interface Configuration { String DB_PROPERTY_NAME = "db"; @@ -59,7 +58,7 @@ public interface Configuration { String ADDITIONAL_MODULES_PROPERTY_NAME = "conductor.additional.modules"; - default DB getDB(){ + default DB getDB() { return DB.valueOf(getDBString()); } @@ -68,77 +67,65 @@ default String getDBString() { } /** - * * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows. */ int getSweepFrequency(); /** - * * @return when set to true, the sweep is disabled */ boolean disableSweep(); /** - * * @return when set to true, the background task workers executing async system tasks (eg HTTP) are disabled - * */ boolean disableAsyncWorkers(); /** - * * @return ID of the server. Can be host name, IP address or any other meaningful identifier. Used for logging */ String getServerId(); /** - * * @return Current environment. e.g. test, prod */ String getEnvironment(); /** - * * @return name of the stack under which the app is running. e.g. devint, testintg, staging, prod etc. */ String getStack(); /** - * * @return APP ID. Used for logging */ String getAppId(); /** - * * @return Data center region. if hosting on Amazon the value is something like us-east-1, us-west-2 etc. */ String getRegion(); /** - * * @return Availability zone / rack. for AWS deployments, the value is something like us-east-1a, etc. */ String getAvailabilityZone(); - default boolean getJerseyEnabled(){ + default boolean getJerseyEnabled() { return getBooleanProperty(JERSEY_ENABLED_PROPERTY_NAME, JERSEY_ENABLED_DEFAULT_VALUE); } /** - * - * @param name Name of the property - * @param defaultValue Default value when not specified + * @param name Name of the property + * @param defaultValue Default value when not specified * @return User defined integer property. */ int getIntProperty(String name, int defaultValue); /** - * - * @param name Name of the property - * @param defaultValue Default value when not specified + * @param name Name of the property + * @param defaultValue Default value when not specified * @return User defined string property. */ String getProperty(String name, String defaultValue); @@ -147,36 +134,36 @@ default boolean getJerseyEnabled(){ default boolean getBoolProperty(String name, boolean defaultValue) { String value = getProperty(name, null); - if(null == value || value.trim().length() == 0){ return defaultValue; } + if (null == value || value.trim().length() == 0) { + return defaultValue; + } return Boolean.valueOf(value.trim()); } /** - * * @return Returns all the configurations in a map. */ Map getAll(); /** - * - * @return Provides a list of additional modules to configure. - * Use this to inject additional modules that should be loaded as part of the Conductor server initialization - * If you are creating custom tasks (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask) then initialize them as part of the custom modules. + * @return Provides a list of additional modules to configure. 
Use this to inject additional modules that should be + * loaded as part of the Conductor server initialization If you are creating custom tasks + * (com.netflix.conductor.core.execution.tasks.WorkflowSystemTask) then initialize them as part of the custom + * modules. */ default List getAdditionalModules() { return null; } - /** - * - * @param name Name of the property - * @param defaultValue Default value when not specified - * @return User defined Long property. - */ - long getLongProperty(String name, long defaultValue); + /** + * @param name Name of the property + * @param defaultValue Default value when not specified + * @return User defined Long property. + */ + long getLongProperty(String name, long defaultValue); - enum DB { - REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL - } + enum DB { + REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL + } } From 39bc70773723955b75a212b154a5c39bc3b714d3 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 19 Jun 2018 15:01:31 +0200 Subject: [PATCH 058/163] protogen: Better handling of extern proto types --- .../com/netflix/conductor/protogen/File.java | 8 +++--- .../protogen/types/AbstractType.java | 21 +++++++++------ .../{AnyType.java => ExternMessageType.java} | 17 ++++++------ .../conductor/protogen/types/MessageType.java | 27 +++++++++++++------ 4 files changed, 45 insertions(+), 28 deletions(-) rename protogen/src/main/java/com/netflix/conductor/protogen/types/{AnyType.java => ExternMessageType.java} (71%) diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/File.java b/protogen/src/main/java/com/netflix/conductor/protogen/File.java index 7c00640117..e4c134bed9 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/File.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/File.java @@ -10,11 +10,13 @@ public class File { private ClassName baseClass; private Element message; + private String filePath; public File(Class object) { String className = object.getSimpleName() + PROTO_SUFFIX; - baseClass = ClassName.get(ProtoGen.PROTO_JAVA_PACKAGE_NAME, className); - this.message = new Message(object, AbstractType.baseClass(baseClass, this)); + this.filePath = "model/" + object.getSimpleName().toLowerCase() + ".proto"; + this.baseClass = ClassName.get(ProtoGen.PROTO_JAVA_PACKAGE_NAME, className); + this.message = new Message(object, AbstractType.baseClass(baseClass, filePath)); } public String getJavaClassName() { @@ -22,7 +24,7 @@ public String getJavaClassName() { } public String getFilePath() { - return "model/" + message.getName().toLowerCase() + ".proto"; + return filePath; } public String getPackageName() { diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java index abdb67b23f..cfbc02d557 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java @@ -5,9 +5,7 @@ import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; import com.squareup.javapoet.TypeName; -import com.squareup.javapoet.TypeSpec; -import javax.lang.model.element.Modifier; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.*; @@ -26,7 +24,14 @@ private static void addScalar(Type t, String protoType) { addScalar(boolean.class, "bool"); addScalar(Boolean.class, "bool"); - TYPES.put(Object.class, new AnyType()); + + TYPES.put(Object.class, + new 
ExternMessageType( + Object.class, + ClassName.get("com.google.protobuf", "Value"), + "google.protobuf.Value", + "google/protobuf/struct.proto") + ); } static Map PROTO_LIST_TYPES = new HashMap<>(); @@ -65,12 +70,12 @@ public static MessageType get(String className) { } public static MessageType declare(Class type, MessageType parent) { - return declare(type, (ClassName)parent.getJavaProtoType(), parent.getProtoFile()); + return declare(type, (ClassName)parent.getJavaProtoType(), parent.getProtoFilePath()); } - public static MessageType declare(Class type, ClassName parentType, File protoFile) { + public static MessageType declare(Class type, ClassName parentType, String protoFilePath) { String simpleName = type.getSimpleName(); - MessageType t = new MessageType(type, parentType.nestedClass(simpleName), protoFile); + MessageType t = new MessageType(type, parentType.nestedClass(simpleName), protoFilePath); if (TYPES.containsKey(type)) { throw new IllegalArgumentException("duplicate type declaration: "+type); } @@ -78,8 +83,8 @@ public static MessageType declare(Class type, ClassName parentType, File protoFi return t; } - public static MessageType baseClass(ClassName className, File protoFile) { - return new MessageType(Object.class, className, protoFile); + public static MessageType baseClass(ClassName className, String protoFilePath) { + return new MessageType(Object.class, className, protoFilePath); } Type javaType; diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java similarity index 71% rename from protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java rename to protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java index 98fd67376e..81390b573a 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AnyType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java @@ -4,21 +4,20 @@ import com.squareup.javapoet.MethodSpec; import javax.lang.model.element.Modifier; +import java.lang.reflect.Type; import java.util.Set; -public class AnyType extends MessageType { - public AnyType() { - super(Object.class, ClassName.get("com.google.protobuf", "Value"), null); - } +public class ExternMessageType extends MessageType { + private String externProtoType; - @Override - public String getProtoType() { - return "google.protobuf.Value"; + public ExternMessageType(Type javaType, ClassName javaProtoType, String externProtoType, String protoFilePath) { + super(javaType, javaProtoType, protoFilePath); + this.externProtoType = externProtoType; } @Override - public void getDependencies(Set deps) { - deps.add("google/protobuf/struct.proto"); + public String getProtoType() { + return externProtoType; } @Override diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java index fab158f9fa..f87326fe5b 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java @@ -11,11 +11,11 @@ import java.util.Set; public class MessageType extends AbstractType { - private File protoFile; + private String protoFilePath; - public MessageType(Type javaType, ClassName javaProtoType, File protoFile) { + public MessageType(Type javaType, ClassName javaProtoType, String protoFilePath) { 
super(javaType, javaProtoType); - this.protoFile = protoFile; + this.protoFilePath = protoFilePath; } @Override @@ -24,15 +24,15 @@ public String getProtoType() { return String.join(".", classes.subList(1, classes.size())); } + public String getProtoFilePath() { + return protoFilePath; + } + @Override public TypeName getRawJavaType() { return getJavaProtoType(); } - public File getProtoFile() { - return protoFile; - } - @Override public void mapToProto(String field, MethodSpec.Builder method) { final String getter = fieldMethod("get", field); @@ -41,15 +41,26 @@ public void mapToProto(String field, MethodSpec.Builder method) { method.endControlFlow(); } + private boolean isEnum() { + Type clazz = getJavaType(); + return (clazz instanceof Class) && ((Class) clazz).isEnum(); + } + @Override public void mapFromProto(String field, MethodSpec.Builder method) { + if (!isEnum()) + method.beginControlFlow("if (from.$L())", fieldMethod("has", field)); + method.addStatement("to.$L( fromProto( from.$L() ) )", fieldMethod("set", field), fieldMethod("get", field)); + + if (!isEnum()) + method.endControlFlow(); } @Override public void getDependencies(Set deps) { - deps.add(getProtoFile().getFilePath()); + deps.add(protoFilePath); } @Override From 47607a32f51f4ec5de0ee33470a63e0c754c64c9 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 19 Jun 2018 15:18:38 +0200 Subject: [PATCH 059/163] grpc: Handle conversion of missing Message types --- .../conductor/grpc/AbstractProtoMapper.java | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index c1166111a9..e5018ff0ea 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -224,9 +224,15 @@ public EventHandlerPb.EventHandler.Action toProto(EventHandler.Action from) { public EventHandler.Action fromProto(EventHandlerPb.EventHandler.Action from) { EventHandler.Action to = new EventHandler.Action(); to.setAction( fromProto( from.getAction() ) ); - to.setStartWorkflow( fromProto( from.getStartWorkflow() ) ); - to.setCompleteTask( fromProto( from.getCompleteTask() ) ); - to.setFailTask( fromProto( from.getFailTask() ) ); + if (from.hasStartWorkflow()) { + to.setStartWorkflow( fromProto( from.getStartWorkflow() ) ); + } + if (from.hasCompleteTask()) { + to.setCompleteTask( fromProto( from.getCompleteTask() ) ); + } + if (from.hasFailTask()) { + to.setFailTask( fromProto( from.getFailTask() ) ); + } to.setExpandInlineJson( from.getExpandInlineJson() ); return to; } @@ -376,7 +382,9 @@ public Task fromProto(TaskPb.Task from) { outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutputData(outputDataMap); - to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); + if (from.hasWorkflowTask()) { + to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); + } to.setDomain( from.getDomain() ); return to; } @@ -743,7 +751,9 @@ public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { SubWorkflowParams to = new SubWorkflowParams(); to.setName( from.getName() ); - to.setVersion( fromProto( from.getVersion() ) ); + if (from.hasVersion()) { + to.setVersion( fromProto( from.getVersion() ) ); + } return to; } @@ -866,7 +876,9 @@ public WorkflowTask 
fromProto(WorkflowTaskPb.WorkflowTask from) { to.setDefaultCase( from.getDefaultCaseList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setForkTasks( from.getForkTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setStartDelay( from.getStartDelay() ); - to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); + if (from.hasSubWorkflowParam()) { + to.setSubWorkflowParam( fromProto( from.getSubWorkflowParam() ) ); + } to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setSink( from.getSink() ); to.setOptional( from.getOptional() ); From 5042bb085132ec1c5a6c7d60ce076421ee453948 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 19 Jun 2018 16:33:03 +0200 Subject: [PATCH 060/163] grpc: Fix handling of optional types in GRPC interface --- .../conductor/client/grpc/MetadataClient.java | 18 ++++++---- .../conductor/client/grpc/TaskClient.java | 34 +++++++++++++------ .../conductor/client/grpc/WorkflowClient.java | 6 ++-- .../grpc/server/service/GRPCHelper.java | 18 ++++++++++ .../server/service/MetadataServiceImpl.java | 3 +- .../grpc/server/service/TaskServiceImpl.java | 20 +++++++---- .../server/service/WorkflowServiceImpl.java | 9 ++--- .../tests/integration/End2EndGrpcTests.java | 8 ++--- 8 files changed, 79 insertions(+), 37 deletions(-) diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java index d912d12c15..858f633a9f 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java @@ -8,7 +8,9 @@ import com.netflix.conductor.proto.WorkflowDefPb; import org.apache.commons.lang3.StringUtils; +import javax.annotation.Nullable; import java.util.List; +import java.util.Optional; public class MetadataClient extends ClientBase { private MetadataServiceGrpc.MetadataServiceBlockingStub stub; @@ -53,15 +55,17 @@ public void updateWorkflowDefs(List workflowDefs) { * @param version the version of the workflow def * @return Workflow definition for the given workflow and version */ - public WorkflowDef getWorkflowDef(String name, Integer version) { + public WorkflowDef getWorkflowDef(String name, @Nullable Integer version) { Preconditions.checkArgument(StringUtils.isNotBlank(name), "name cannot be blank"); - WorkflowDefPb.WorkflowDef workflowDef = stub.getWorkflow( + + MetadataServicePb.GetWorkflowRequest.Builder request = MetadataServicePb.GetWorkflowRequest.newBuilder() - .setName(name) - .setVersion(version) - .build() - ); - return protoMapper.fromProto(workflowDef); + .setName(name); + + if (version != null) + request.setVersion(version); + + return protoMapper.fromProto(stub.getWorkflow(request.build())); } /** diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java index d9b1707040..c604cfa50c 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java @@ -11,6 +11,7 @@ import com.netflix.conductor.proto.TaskPb; import org.apache.commons.lang3.StringUtils; +import javax.annotation.Nullable; import java.util.*; import java.util.stream.Collectors; @@ -93,10 +94,21 @@ public Iterator 
batchPollTasksByTaskTypeAsync(String taskType, String work
      * @param count number of tasks to retrieve
      * @return Returns the list of PENDING tasks by type, starting with a given task Id.
      */
-    public List<Task> getPendingTasksByType(String taskType, Optional<String> startKey, Optional<Integer> count) {
+    public List<Task> getPendingTasksByType(String taskType, @Nullable String startKey, @Nullable Integer count) {
         Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
-        // TODO
-        return null;
+
+        TaskServicePb.TasksInProgressRequest.Builder request = TaskServicePb.TasksInProgressRequest.newBuilder();
+        request.setTaskType(taskType);
+        if (startKey != null)
+            request.setStartKey(startKey);
+        if (count != null)
+            request.setCount(count);
+
+        return stub.getTasksInProgress(request.build())
+                .getTasksList()
+                .stream()
+                .map(protoMapper::fromProto)
+                .collect(Collectors.toList());
     }
 
     /**
@@ -136,15 +148,15 @@ public void updateTask(TaskResult taskResult) {
      * @param workerId user identified worker.
      * @return true if the task was found with the given ID and acknowledged. False otherwise. If the server returns false, the client should NOT attempt to ack again.
      */
-    public boolean ack(String taskId, String workerId) {
-        // TODO: Optional
+    public boolean ack(String taskId, @Nullable String workerId) {
         Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank");
-        return stub.ackTask(
-                TaskServicePb.AckTaskRequest.newBuilder()
-                        .setTaskId(taskId)
-                        .setWorkerId(workerId)
-                        .build()
-        ).getAck();
+
+        TaskServicePb.AckTaskRequest.Builder request = TaskServicePb.AckTaskRequest.newBuilder();
+        request.setTaskId(taskId);
+        if (workerId != null)
+            request.setWorkerId(workerId);
+
+        return stub.ackTask(request.build()).getAck();
     }
 
     /**
diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java
index 2274e81c1f..0f2d114e8a 100644
--- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java
@@ -9,6 +9,7 @@
 import com.netflix.conductor.proto.WorkflowPb;
 import org.apache.commons.lang3.StringUtils;
 
+import javax.annotation.Nullable;
 import java.util.*;
 import java.util.stream.Collectors;
 
@@ -17,6 +18,7 @@ public class WorkflowClient extends ClientBase {
 
     public WorkflowClient(String address, int port) {
         super(address, port);
+        this.stub = WorkflowServiceGrpc.newBlockingStub(this.channel);
     }
 
     /**
@@ -105,13 +107,13 @@ public void deleteWorkflow(String workflowId, boolean archiveWorkflow) {
      * @param version the version of the workflow definition. Defaults to 1.
      * @return the list of running workflow instances
      */
-    public List<String> getRunningWorkflow(String workflowName, Optional<Integer> version) {
+    public List<String> getRunningWorkflow(String workflowName, @Nullable Integer version) {
         Preconditions.checkArgument(StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank");
 
         WorkflowServicePb.GetRunningWorkflowsResponse workflows = stub.getRunningWorkflows(
                 WorkflowServicePb.GetRunningWorkflowsRequest.newBuilder()
                         .setName(workflowName)
-                        .setVersion(version.orElse(1))
+                        .setVersion(version == null ?
1 : version) .build() ); return workflows.getWorkflowIdsList(); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java index 0777eaba87..6015783c04 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java @@ -10,6 +10,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils; import org.slf4j.Logger; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import java.util.Arrays; public class GRPCHelper { @@ -95,4 +97,20 @@ void emptyResponse(StreamObserver response) { response.onNext(Empty.getDefaultInstance()); response.onCompleted(); } + + String optional(@Nonnull String str) { + return str.isEmpty() ? null : str; + } + + String optionalOr(@Nonnull String str, String defaults) { + return str.isEmpty() ? defaults : str; + } + + Integer optional(@Nonnull Integer i) { + return i == 0 ? null : i; + } + + Integer optionalOr(@Nonnull Integer i, int defaults) { + return i == 0 ? defaults : i; + } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index 167df56172..7cfd60f80d 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -55,8 +55,7 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream @Override public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver response) { - // TODO: req.getVersion optional - WorkflowDef def = service.getWorkflowDef(req.getName(), req.getVersion()); + WorkflowDef def = service.getWorkflowDef(req.getName(), grpcHelper.optional(req.getVersion())); if (def != null) { response.onNext(protoMapper.toProto(def)); response.onCompleted(); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index 1d34307530..f968af8105 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -49,7 +49,8 @@ public TaskServiceImpl(ExecutionService taskService, QueueDAO queues, Configurat @Override public void poll(TaskServicePb.PollRequest req, StreamObserver response) { try { - List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), req.getDomain(), 1, POLL_TIMEOUT_MS); + List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), + grpcHelper.optional(req.getDomain()), 1, POLL_TIMEOUT_MS); if (!tasks.isEmpty()) { TaskPb.Task t = protoMapper.toProto(tasks.get(0)); response.onNext(t); @@ -62,8 +63,8 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver resp @Override public void batchPoll(TaskServicePb.BatchPollRequest req, StreamObserver response) { - final int count = (req.getCount() == 0) ? 1 : req.getCount(); - final int timeout = (req.getTimeout() == 0) ? 
POLL_TIMEOUT_MS : req.getTimeout(); + final int count = grpcHelper.optionalOr(req.getCount(), 1); + final int timeout = grpcHelper.optionalOr(req.getTimeout(), POLL_TIMEOUT_MS); if (timeout > MAX_POLL_TIMEOUT_MS) { response.onError(Status.INVALID_ARGUMENT @@ -74,8 +75,11 @@ public void batchPoll(TaskServicePb.BatchPollRequest req, StreamObserver polledTasks = taskService.poll(req.getTaskType(), req.getWorkerId(), req.getDomain(), count, timeout); + List polledTasks = taskService.poll(req.getTaskType(), req.getWorkerId(), + grpcHelper.optional(req.getDomain()), count, timeout); + logger.info("polled tasks: "+polledTasks); polledTasks.stream().map(protoMapper::toProto).forEach(response::onNext); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -83,13 +87,15 @@ public void batchPoll(TaskServicePb.BatchPollRequest req, StreamObserver response) { - final int count = (req.getCount() != 0) ? req.getCount() : MAX_TASK_COUNT; + final String startKey = grpcHelper.optional(req.getStartKey()); + final int count = grpcHelper.optionalOr(req.getCount(), MAX_TASK_COUNT); try { response.onNext( TaskServicePb.TasksInProgressResponse.newBuilder().addAllTasks( - taskService.getTasks(req.getTaskType(), req.getStartKey(), count).stream() - .map(protoMapper::toProto)::iterator + taskService.getTasks(req.getTaskType(), startKey, count) + .stream() + .map(protoMapper::toProto)::iterator ).build() ); response.onCompleted(); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 659cb7c6ed..2b878578e9 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -55,8 +55,9 @@ private WorkflowServicePb.WorkflowId newWorkflowId(String id) { @Override public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, StreamObserver response) { - StartWorkflowRequest request = protoMapper.fromProto(pbRequest); - WorkflowDef def = metadata.getWorkflowDef(request.getName(), request.getVersion()); + // TODO: better handling of optional 'version' + final StartWorkflowRequest request = protoMapper.fromProto(pbRequest); + WorkflowDef def = metadata.getWorkflowDef(request.getName(), grpcHelper.optional(request.getVersion())); if(def == null){ response.onError(Status.NOT_FOUND .withDescription("No such workflow found by name="+request.getName()) @@ -234,9 +235,9 @@ public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, St private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamObserver response) { final int start = req.getStart(); - final int size = (req.getSize() != 0) ? req.getSize() : maxSearchSize; + final int size = grpcHelper.optionalOr(req.getSize(), maxSearchSize); final List sort = convertSort(req.getSort()); - final String freeText = req.getFreeText().isEmpty() ? 
"*" : req.getFreeText(); + final String freeText = grpcHelper.optionalOr(req.getFreeText(), "*"); final String query = req.getQuery(); if (size > maxSearchSize) { diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index e9a32cfac5..2a155f4ac5 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -139,7 +139,7 @@ public void testAll() throws Exception { assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); assertEquals(workflowId, wf.getWorkflowId()); - List runningIds = wc.getRunningWorkflow(def.getName(), Optional.of(def.getVersion())); + List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); assertNotNull(runningIds); assertEquals(1, runningIds.size()); assertEquals(workflowId, runningIds.get(0)); @@ -156,7 +156,7 @@ public void testAll() throws Exception { Boolean acked = tc.ack(task.getTaskId(), "test"); assertNotNull(acked); - assertTrue(acked.booleanValue()); + assertTrue(acked); task.getOutputData().put("key1", "value1"); task.setStatus(Status.COMPLETED); @@ -180,12 +180,12 @@ public void testAll() throws Exception { assertEquals(task.getTaskId(), taskById.getTaskId()); - List getTasks = tc.getPendingTasksByType(t0.getName(), null, Optional.of(1)); + List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); assertNotNull(getTasks); assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - getTasks = tc.getPendingTasksByType(t1.getName(), null, Optional.of(1)); + getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); assertNotNull(getTasks); assertEquals(1, getTasks.size()); From dcfb63c172a0162a60445f0a326613b6137de694 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 19 Jun 2018 17:01:24 +0200 Subject: [PATCH 061/163] grpc-client: Implement search on the WorkflowClient --- .../conductor/common/run/WorkflowSummary.java | 50 ++++++++++++++++++- .../conductor/client/grpc/WorkflowClient.java | 47 +++++++++++++++++ .../conductor/grpc/AbstractProtoMapper.java | 19 +++++++ .../tests/integration/End2EndGrpcTests.java | 4 +- 4 files changed, 117 insertions(+), 3 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java index 9048ff19c2..289666bb9c 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java @@ -32,7 +32,7 @@ * * @author Viren */ -@ProtoMessage(fromProto = false) +@ProtoMessage public class WorkflowSummary { /** @@ -231,4 +231,52 @@ public String getFailedReferenceTaskNames() { public void setFailedReferenceTaskNames(String failedReferenceTaskNames) { this.failedReferenceTaskNames = failedReferenceTaskNames; } + + public void setWorkflowType(String workflowType) { + this.workflowType = workflowType; + } + + public void setVersion(int version) { + this.version = version; + } + + public void setWorkflowId(String workflowId) { + this.workflowId = workflowId; + } + + public void setCorrelationId(String correlationId) { + this.correlationId = correlationId; + } + + public void setStartTime(String startTime) { + this.startTime = startTime; + } + + public void setUpdateTime(String updateTime) { 
+ this.updateTime = updateTime; + } + + public void setEndTime(String endTime) { + this.endTime = endTime; + } + + public void setStatus(WorkflowStatus status) { + this.status = status; + } + + public void setInput(String input) { + this.input = input; + } + + public void setOutput(String output) { + this.output = output; + } + + public void setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + public void setExecutionTime(long executionTime) { + this.executionTime = executionTime; + } } diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java index 0f2d114e8a..4e92ac341c 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java @@ -3,12 +3,16 @@ import com.google.common.base.Preconditions; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.grpc.SearchPb; import com.netflix.conductor.grpc.WorkflowServiceGrpc; import com.netflix.conductor.grpc.WorkflowServicePb; import com.netflix.conductor.proto.WorkflowPb; import org.apache.commons.lang3.StringUtils; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.*; import java.util.stream.Collectors; @@ -258,4 +262,47 @@ public void terminateWorkflow(String workflowId, String reason) { .build() ); } + + /** + * Search for workflows based on payload + * + * @param query the search query + * @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query + */ + public SearchResult search(@Nonnull String query) { + return search(null, null, null, null, query); + } + + /** + * Paginated search for workflows based on payload + * + * @param start start value of page + * @param size number of workflows to be returned + * @param sort sort order + * @param freeText additional free text query + * @param query the search query + * @return the {@link SearchResult} containing the {@link WorkflowSummary} that match the query + */ + public SearchResult search( + @Nullable Integer start, @Nullable Integer size, + @Nullable String sort, @Nullable String freeText, @Nonnull String query) { + Preconditions.checkNotNull(query, "query cannot be null"); + + SearchPb.SearchRequest.Builder request = SearchPb.SearchRequest.newBuilder(); + request.setQuery(query); + if (start != null) + request.setStart(start); + if (size != null) + request.setSize(size); + if (sort != null) + request.setSort(sort); + if (freeText != null) + request.setFreeText(freeText); + + SearchPb.WorkflowSummarySearchResult result = stub.search(request.build()); + return new SearchResult( + result.getTotalHits(), + result.getResultsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) + ); + } } diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index e5018ff0ea..27136e1d33 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ 
-1117,6 +1117,25 @@ public WorkflowSummaryPb.WorkflowSummary toProto(WorkflowSummary from) { return to.build(); } + public WorkflowSummary fromProto(WorkflowSummaryPb.WorkflowSummary from) { + WorkflowSummary to = new WorkflowSummary(); + to.setWorkflowType( from.getWorkflowType() ); + to.setVersion( from.getVersion() ); + to.setWorkflowId( from.getWorkflowId() ); + to.setCorrelationId( from.getCorrelationId() ); + to.setStartTime( from.getStartTime() ); + to.setUpdateTime( from.getUpdateTime() ); + to.setEndTime( from.getEndTime() ); + to.setStatus( fromProto( from.getStatus() ) ); + to.setInput( from.getInput() ); + to.setOutput( from.getOutput() ); + to.setReasonForIncompletion( from.getReasonForIncompletion() ); + to.setExecutionTime( from.getExecutionTime() ); + to.setEvent( from.getEvent() ); + to.setFailedReferenceTaskNames( from.getFailedReferenceTaskNames() ); + return to; + } + public abstract WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List<WorkflowTask> in); public abstract List<WorkflowTask> fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList in); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 2a155f4ac5..3d96177da7 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -34,8 +34,10 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; +import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.grpc.server.GRPCServer; import com.netflix.conductor.grpc.server.GRPCServerProvider; import com.netflix.conductor.tests.utils.TestEnvironment; @@ -195,7 +197,6 @@ public void testAll() throws Exception { assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); assertEquals(workflowId, pending.getWorkflowInstanceId()); - /* Thread.sleep(1000); SearchResult<WorkflowSummary> searchResult = wc.search("workflowType='" + def.getName() + "'"); assertNotNull(searchResult); @@ -211,6 +212,5 @@ public void testAll() throws Exception { assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); - */ } } From 224f95838e7164908cd423790be09f81d3772f4d Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 20 Jun 2018 15:31:34 +0200 Subject: [PATCH 062/163] common: Do not use a wrapped Boolean on WorkflowTask It's not clear why a `Boolean` was used instead of a `boolean`: no caller depends on `optional` ever being `null`, and the getter `isOptional` already discarded the `null` value with an explicit null check.
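For readers skimming the diff, the knock-on effect in the proto mapper is the clearest illustration of the change. A condensed before/after sketch, extracted from the AbstractProtoMapper hunk below (surrounding context elided):

    // Before: a wrapped Boolean forced the mapper to guard against null.
    if (from.isOptional() != null) {
        to.setOptional( from.isOptional() );
    }

    // After: a primitive boolean (defaulting to false) can never be null,
    // so the guard disappears and the call becomes unconditional.
    to.setOptional( from.isOptional() );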
--- .../conductor/common/metadata/workflow/WorkflowTask.java | 6 +++--- .../com/netflix/conductor/grpc/AbstractProtoMapper.java | 4 +--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index e3a8d34f6c..dfe269bf0e 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -134,7 +134,7 @@ public void setTasks(List<WorkflowTask> tasks) { private String sink; @ProtoField(id = 18) - private Boolean optional; + private boolean optional = false; /** * @return the name @@ -391,7 +391,7 @@ public void setSink(String sink) { * * @return If the task is optional. When set to true, the workflow execution continues even when the task is in failed status. */ - public Boolean isOptional() { + public boolean isOptional() { return optional; } @@ -399,7 +399,7 @@ public Boolean isOptional() { * * @param optional when set to true, the task is marked as optional */ - public void setOptional(Boolean optional) { + public void setOptional(boolean optional) { this.optional = optional; } diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 27136e1d33..d8a44cec0b 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -846,9 +846,7 @@ public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { if (from.getSink() != null) { to.setSink( from.getSink() ); } - if (from.isOptional() != null) { - to.setOptional( from.isOptional() ); - } + to.setOptional( from.isOptional() ); return to.build(); } From 2ee880f941ca666f7e29a6cee05e5b55b814e91e Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 21 Jun 2018 12:35:08 +0200 Subject: [PATCH 063/163] Move shared code into a shared module. There was overlapping/copied code in the es2 and es5 modules. I have moved it into a shared elasticsearch module to ensure consistency.
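To make the direction of the move concrete: the persistence modules keep their ES-version-specific classes and now pull the version-agnostic parser primitives from the shared package. A rough sketch of what the es2 Expression.java hunk below amounts to (package and import names as in this diff; the class body is elided):

    // es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java
    package com.netflix.conductor.dao.es.index.query.parser;

    // Parsing primitives now come from the shared conductor-elasticsearch
    // module instead of a per-module copy:
    import com.netflix.conductor.elasticsearch.query.parser.AbstractNode;
    import com.netflix.conductor.elasticsearch.query.parser.BooleanOp;
    import com.netflix.conductor.elasticsearch.query.parser.ParserException;

    // ... the ES2-specific QueryBuilder construction in Expression is unchanged.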
--- elasticsearch/build.gradle | 10 + .../query/parser/AbstractNode.java | 2 +- .../query/parser/BooleanOp.java | 2 +- .../query/parser/ComparisonOp.java | 2 +- .../query/parser/ConstValue.java | 2 +- .../parser/FunctionThrowingException.java | 2 +- .../query/parser/ListConst.java | 2 +- .../elasticsearch}/query/parser/Name.java | 2 +- .../query/parser/ParserException.java | 2 +- .../elasticsearch}/query/parser/Range.java | 2 +- .../query/parser/AbstractParserTest.java | 2 +- .../query/parser/TestBooleanOp.java | 5 +- .../query/parser/TestComparisonOp.java | 5 +- .../query/parser/TestConstValue.java | 7 +- .../elasticsearch}/query/parser/TestName.java | 4 +- es2-persistence/build.gradle | 12 +- .../dao/es/index/ElasticSearchDAO.java | 9 +- .../dao/es/index/query/parser/ConstValue.java | 135 --- .../dao/es/index/query/parser/Expression.java | 4 + .../index/query/parser/GroupedExpression.java | 3 + .../dao/es/index/query/parser/ListConst.java | 79 -- .../dao/es/index/query/parser/Name.java | 49 -- .../dao/es/index/query/parser/NameValue.java | 8 + .../index/query/parser/ParserException.java | 36 - .../dao/es/index/query/parser/Range.java | 90 -- .../query/parser/AbstractParserTest.java | 34 - .../es/index/query/parser/TestBooleanOp.java | 51 -- .../index/query/parser/TestComparisonOp.java | 50 -- .../es/index/query/parser/TestConstValue.java | 102 --- .../es/index/query/parser/TestExpression.java | 9 +- .../dao/es/index/query/parser/TestName.java | 39 - es5-persistence/build.gradle | 11 +- .../dao/es5/index/ElasticSearchDAOV5.java | 822 +++++++++--------- .../es5/index/query/parser/AbstractNode.java | 187 ---- .../dao/es5/index/query/parser/BooleanOp.java | 64 -- .../es5/index/query/parser/ComparisonOp.java | 78 -- .../es5/index/query/parser/Expression.java | 10 +- .../parser/FunctionThrowingException.java | 30 - .../index/query/parser/GroupedExpression.java | 88 +- .../dao/es5/index/query/parser/NameValue.java | 189 ++-- .../index/query/parser/TestExpression.java | 9 +- settings.gradle | 4 +- 42 files changed, 641 insertions(+), 1612 deletions(-) create mode 100644 elasticsearch/build.gradle rename {es2-persistence/src/main/java/com/netflix/conductor/dao/es/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/AbstractNode.java (98%) rename {es2-persistence/src/main/java/com/netflix/conductor/dao/es/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/BooleanOp.java (96%) rename {es2-persistence/src/main/java/com/netflix/conductor/dao/es/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/ComparisonOp.java (97%) rename {es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/ConstValue.java (98%) rename {es2-persistence/src/main/java/com/netflix/conductor/dao/es/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/FunctionThrowingException.java (92%) rename {es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/ListConst.java (96%) rename {es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/Name.java (94%) rename {es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/ParserException.java 
(93%) rename {es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/main/java/com/netflix/conductor/elasticsearch}/query/parser/Range.java (96%) rename {es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/test/java/com/netflix/conductor/elasticsearch}/query/parser/AbstractParserTest.java (94%) rename {es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/test/java/com/netflix/conductor/elasticsearch}/query/parser/TestBooleanOp.java (87%) rename {es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/test/java/com/netflix/conductor/elasticsearch}/query/parser/TestComparisonOp.java (87%) rename {es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/test/java/com/netflix/conductor/elasticsearch}/query/parser/TestConstValue.java (90%) rename {es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index => elasticsearch/src/test/java/com/netflix/conductor/elasticsearch}/query/parser/TestName.java (89%) delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ConstValue.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ListConst.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Name.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ParserException.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Range.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/AbstractParserTest.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestBooleanOp.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestComparisonOp.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestConstValue.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestName.java delete mode 100644 es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractNode.java delete mode 100644 es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/BooleanOp.java delete mode 100644 es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ComparisonOp.java delete mode 100644 es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FunctionThrowingException.java diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle new file mode 100644 index 0000000000..29a8e087ce --- /dev/null +++ b/elasticsearch/build.gradle @@ -0,0 +1,10 @@ +dependencies { + compile project(':conductor-core') + + compile "commons-io:commons-io:${revCommonsIo}" + compile "com.google.inject:guice:${revGuice}" + + //ES5 Dependency + compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" + compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" +} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/AbstractNode.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java similarity index 98% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/AbstractNode.java rename to 
elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java index 4588a2ee24..1ca29e9587 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/AbstractNode.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; import java.math.BigDecimal; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/BooleanOp.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java similarity index 96% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/BooleanOp.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java index 99c66e08e3..f8f2f0862f 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/BooleanOp.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ComparisonOp.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java similarity index 97% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ComparisonOp.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java index 12ecf80f27..e1eebed806 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ComparisonOp.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ConstValue.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java similarity index 98% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ConstValue.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java index 3612714890..9e081e0518 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ConstValue.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FunctionThrowingException.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java similarity index 92% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FunctionThrowingException.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java 
index b32b7f0733..82ec52472d 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FunctionThrowingException.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; /** * @author Viren diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ListConst.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java similarity index 96% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ListConst.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java index 22bbbb23a5..29f0443fde 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ListConst.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; import java.util.LinkedList; diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Name.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java similarity index 94% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Name.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java index b7b9fb9a18..7831a57a80 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Name.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ParserException.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java similarity index 93% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ParserException.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java index 445e3090fd..02f226a907 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ParserException.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; /** * @author Viren diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Range.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java similarity index 96% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Range.java rename to elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java index 25e585a50f..896db71296 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Range.java +++ 
b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.InputStream; diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractParserTest.java b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java similarity index 94% rename from es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractParserTest.java rename to elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java index 0a48142f7d..cd4c318a80 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractParserTest.java +++ b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java @@ -16,7 +16,7 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestBooleanOp.java b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java similarity index 87% rename from es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestBooleanOp.java rename to elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java index 428fdd2c67..ba37f189d7 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestBooleanOp.java +++ b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java @@ -16,7 +16,10 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; + +import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import org.junit.Test; diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestComparisonOp.java b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java similarity index 87% rename from es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestComparisonOp.java rename to elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java index 633d3fb02a..88dce7583f 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestComparisonOp.java +++ b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java @@ -16,7 +16,10 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; + +import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import org.junit.Test; diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestConstValue.java b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java similarity index 90% rename from 
es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestConstValue.java rename to elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java index bae8477014..bd6b6d5d3a 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestConstValue.java +++ b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java @@ -16,7 +16,12 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; + +import com.netflix.conductor.elasticsearch.query.parser.ConstValue; +import com.netflix.conductor.elasticsearch.query.parser.ListConst; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; +import com.netflix.conductor.elasticsearch.query.parser.Range; import org.junit.Test; diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestName.java b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java similarity index 89% rename from es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestName.java rename to elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java index 7bd4c9ee8b..10d570a585 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestName.java +++ b/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java @@ -16,7 +16,9 @@ /** * */ -package com.netflix.conductor.dao.es5.index.query.parser; +package com.netflix.conductor.elasticsearch.query.parser; + +import com.netflix.conductor.elasticsearch.query.parser.Name; import org.junit.Test; diff --git a/es2-persistence/build.gradle b/es2-persistence/build.gradle index 2bfcd96526..23b8f117cd 100644 --- a/es2-persistence/build.gradle +++ b/es2-persistence/build.gradle @@ -9,16 +9,12 @@ configurations { } dependencies { - compile project(':conductor-core') - - //ES5 Dependency - compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" - compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" - - compileOnly "commons-io:commons-io:${revCommonsIo}" + compile project(':conductor-elasticsearch') compileOnly "org.elasticsearch:elasticsearch:${revElasticSearch2}" compileOnly "com.github.rholder:guava-retrying:${revGuavaRetrying}" + + testCompile project(':conductor-elasticsearch').sourceSets.test.output } // Drop the classifier and delete jar task actions to replace the regular jar artifact with the shadow artifact @@ -38,4 +34,4 @@ jar.dependsOn shadowJar configureRelocationShadowJar { prefix = 'conductor' -} \ No newline at end of file +} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java index 512cfcb1a8..cf73506aba 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java +++ b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java @@ -19,7 +19,6 @@ package com.netflix.conductor.dao.es.index; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Predicate; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.tasks.Task; @@ -34,9 +33,10 @@ import 
com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.es.index.query.parser.Expression; -import com.netflix.conductor.dao.es.index.query.parser.ParserException; import com.netflix.conductor.dao.es.utils.RetryUtil; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import com.netflix.conductor.metrics.Monitors; + import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; @@ -67,8 +67,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; -import javax.inject.Singleton; import java.io.InputStream; import java.text.SimpleDateFormat; import java.time.LocalDate; @@ -88,6 +86,9 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import javax.inject.Inject; +import javax.inject.Singleton; + /** * @author Viren * diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ConstValue.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ConstValue.java deleted file mode 100644 index 5ba72cccc8..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ConstValue.java +++ /dev/null @@ -1,135 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import java.io.InputStream; - - -/** - * @author Viren - * Constant value can be: - *
    - *
  1. List of values (a,b,c) - *
  2. Range of values (m AND n) - *
  3. A value (x) - *
  4. A value is either a string or a number - *
- * - */ -public class ConstValue extends AbstractNode { - - public static enum SystemConsts { - NULL("null"), NOT_NULL("not null"); - private String value; - SystemConsts(String value){ - this.value = value; - } - - public String value(){ - return value; - } - } - - private Object value; - - private SystemConsts sysConsts; - - public ConstValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(4); - String sp = new String(peeked).trim(); - //Read a constant value (number or a string) - if(peeked[0] == '"' || peeked[0] == '\''){ - this.value = readString(is); - } else if(sp.toLowerCase().startsWith("not")){ - this.value = SystemConsts.NOT_NULL.value(); - sysConsts = SystemConsts.NOT_NULL; - read(SystemConsts.NOT_NULL.value().length()); - } else if(sp.equalsIgnoreCase(SystemConsts.NULL.value())){ - this.value = SystemConsts.NULL.value(); - sysConsts = SystemConsts.NULL; - read(SystemConsts.NULL.value().length()); - } else{ - this.value = readNumber(is); - } - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - is.mark(1); - char c = (char) is.read(); - if(!isNumeric(c)){ - is.reset(); - break; - }else{ - sb.append(c); - } - } - String numValue = sb.toString().trim(); - return numValue; - } - /** - * Reads an escaped string - * @throws Exception - */ - private String readString(InputStream is) throws Exception { - char delim = (char)read(1)[0]; - StringBuilder sb = new StringBuilder(); - boolean valid = false; - while(is.available() > 0){ - char c = (char) is.read(); - if(c == delim){ - valid = true; - break; - } else if(c == '\\'){ - // read the next character as part of the value - c = (char) is.read(); - sb.append(c); - } else{ - sb.append(c); - } - } - if(!valid){ - throw new ParserException("String constant is not quoted with <" + delim + "> : " + sb.toString()); - } - return "\"" + sb.toString() + "\""; - } - - public Object getValue(){ - return value; - } - - @Override - public String toString(){ - return ""+value; - } - - public boolean isSysConstant(){ - return this.sysConsts != null; - } - - public SystemConsts getSysConstant(){ - return this.sysConsts; - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java index 1229cdfb38..63bae25b10 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java +++ b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java @@ -18,6 +18,10 @@ */ package com.netflix.conductor.dao.es.index.query.parser; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; + import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java index 31bb3296db..e5dfa94357 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java +++ 
b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java @@ -18,6 +18,9 @@ */ package com.netflix.conductor.dao.es.index.query.parser; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; + import org.elasticsearch.index.query.QueryBuilder; import java.io.InputStream; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ListConst.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ListConst.java deleted file mode 100644 index 964c69652a..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ListConst.java +++ /dev/null @@ -1,79 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import java.io.InputStream; -import java.util.LinkedList; -import java.util.List; - - -/** - * @author Viren - * List of constants - * - */ -public class ListConst extends AbstractNode { - - private List values; - - public ListConst(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - this.values = readList(); - } - - private List readList() throws Exception { - List list = new LinkedList(); - boolean valid = false; - char c; - - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - c = (char) is.read(); - if(c == ')'){ - valid = true; - break; - }else if(c == ','){ - list.add(sb.toString().trim()); - sb = new StringBuilder(); - }else{ - sb.append(c); - } - } - list.add(sb.toString().trim()); - if(!valid){ - throw new ParserException("Expected ')' but never encountered in the stream"); - } - return list; - } - - public List getList(){ - return (List) values; - } - - @Override - public String toString(){ - return values.toString(); - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Name.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Name.java deleted file mode 100644 index 1ab0f23d61..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Name.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - * Represents the name of the field to be searched against. - */ -public class Name extends AbstractNode { - - private String value; - - public Name(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.value = readToken(); - } - - @Override - public String toString(){ - return value; - } - - public String getName(){ - return value; - } - -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java index cc15d01811..5e5edc5199 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java +++ b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java @@ -18,6 +18,14 @@ */ package com.netflix.conductor.dao.es.index.query.parser; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; +import com.netflix.conductor.elasticsearch.query.parser.ConstValue; +import com.netflix.conductor.elasticsearch.query.parser.ListConst; +import com.netflix.conductor.elasticsearch.query.parser.Name; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; +import com.netflix.conductor.elasticsearch.query.parser.Range; + import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ParserException.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ParserException.java deleted file mode 100644 index bbf1f63f0e..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/ParserException.java +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -/** - * @author Viren - * - */ -@SuppressWarnings("serial") -public class ParserException extends Exception { - - public ParserException(String message) { - super(message); - } - - public ParserException(String message, Throwable cause) { - super(message, cause); - } - -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Range.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Range.java deleted file mode 100644 index 78005be516..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Range.java +++ /dev/null @@ -1,90 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. 
- * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import java.io.InputStream; - - -/** - * @author Viren - * - */ -public class Range extends AbstractNode { - - private String low; - - private String high; - - public Range(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.low = readNumber(is); - - skipWhitespace(); - byte[] peeked = read(3); - assertExpected(peeked, "AND"); - skipWhitespace(); - - String num = readNumber(is); - if(num == null || "".equals(num)){ - throw new ParserException("Missing the upper range value..."); - } - this.high = num; - - } - - private String readNumber(InputStream is) throws Exception { - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - is.mark(1); - char c = (char) is.read(); - if(!isNumeric(c)){ - is.reset(); - break; - }else{ - sb.append(c); - } - } - String numValue = sb.toString().trim(); - return numValue; - } - - - /** - * @return the low - */ - public String getLow() { - return low; - } - - /** - * @return the high - */ - public String getHigh() { - return high; - } - - @Override - public String toString(){ - return low + " AND " + high; - } -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/AbstractParserTest.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/AbstractParserTest.java deleted file mode 100644 index 8a5412dc97..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/AbstractParserTest.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -/** - * @author Viren - * - */ -public abstract class AbstractParserTest { - - protected InputStream getInputStream(String expression) { - return new BufferedInputStream(new ByteArrayInputStream(expression.getBytes())); - } - -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestBooleanOp.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestBooleanOp.java deleted file mode 100644 index 2f235dbba9..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestBooleanOp.java +++ /dev/null @@ -1,51 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Viren - * - */ -public class TestBooleanOp extends AbstractParserTest { - - @Test - public void test() throws Exception { - String[] tests = new String[]{"AND", "OR"}; - for(String test : tests){ - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected=ParserException.class) - public void testInvalid() throws Exception { - String test = "<"; - BooleanOp name = new BooleanOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - - } -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestComparisonOp.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestComparisonOp.java deleted file mode 100644 index c9feeb6d75..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestComparisonOp.java +++ /dev/null @@ -1,50 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Viren - * - */ -public class TestComparisonOp extends AbstractParserTest { - - @Test - public void test() throws Exception { - String[] tests = new String[]{"<",">","=","!=","IN"}; - for(String test : tests){ - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } - } - - @Test(expected=ParserException.class) - public void testInvalidOp() throws Exception { - String test = "AND"; - ComparisonOp name = new ComparisonOp(getInputStream(test)); - String nameVal = name.getOperator(); - assertNotNull(nameVal); - assertEquals(test, nameVal); - } -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestConstValue.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestConstValue.java deleted file mode 100644 index 2a6676e3ba..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestConstValue.java +++ /dev/null @@ -1,102 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ - -import org.junit.Test; - -import java.util.List; - -import static org.junit.Assert.*; - -/** - * @author Viren - * - */ -public class TestConstValue extends AbstractParserTest { - - @Test - public void testStringConst() throws Exception { - String test = "'string value'"; - String expected = test.replaceAll("'", "\""); //Quotes are removed but then the result is double quoted. 
- ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(expected, cv.getValue()); - assertTrue(cv.getValue() instanceof String); - - test = "\"string value\""; - cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(expected, cv.getValue()); - assertTrue(cv.getValue() instanceof String); - } - - @Test - public void testSystemConst() throws Exception { - String test = "null"; - ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertTrue(cv.getValue() instanceof String); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NULL); - test = "null"; - - test = "not null"; - cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - } - - @Test(expected=ParserException.class) - public void testInvalid() throws Exception { - String test = "'string value"; - new ConstValue(getInputStream(test)); - } - - - @Test - public void testNumConst() throws Exception { - String test = "12345.89"; - ConstValue cv = new ConstValue(getInputStream(test)); - assertNotNull(cv.getValue()); - assertTrue(cv.getValue() instanceof String); //Numeric values are stored as string as we are just passing thru them to ES - assertEquals(test, cv.getValue()); - } - - @Test - public void testRange() throws Exception { - String test = "50 AND 100"; - Range range = new Range(getInputStream(test)); - assertEquals("50", range.getLow()); - assertEquals("100", range.getHigh()); - } - - @Test(expected=ParserException.class) - public void testBadRange() throws Exception { - String test = "50 AND"; - new Range(getInputStream(test)); - } - - @Test - public void testArray() throws Exception { - String test = "(1, 3, 'name', 'value2')"; - ListConst lc = new ListConst(getInputStream(test)); - List list = lc.getList(); - assertEquals(4, list.size()); - assertTrue(list.contains("1")); - assertEquals("'value2'", list.get(3)); //Values are preserved as it is... 
- } -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java index 25cc6b6ab1..5c8c1d568e 100644 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java +++ b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java @@ -17,13 +17,20 @@ * */ +import com.netflix.conductor.elasticsearch.query.parser.AbstractParserTest; +import com.netflix.conductor.elasticsearch.query.parser.ConstValue; + import org.junit.Test; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.InputStream; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; /** * @author Viren diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestName.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestName.java deleted file mode 100644 index af61f92d25..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestName.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ - -import org.junit.Test; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; - -/** - * @author Viren - * - */ -public class TestName extends AbstractParserTest { - - @Test - public void test() throws Exception{ - String test = "metadata.en_US.lang "; - Name name = new Name(getInputStream(test)); - String nameVal = name.getName(); - assertNotNull(nameVal); - assertEquals(test.trim(), nameVal); - } -} diff --git a/es5-persistence/build.gradle b/es5-persistence/build.gradle index 05e0cd6935..93e6f03474 100644 --- a/es5-persistence/build.gradle +++ b/es5-persistence/build.gradle @@ -1,13 +1,8 @@ dependencies { - - compile project(':conductor-core') - compile "com.google.inject:guice:${revGuice}" - - //ES5 Dependency - compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" - compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" + compile project(':conductor-elasticsearch') compile "org.elasticsearch:elasticsearch:${revElasticSearch5}" compile "org.elasticsearch.client:transport:${revElasticSearch5}" - compile "commons-io:commons-io:${revCommonsIo}" + + testCompile project(':conductor-elasticsearch').sourceSets.test.output } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java index fff4e82335..bb2306a95a 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java @@ -1,20 +1,17 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.dao.es5.index; @@ -27,15 +24,16 @@ import com.netflix.conductor.common.run.TaskSummary; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.common.utils.RetryUtil; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.ApplicationException.Code; -import com.netflix.conductor.common.utils.RetryUtil; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.dao.es5.index.query.parser.ParserException; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import com.netflix.conductor.metrics.Monitors; + import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; import org.elasticsearch.ResourceAlreadyExistsException; @@ -67,8 +65,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; -import javax.inject.Singleton; import java.io.InputStream; import java.text.SimpleDateFormat; import java.time.LocalDate; @@ -88,6 +84,9 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import javax.inject.Inject; +import javax.inject.Singleton; + /** * @author Viren */ @@ -95,405 +94,406 @@ @Singleton public class ElasticSearchDAOV5 implements IndexDAO { - private static Logger logger = LoggerFactory.getLogger(ElasticSearchDAOV5.class); - - private static final String WORKFLOW_DOC_TYPE = "workflow"; - - private static final String TASK_DOC_TYPE = "task"; - - private static final String LOG_DOC_TYPE = "task"; - - private static final String EVENT_DOC_TYPE = "event"; - - private static final String MSG_DOC_TYPE = "message"; - - private static final String className = ElasticSearchDAOV5.class.getSimpleName(); - - private static final int RETRY_COUNT = 3; - - private String indexName; - - private String logIndexName; - - private String logIndexPrefix; - - private ObjectMapper objectMapper; - - private Client elasticSearchClient; - - - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - + private static Logger logger = LoggerFactory.getLogger(ElasticSearchDAOV5.class); + + private static final String WORKFLOW_DOC_TYPE = "workflow"; + + private static final String TASK_DOC_TYPE = "task"; + + private static final String LOG_DOC_TYPE = "task"; + + private static final String EVENT_DOC_TYPE = "event"; + + private static final String MSG_DOC_TYPE = "message"; + + private static final String className = ElasticSearchDAOV5.class.getSimpleName(); + + private static final int RETRY_COUNT = 3; + + private String indexName; + + private String logIndexName; + + private String logIndexPrefix; + + private ObjectMapper objectMapper; + + private Client elasticSearchClient; + + + private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); + private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMww"); - private final ExecutorService executorService; - + private final ExecutorService executorService; + static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); + SIMPLE_DATE_FORMAT.setTimeZone(GMT); + } + + @Inject + public ElasticSearchDAOV5(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { + this.objectMapper = objectMapper; + this.elasticSearchClient = elasticSearchClient; + 
this.indexName = config.getProperty("workflow.elasticsearch.index.name", null);
+
+        try {
+
+            initIndex();
+            updateIndexName(config);
+            // Refresh the date-suffixed task log index name every hour so it rolls over.
+            Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(config), 0, 1, TimeUnit.HOURS);
+
+        } catch (Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+
+        int corePoolSize = 6;
+        int maximumPoolSize = 12;
+        long keepAliveTime = 1L;
+        this.executorService = new ThreadPoolExecutor(corePoolSize,
+                maximumPoolSize,
+                keepAliveTime,
+                TimeUnit.MINUTES,
+                new LinkedBlockingQueue<>());
+    }
+
+    private void updateIndexName(Configuration config) {
+        this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log");
+        this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date());
+
+        try {
+            elasticSearchClient.admin().indices().prepareGetIndex().addIndices(logIndexName).execute().actionGet();
+        } catch (IndexNotFoundException infe) {
+            try {
+                elasticSearchClient.admin().indices().prepareCreate(logIndexName).execute().actionGet();
+            } catch (ResourceAlreadyExistsException ilee) {
+                // The log index was created concurrently; nothing left to do.
+            } catch (Exception e) {
+                logger.error("Failed to update log index name: {}", logIndexName, e);
+            }
+        }
+    }
+
+    /**
+     * Initializes the index with required templates and mappings.
+     */
+    private void initIndex() throws Exception {
+
+        //0. Add the index template
+        GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet();
+        if (result.getIndexTemplates().isEmpty()) {
+            logger.info("Creating the index template 'wfe_template'");
+            InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/template.json");
+            byte[] templateSource = IOUtils.toByteArray(stream);
+
+            try {
+                elasticSearchClient.admin().indices().preparePutTemplate("wfe_template").setSource(templateSource, XContentType.JSON).execute().actionGet();
+            } catch (Exception e) {
+                logger.error("Failed to init index template", e);
+            }
+        }
+
+        //1. Create the required index
+        try {
+            elasticSearchClient.admin().indices().prepareGetIndex().addIndices(indexName).execute().actionGet();
+        } catch (IndexNotFoundException infe) {
+            try {
+                elasticSearchClient.admin().indices().prepareCreate(indexName).execute().actionGet();
+            } catch (ResourceAlreadyExistsException done) {
+                // The index already exists; safe to ignore.
+            }
+        }
+
+        //2. Mapping for the workflow document type
+        GetMappingsResponse response = elasticSearchClient.admin().indices().prepareGetMappings(indexName).addTypes(WORKFLOW_DOC_TYPE).execute().actionGet();
+        if (response.mappings().isEmpty()) {
+            logger.info("Adding the workflow type mappings");
+            InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/wfe_type.json");
+            byte[] bytes = IOUtils.toByteArray(stream);
+            String source = new String(bytes);
+            try {
+                elasticSearchClient.admin().indices().preparePutMapping(indexName).setType(WORKFLOW_DOC_TYPE).setSource(source).execute().actionGet();
+            } catch (Exception e) {
+                logger.error("Failed to init index mappings", e);
+            }
+        }
+    }
+
+    @Override
+    public void indexWorkflow(Workflow workflow) {
+        try {
+
+            String id = workflow.getWorkflowId();
+            WorkflowSummary summary = new WorkflowSummary(workflow);
+            byte[] doc = objectMapper.writeValueAsBytes(summary);
+
+            UpdateRequest req = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, id);
+            req.doc(doc, XContentType.JSON);
+            req.upsert(doc, XContentType.JSON);
+            req.retryOnConflict(5);
+            updateWithRetry(req, "Index workflow into doc_type workflow");
+
+        } catch (Throwable e) {
+            logger.error("Failed to index workflow: {}", workflow.getWorkflowId(), e);
+        }
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncIndexWorkflow(Workflow workflow) {
+        return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService);
+    }
+
+    @Override
+    public void indexTask(Task task) {
+        try {
+
+            String id = task.getTaskId();
+            TaskSummary summary = new TaskSummary(task);
+            byte[] doc = objectMapper.writeValueAsBytes(summary);
+
+            UpdateRequest req = new UpdateRequest(indexName, TASK_DOC_TYPE, id);
+            req.doc(doc, XContentType.JSON);
+            req.upsert(doc, XContentType.JSON);
+            updateWithRetry(req, "Index task into doc_type task");
+
+        } catch (Throwable e) {
+            logger.error("Failed to index task: {}", task.getTaskId(), e);
+        }
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncIndexTask(Task task) {
+        return CompletableFuture.runAsync(() -> indexTask(task), executorService);
+    }
+
+
+    @Override
+    public void addTaskExecutionLogs(List<TaskExecLog> taskExecLogs) {
+        if (taskExecLogs.isEmpty()) {
+            return;
+        }
+        try {
+            BulkRequestBuilder bulkRequestBuilder = elasticSearchClient.prepareBulk();
+            for (TaskExecLog log : taskExecLogs) {
+                IndexRequest request = new IndexRequest(logIndexName, LOG_DOC_TYPE);
+                request.source(objectMapper.writeValueAsBytes(log), XContentType.JSON);
+                bulkRequestBuilder.add(request);
+            }
+            new RetryUtil<BulkResponse>().retryOnException(() -> bulkRequestBuilder.execute().actionGet(), null,
+                BulkResponse::hasFailures, RETRY_COUNT, "Indexing all execution logs into doc_type task", "addTaskExecutionLogs");
+        } catch (Throwable e) {
+            List<String> taskIds = taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList());
+            logger.error("Failed to index task execution logs for tasks: {}", taskIds, e);
+        }
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncAddTaskExecutionLogs(List<TaskExecLog> logs) {
+        return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), executorService);
+    }
+
+    @Override
+    public List<TaskExecLog> getTaskExecutionLogs(String taskId) {
+        try {
+            Expression expression = Expression.fromString("taskId='" + taskId + "'");
+            QueryBuilder queryBuilder = expression.getFilterBuilder();
+
+            BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder);
+            QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*");
+            BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery);
+
+            final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName).setQuery(fq).setTypes(TASK_DOC_TYPE);
+            SearchResponse response = srb.execute().actionGet();
+            SearchHit[] hits = response.getHits().getHits();
+            List<TaskExecLog> logs = new ArrayList<>(hits.length);
+            for (SearchHit hit : hits) {
+                String source = hit.getSourceAsString();
+                TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class);
+                logs.add(tel);
+            }
+
+            return logs;
+
+        } catch (Exception e) {
+            logger.error("Failed to get task execution logs for task: {}", taskId, e);
+        }
+
+        return null;
+    }
+
+    @Override
+    public void addMessage(String queue, Message message) {
+        Map<String, Object> doc = new HashMap<>();
+        doc.put("messageId", message.getId());
+        doc.put("payload", message.getPayload());
+        doc.put("queue", queue);
+        doc.put("created", System.currentTimeMillis());
+        IndexRequest request = new IndexRequest(logIndexName, MSG_DOC_TYPE);
+        request.source(doc);
+        try {
+            new RetryUtil<>().retryOnException(() -> elasticSearchClient.index(request).actionGet(), null,
+                null, RETRY_COUNT, "Indexing document for docType: message", "addMessage");
+        } catch (Throwable e) {
+            logger.error("Failed to index message: {}", message.getId(), e);
+        }
+    }
+
+    @Override
+    public void addEventExecution(EventExecution eventExecution) {
+        try {
+            byte[] doc = objectMapper.writeValueAsBytes(eventExecution);
+            String id = eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution.getMessageId() + "." + eventExecution.getId();
+            UpdateRequest req = new UpdateRequest(logIndexName, EVENT_DOC_TYPE, id);
+            req.doc(doc, XContentType.JSON);
+            req.upsert(doc, XContentType.JSON);
+            req.retryOnConflict(5);
+            updateWithRetry(req, "Update Event execution for doc_type event");
+        } catch (Throwable e) {
+            logger.error("Failed to index event execution: {}", eventExecution.getId(), e);
+        }
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncAddEventExecution(EventExecution eventExecution) {
+        return CompletableFuture.runAsync(() -> addEventExecution(eventExecution), executorService);
+    }
+
+    private void updateWithRetry(UpdateRequest request, String operationDescription) {
+        try {
+            new RetryUtil().retryOnException(() -> elasticSearchClient.update(request).actionGet(), null,
+                null, RETRY_COUNT, operationDescription, "updateWithRetry");
+        } catch (Exception e) {
+            Monitors.error(className, "index");
+            logger.error("Failed to index {} for request type: {}", request.index(), request.type(), e);
+        }
+    }
+
+    @Override
+    public SearchResult<String> searchWorkflows(String query, String freeText, int start, int count, List<String> sort) {
+        try {
+
+            return search(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE);
+
+        } catch (ParserException e) {
+            throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e);
+        }
+    }
+
+    @Override
+    public SearchResult<String> searchTasks(String query, String freeText, int start, int count, List<String> sort) {
+        try {
+            return search(query, start, count, sort, freeText, TASK_DOC_TYPE);
+        } catch (ParserException e) {
+            throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e);
+        }
+    }
+
+    @Override
+    public void removeWorkflow(String workflowId) {
+        try {
+            DeleteRequest request = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId);
+            DeleteResponse response = elasticSearchClient.delete(request).actionGet();
+            if (response.getResult() != DocWriteResponse.Result.DELETED) {
+                logger.error("Index removal failed - document not found by id: {}", workflowId);
+            }
+        } catch (Throwable e) {
+            logger.error("Failed to remove workflow {} from index", workflowId, e);
+            Monitors.error(className, "remove");
+        }
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncRemoveWorkflow(String workflowId) {
+        return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService);
+    }
+
+    @Override
+    public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
+        if (keys.length != values.length) {
+            throw new IllegalArgumentException("Number of keys and values should be same.");
+        }
+
+        UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId);
+        Map<String, Object> source = IntStream.range(0, keys.length).boxed()
+            .collect(Collectors.toMap(i -> keys[i], i -> values[i]));
+        request.doc(source);
+        logger.debug("Updating workflow {} with {}", workflowInstanceId, source);
+        new RetryUtil<>().retryOnException(() -> elasticSearchClient.update(request), null, null, RETRY_COUNT,
+            "Updating index for doc_type workflow", "updateWorkflow");
+    }
+
+    @Override
+    public CompletableFuture<Void> asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values) {
+        return CompletableFuture.runAsync(() -> updateWorkflow(workflowInstanceId, keys, values), executorService);
+    }
+
+    @Override
+    public String get(String workflowInstanceId, String fieldToGet) {
+        Object value = null;
+        GetRequest request = new GetRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId).storedFields(fieldToGet);
+        GetResponse response = elasticSearchClient.get(request).actionGet();
+        Map<String, GetField> fields = response.getFields();
+        if (fields == null) {
+            return null;
+        }
+        GetField field = fields.get(fieldToGet);
+        if (field != null) {
+            value = field.getValue();
+        }
+        if (value != null) {
+            return value.toString();
+        }
+        return null;
+    }
+
+    private SearchResult<String> search(String structuredQuery, int start, int size, List<String> sortOptions, String freeTextQuery, String docType) throws ParserException {
+        QueryBuilder queryBuilder = QueryBuilders.matchAllQuery();
+        if (StringUtils.isNotEmpty(structuredQuery)) {
+            Expression expression = Expression.fromString(structuredQuery);
+            queryBuilder = expression.getFilterBuilder();
+        }
+
+        BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder);
+        QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery);
+        BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery);
+        final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName).setQuery(fq).setTypes(docType).storedFields("_id").setFrom(start).setSize(size);
+        if (sortOptions != null) {
+            sortOptions.forEach(sortOption -> {
+                SortOrder order = SortOrder.ASC;
+                String field = sortOption;
+                int indx = sortOption.indexOf(':');
+                if (indx > 0) { // can't be 0; the field name must come first
+                    field = sortOption.substring(0, indx);
+                    order = SortOrder.valueOf(sortOption.substring(indx + 1));
+                }
+                srb.addSort(field, order);
+            });
+        }
+        List<String> result = new LinkedList<>();
+        SearchResponse response = srb.get();
+        response.getHits().forEach(hit -> {
+            result.add(hit.getId());
+        });
+        long count = response.getHits().getTotalHits();
+        return new SearchResult<>(count, result);
+    }
+
+    @Override
+    public List<String> searchArchivableWorkflows(String indexName, long archiveTtlDays) {
+        QueryBuilder q = QueryBuilders.boolQuery()
+            .must(QueryBuilders.rangeQuery("endTime").lt(LocalDate.now().minusDays(archiveTtlDays)))
+            .should(QueryBuilders.termQuery("status", "COMPLETED"))
+
.should(QueryBuilders.termQuery("status", "FAILED")) + .mustNot(QueryBuilders.existsQuery("archived")) + .minimumShouldMatch(1); + SearchRequestBuilder s = elasticSearchClient.prepareSearch(indexName) + .setTypes("workflow") + .setQuery(q) + .setSize(1000); + + SearchResponse response = s.execute().actionGet(); + SearchHits hits = response.getHits(); + List ids = new LinkedList<>(); + for (SearchHit hit : hits.getHits()) { + ids.add(hit.getId()); + } + return ids; } - - @Inject - public ElasticSearchDAOV5(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - this.elasticSearchClient = elasticSearchClient; - this.indexName = config.getProperty("workflow.elasticsearch.index.name", null); - - try { - - initIndex(); - updateIndexName(config); - Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(config), 0, 1, TimeUnit.HOURS); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - - int corePoolSize = 6; - int maximumPoolSize = 12; - long keepAliveTime = 1L; - this.executorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>()); - } - - private void updateIndexName(Configuration config) { - this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log"); - this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - try { - elasticSearchClient.admin().indices().prepareGetIndex().addIndices(logIndexName).execute().actionGet(); - } catch (IndexNotFoundException infe) { - try { - elasticSearchClient.admin().indices().prepareCreate(logIndexName).execute().actionGet(); - } catch (ResourceAlreadyExistsException ilee) { - - } catch (Exception e) { - logger.error("Failed to update log index name: {}", logIndexName, e); - } - } - } - - /** - * Initializes the index with required templates and mappings. - */ - private void initIndex() throws Exception { - - //0. Add the index template - GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet(); - if(result.getIndexTemplates().isEmpty()) { - logger.info("Creating the index template 'wfe_template'"); - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/template.json"); - byte[] templateSource = IOUtils.toByteArray(stream); - - try { - elasticSearchClient.admin().indices().preparePutTemplate("wfe_template").setSource(templateSource, XContentType.JSON).execute().actionGet(); - }catch(Exception e) { - logger.error("Failed to init index template", e); - } - } - - //1. Create the required index - try { - elasticSearchClient.admin().indices().prepareGetIndex().addIndices(indexName).execute().actionGet(); - }catch(IndexNotFoundException infe) { - try { - elasticSearchClient.admin().indices().prepareCreate(indexName).execute().actionGet(); - }catch(ResourceAlreadyExistsException done) {} - } - - //2. 
Mapping for the workflow document type - GetMappingsResponse response = elasticSearchClient.admin().indices().prepareGetMappings(indexName).addTypes(WORKFLOW_DOC_TYPE).execute().actionGet(); - if(response.mappings().isEmpty()) { - logger.info("Adding the workflow type mappings"); - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/wfe_type.json"); - byte[] bytes = IOUtils.toByteArray(stream); - String source = new String(bytes); - try { - elasticSearchClient.admin().indices().preparePutMapping(indexName).setType(WORKFLOW_DOC_TYPE).setSource(source).execute().actionGet(); - }catch(Exception e) { - logger.error("Failed to init index mappings", e); - } - } - } - - @Override - public void indexWorkflow(Workflow workflow) { - try { - - String id = workflow.getWorkflowId(); - WorkflowSummary summary = new WorkflowSummary(workflow); - byte[] doc = objectMapper.writeValueAsBytes(summary); - - UpdateRequest req = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - req.retryOnConflict(5); - updateWithRetry(req, "Index workflow into doc_type workflow"); - - } catch (Throwable e) { - logger.error("Failed to index workflow: {}", workflow.getWorkflowId(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(Workflow workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(Task task) { - try { - - String id = task.getTaskId(); - TaskSummary summary = new TaskSummary(task); - byte[] doc = objectMapper.writeValueAsBytes(summary); - - UpdateRequest req = new UpdateRequest(indexName, TASK_DOC_TYPE, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - updateWithRetry(req, "Index workflow into doc_type workflow"); - - } catch (Throwable e) { - logger.error("Failed to index task: {}", task.getTaskId(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(Task task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - if (taskExecLogs.isEmpty()) { - return; - } - try { - BulkRequestBuilder bulkRequestBuilder = elasticSearchClient.prepareBulk(); - for (TaskExecLog log : taskExecLogs) { - IndexRequest request = new IndexRequest(logIndexName, LOG_DOC_TYPE); - request.source(objectMapper.writeValueAsBytes(log), XContentType.JSON); - bulkRequestBuilder.add(request); - } - new RetryUtil().retryOnException(() -> bulkRequestBuilder.execute().actionGet(), null , - BulkResponse::hasFailures, RETRY_COUNT, "Indexing all execution logs into doc_type task", "addTaskExecutionLogs"); - } catch (Throwable e) { - List taskIds = taskExecLogs.stream().map(TaskExecLog::getTaskId).collect(Collectors.toList()); - logger.error("Failed to index task execution logs for tasks: ", taskIds, e); - } - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), executorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - try { - Expression expression = Expression.fromString("taskId='" + taskId + "'"); - QueryBuilder queryBuilder = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = 
QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName).setQuery(fq).setTypes(TASK_DOC_TYPE); - SearchResponse response = srb.execute().actionGet(); - SearchHit[] hits = response.getHits().getHits(); - List logs = new ArrayList<>(hits.length); - for(SearchHit hit : hits) { - String source = hit.getSourceAsString(); - TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); - logs.add(tel); - } - - return logs; - - }catch(Exception e) { - logger.error("Failed to get task execution logs for task: {}", taskId, e); - } - - return null; - } - - @Override - public void addMessage(String queue, Message message) { - Map doc = new HashMap<>(); - doc.put("messageId", message.getId()); - doc.put("payload", message.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - IndexRequest request = new IndexRequest(logIndexName, MSG_DOC_TYPE); - request.source(doc); - try { - new RetryUtil<>().retryOnException(() -> elasticSearchClient.index(request).actionGet(), null, - null, RETRY_COUNT, "Indexing document in for docType: message", "addMessage"); - } catch (Throwable e) { - logger.error("Failed to index message: {}", message.getId(), e); - } - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - try { - byte[] doc = objectMapper.writeValueAsBytes(eventExecution); - String id = eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution.getMessageId() + "." + eventExecution.getId(); - UpdateRequest req = new UpdateRequest(logIndexName, EVENT_DOC_TYPE, id); - req.doc(doc, XContentType.JSON); - req.upsert(doc, XContentType.JSON); - req.retryOnConflict(5); - updateWithRetry(req, "Update Event execution for doc_type event"); - } catch (Throwable e) { - logger.error("Failed to index event execution: {}", eventExecution.getId(), e); - } - } - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync(() -> addEventExecution(eventExecution), executorService); - } - - private void updateWithRetry(UpdateRequest request, String operationDescription) { - try { - new RetryUtil().retryOnException(() -> elasticSearchClient.update(request).actionGet(), null, - null, RETRY_COUNT, operationDescription, "updateWithRetry"); - } catch (Exception e) { - Monitors.error(className, "index"); - logger.error("Failed to index {} for request type: {}", request.index(), request.type(), e); - } - } - - @Override - public SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort) { - try { - - return search(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - - } catch (ParserException e) { - throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public SearchResult searchTasks(String query, String freeText, int start, int count, List sort) { - try { - return search(query, start, count, sort, freeText, TASK_DOC_TYPE); - } catch (ParserException e) { - throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public void removeWorkflow(String workflowId) { - try { - DeleteRequest request = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId); - DeleteResponse response = elasticSearchClient.delete(request).actionGet(); - if (response.getResult() == DocWriteResponse.Result.DELETED) { - logger.error("Index removal failed - document not found by id: {}", 
workflowId); - } - } catch (Throwable e) { - logger.error("Failed to remove workflow {} from index", workflowId, e); - Monitors.error(className, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - if (keys.length != values.length) { - throw new IllegalArgumentException("Number of keys and values should be same."); - } - - UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - Map source = IntStream.range(0, keys.length).boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - logger.debug("Updating workflow {} with {}", workflowInstanceId, source); - new RetryUtil<>().retryOnException(() -> elasticSearchClient.update(request), null, null, RETRY_COUNT, - "Updating index for doc_type workflow", "updateWorkflow"); - } - - @Override - public CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.runAsync(() -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - Object value = null; - GetRequest request = new GetRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId).storedFields(fieldToGet); - GetResponse response = elasticSearchClient.get(request).actionGet(); - Map fields = response.getFields(); - if(fields == null) { - return null; - } - GetField field = fields.get(fieldToGet); - if(field != null) value = field.getValue(); - if(value != null) { - return value.toString(); - } - return null; - } - - private SearchResult search(String structuredQuery, int start, int size, List sortOptions, String freeTextQuery, String docType) throws ParserException { - QueryBuilder queryBuilder = QueryBuilders.matchAllQuery(); - if(StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - queryBuilder = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(queryBuilder); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName).setQuery(fq).setTypes(docType).storedFields("_id").setFrom(start).setSize(size); - if(sortOptions != null){ - sortOptions.forEach(sortOption -> { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int indx = sortOption.indexOf(':'); - if(indx > 0){ //Can't be 0, need the field name at-least - field = sortOption.substring(0, indx); - order = SortOrder.valueOf(sortOption.substring(indx+1)); - } - srb.addSort(field, order); - }); - } - List result = new LinkedList(); - SearchResponse response = srb.get(); - response.getHits().forEach(hit -> { - result.add(hit.getId()); - }); - long count = response.getHits().getTotalHits(); - return new SearchResult(count, result); - } - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("endTime").lt(LocalDate.now().minusDays(archiveTtlDays))) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - 
.should(QueryBuilders.termQuery("status", "FAILED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumShouldMatch(1); - SearchRequestBuilder s = elasticSearchClient.prepareSearch(indexName) - .setTypes("workflow") - .setQuery(q) - .setSize(1000); - - SearchResponse response = s.execute().actionGet(); - SearchHits hits = response.getHits(); - List ids = new LinkedList<>(); - for (SearchHit hit : hits.getHits()) { - ids.add(hit.getId()); - } - return ids; - } } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractNode.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractNode.java deleted file mode 100644 index 8cd7741940..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/AbstractNode.java +++ /dev/null @@ -1,187 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import java.io.InputStream; -import java.math.BigDecimal; -import java.util.HashSet; -import java.util.Set; -import java.util.regex.Pattern; - - -/** - * @author Viren - * - */ -public abstract class AbstractNode { - - public static final Pattern WHITESPACE = Pattern.compile("\\s"); - - protected static Set comparisonOprs = new HashSet(); - - static { - comparisonOprs.add('>'); - comparisonOprs.add('<'); - comparisonOprs.add('='); - } - - protected InputStream is; - - - - protected AbstractNode(InputStream is) throws ParserException { - this.is = is; - this.parse(); - } - - protected boolean isNumber(String test){ - try{ - //If you can convert to a big decimal value, then it is a number. - new BigDecimal(test); - return true; - - }catch(NumberFormatException e){ - //Ignore - } - return false; - } - - protected boolean isBoolOpr(byte[] buffer){ - if(buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R'){ - return true; - }else if(buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D'){ - return true; - } - return false; - } - - protected boolean isComparisonOpr(byte[] buffer){ - if(buffer[0] == 'I' && buffer[1] == 'N'){ - return true; - }else if(buffer[0] == '!' 
&& buffer[1] == '='){ - return true; - }else{ - return comparisonOprs.contains((char)buffer[0]); - } - - } - - protected byte[] peek(int length) throws Exception { - return read(length, true); - } - - protected byte[] read(int length) throws Exception { - return read(length, false); - } - - protected String readToken() throws Exception { - skipWhitespace(); - StringBuilder sb = new StringBuilder(); - while(is.available() > 0){ - char c = (char) peek(1)[0]; - if(c == ' ' || c == '\t' || c == '\n' || c == '\r'){ - is.skip(1); - break; - }else if(c == '=' || c == '>' || c == '<' || c == '!'){ - //do not skip - break; - } - sb.append(c); - is.skip(1); - } - return sb.toString().trim(); - } - - protected boolean isNumeric(char c) { - if (c == '-' || c == 'e' || (c >= '0' && c <= '9') || c == '.'){ - return true; - } - return false; - } - - protected void assertExpected(byte[] found, String expected) throws ParserException { - assertExpected(new String(found), expected); - } - - protected void assertExpected(String found, String expected) throws ParserException { - if(!found.equals(expected)){ - throw new ParserException("Expected " + expected + ", found " + found); - } - } - protected void assertExpected(char found, char expected) throws ParserException { - if(found != expected){ - throw new ParserException("Expected " + expected + ", found " + found); - } - } - - protected static void efor(int length, FunctionThrowingException consumer) throws Exception { - for(int i = 0; i < length; i++){ - consumer.accept(i); - } - } - - protected abstract void _parse() throws Exception; - - //Public stuff here - private void parse() throws ParserException { - //skip white spaces - skipWhitespace(); - try{ - _parse(); - }catch(Exception e){ - System.out.println("\t" + this.getClass().getSimpleName() + "->" + this.toString()); - if(!(e instanceof ParserException)){ - throw new ParserException("Error parsing", e); - }else{ - throw (ParserException)e; - } - } - skipWhitespace(); - } - - //Private methods - - private byte[] read(int length, boolean peekOnly) throws Exception { - byte[] buf = new byte[length]; - if(peekOnly){ - is.mark(length); - } - efor(length, (Integer c)-> buf[c] = (byte) is.read()); - if(peekOnly){ - is.reset(); - } - return buf; - } - - protected void skipWhitespace() throws ParserException { - try{ - while(is.available() > 0){ - byte c = peek(1)[0]; - if(c == ' ' || c == '\t' || c == '\n' || c == '\r'){ - //skip - read(1); - }else{ - break; - } - } - }catch(Exception e){ - throw new ParserException(e.getMessage(), e); - } - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/BooleanOp.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/BooleanOp.java deleted file mode 100644 index 95808989d7..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/BooleanOp.java +++ /dev/null @@ -1,64 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class BooleanOp extends AbstractNode { - - private String value; - - public BooleanOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] buffer = peek(3); - if(buffer.length > 1 && buffer[0] == 'O' && buffer[1] == 'R'){ - this.value = "OR"; - }else if(buffer.length > 2 && buffer[0] == 'A' && buffer[1] == 'N' && buffer[2] == 'D'){ - this.value = "AND"; - }else { - throw new ParserException("No valid boolean operator found..."); - } - read(this.value.length()); - } - - @Override - public String toString(){ - return " " + value + " "; - } - - public String getOperator(){ - return value; - } - - public boolean isAnd(){ - return "AND".equals(value); - } - - public boolean isOr(){ - return "OR".equals(value); - } -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ComparisonOp.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ComparisonOp.java deleted file mode 100644 index 55a86cc8f1..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/ComparisonOp.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -import java.io.InputStream; - -/** - * @author Viren - */ -public class ComparisonOp extends AbstractNode { - - public static enum Operators { - BETWEEN("BETWEEN"), EQUALS("="), LESS_THAN("<"), GREATER_THAN(">"), IN("IN"), NOT_EQUALS("!="), IS("IS"); - - private String value; - Operators(String value){ - this.value = value; - } - - public String value(){ - return value; - } - } - - private static final int betwnLen = Operators.BETWEEN.value().length(); - - private String value; - - public ComparisonOp(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(betwnLen); - if(peeked[0] == '=' || peeked[0] == '>' || peeked[0] == '<'){ - this.value = new String(peeked, 0, 1); - }else if(peeked[0] == 'I' && peeked[1] == 'N'){ - this.value = "IN"; - }else if(peeked[0] == 'I' && peeked[1] == 'S'){ - this.value = "IS"; - }else if(peeked[0] == '!' && peeked[1] == '='){ - this.value = "!="; - }else if(peeked.length == betwnLen && new String(peeked).equals(Operators.BETWEEN.value())){ - this.value = Operators.BETWEEN.value(); - }else{ - throw new ParserException("Expecting an operator (=, >, <, !=, BETWEEN, IN), but found none. 
Peeked=>" + new String(peeked)); - } - - read(this.value.length()); - } - - @Override - public String toString(){ - return " " + value + " "; - } - - public String getOperator(){ - return value; - } - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java index de6dbba0e2..a2d170491f 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/Expression.java @@ -18,13 +18,17 @@ */ package com.netflix.conductor.dao.es5.index.query.parser; -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import java.io.BufferedInputStream; +import java.io.ByteArrayInputStream; +import java.io.InputStream; + /** * @author Viren * diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FunctionThrowingException.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FunctionThrowingException.java deleted file mode 100644 index b0624fc2ff..0000000000 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/FunctionThrowingException.java +++ /dev/null @@ -1,30 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es5.index.query.parser; - -/** - * @author Viren - * - */ -@FunctionalInterface -public interface FunctionThrowingException { - - void accept(T t) throws Exception; - -} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java index e325d505cc..3b59eaa4fe 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/GroupedExpression.java @@ -1,67 +1,67 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** - * + * */ package com.netflix.conductor.dao.es5.index.query.parser; -import java.io.InputStream; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; import org.elasticsearch.index.query.QueryBuilder; +import java.io.InputStream; + /** * @author Viren - * + * */ public class GroupedExpression extends AbstractNode implements FilterProvider { - private Expression expression; - - public GroupedExpression(InputStream is) throws ParserException { - super(is); - } + private Expression expression; + + public GroupedExpression(InputStream is) throws ParserException { + super(is); + } + + @Override + protected void _parse() throws Exception { + byte[] peeked = read(1); + assertExpected(peeked, "("); + + this.expression = new Expression(is); + + peeked = read(1); + assertExpected(peeked, ")"); + + } + + @Override + public String toString() { + return "(" + expression + ")"; + } - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - - this.expression = new Expression(is); - - peeked = read(1); - assertExpected(peeked, ")"); - - } - - @Override - public String toString(){ - return "(" + expression + ")"; - } + /** + * @return the expression + */ + public Expression getExpression() { + return expression; + } - /** - * @return the expression - */ - public Expression getExpression() { - return expression; - } + @Override + public QueryBuilder getFilterBuilder() { + return expression.getFilterBuilder(); + } - @Override - public QueryBuilder getFilterBuilder() { - return expression.getFilterBuilder(); - } - } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java index 7b8127b99a..05cd829b50 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/query/parser/NameValue.java @@ -1,29 +1,33 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** - * + * */ package com.netflix.conductor.dao.es5.index.query.parser; -import java.io.InputStream; +import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; +import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; +import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp.Operators; +import com.netflix.conductor.elasticsearch.query.parser.ConstValue; +import com.netflix.conductor.elasticsearch.query.parser.ListConst; +import com.netflix.conductor.elasticsearch.query.parser.Name; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; +import com.netflix.conductor.elasticsearch.query.parser.Range; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import com.netflix.conductor.dao.es5.index.query.parser.ComparisonOp.Operators; +import java.io.InputStream; /** * @author Viren @@ -36,84 +40,85 @@ */ public class NameValue extends AbstractNode implements FilterProvider { - private Name name; - - private ComparisonOp op; - - private ConstValue value; - - private Range range; - - private ListConst valueList; - - public NameValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.name = new Name(is); - this.op = new ComparisonOp(is); - - if(this.op.getOperator().equals(Operators.BETWEEN.value())){ - this.range = new Range(is); - }if(this.op.getOperator().equals(Operators.IN.value())){ - this.valueList = new ListConst(is); - }else{ - this.value = new ConstValue(is); - } - } - - @Override - public String toString() { - return "" + name + op + value; - } - - /** - * @return the name - */ - public Name getName() { - return name; - } - - /** - * @return the op - */ - public ComparisonOp getOp() { - return op; - } - - /** - * @return the value - */ - public ConstValue getValue() { - return value; - } - - @Override - public QueryBuilder getFilterBuilder(){ - if(op.getOperator().equals(Operators.EQUALS.value())){ - return QueryBuilders.queryStringQuery(name.getName() + ":" + value.getValue().toString()); - }else if(op.getOperator().equals(Operators.BETWEEN.value())){ - return QueryBuilders.rangeQuery(name.getName()).from(range.getLow()).to(range.getHigh()); - }else if(op.getOperator().equals(Operators.IN.value())){ - return QueryBuilders.termsQuery(name.getName(), valueList.getList()); - }else if(op.getOperator().equals(Operators.NOT_EQUALS.value())){ - return QueryBuilders.queryStringQuery("NOT " + name.getName() + ":" + value.getValue().toString()); - }else if(op.getOperator().equals(Operators.GREATER_THAN.value())){ - return QueryBuilders.rangeQuery(name.getName()).from(value.getValue()).includeLower(false).includeUpper(false); - }else 
if(op.getOperator().equals(Operators.IS.value())){
-			if(value.getSysConstant().equals(ConstValue.SystemConsts.NULL)){
-				return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).mustNot(QueryBuilders.existsQuery(name.getName())));
-			} else if(value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)){
-				return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(QueryBuilders.existsQuery(name.getName())));
-			}
-		}else if(op.getOperator().equals(Operators.LESS_THAN.value())){
-			return QueryBuilders.rangeQuery(name.getName()).to(value.getValue()).includeLower(false).includeUpper(false);
-		}
-
-		throw new IllegalStateException("Incorrect/unsupported operators");
-	}
-
-
+    private Name name;
+
+    private ComparisonOp op;
+
+    private ConstValue value;
+
+    private Range range;
+
+    private ListConst valueList;
+
+    public NameValue(InputStream is) throws ParserException {
+        super(is);
+    }
+
+    @Override
+    protected void _parse() throws Exception {
+        this.name = new Name(is);
+        this.op = new ComparisonOp(is);
+
+        // The three value forms are mutually exclusive, so the checks chain with else-if.
+        if (this.op.getOperator().equals(Operators.BETWEEN.value())) {
+            this.range = new Range(is);
+        } else if (this.op.getOperator().equals(Operators.IN.value())) {
+            this.valueList = new ListConst(is);
+        } else {
+            this.value = new ConstValue(is);
+        }
+    }
+
+    @Override
+    public String toString() {
+        return "" + name + op + value;
+    }
+
+    /**
+     * @return the name
+     */
+    public Name getName() {
+        return name;
+    }
+
+    /**
+     * @return the op
+     */
+    public ComparisonOp getOp() {
+        return op;
+    }
+
+    /**
+     * @return the value
+     */
+    public ConstValue getValue() {
+        return value;
+    }
+
+    @Override
+    public QueryBuilder getFilterBuilder() {
+        if (op.getOperator().equals(Operators.EQUALS.value())) {
+            return QueryBuilders.queryStringQuery(name.getName() + ":" + value.getValue().toString());
+        } else if (op.getOperator().equals(Operators.BETWEEN.value())) {
+            return QueryBuilders.rangeQuery(name.getName()).from(range.getLow()).to(range.getHigh());
+        } else if (op.getOperator().equals(Operators.IN.value())) {
+            return QueryBuilders.termsQuery(name.getName(), valueList.getList());
+        } else if (op.getOperator().equals(Operators.NOT_EQUALS.value())) {
+            return QueryBuilders.queryStringQuery("NOT " + name.getName() + ":" + value.getValue().toString());
+        } else if (op.getOperator().equals(Operators.GREATER_THAN.value())) {
+            return QueryBuilders.rangeQuery(name.getName()).from(value.getValue()).includeLower(false).includeUpper(false);
+        } else if (op.getOperator().equals(Operators.IS.value())) {
+            if (value.getSysConstant().equals(ConstValue.SystemConsts.NULL)) {
+                return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).mustNot(QueryBuilders.existsQuery(name.getName())));
+            } else if (value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)) {
+                return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(QueryBuilders.existsQuery(name.getName())));
+            }
+        } else if (op.getOperator().equals(Operators.LESS_THAN.value())) {
+            return QueryBuilders.rangeQuery(name.getName()).to(value.getValue()).includeLower(false).includeUpper(false);
+        }
+
+        throw new IllegalStateException("Incorrect/unsupported operators");
+    }
+
+
 }
diff --git a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java
b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java index e76bf6669a..3a282864b4 100644 --- a/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java +++ b/es5-persistence/src/test/java/com/netflix/conductor/dao/es5/index/query/parser/TestExpression.java @@ -18,13 +18,20 @@ */ package com.netflix.conductor.dao.es5.index.query.parser; +import com.netflix.conductor.elasticsearch.query.parser.AbstractParserTest; +import com.netflix.conductor.elasticsearch.query.parser.ConstValue; + import org.junit.Test; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.InputStream; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; /** * @author Viren diff --git a/settings.gradle b/settings.gradle index ea19e2b69b..dce6513d5b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,7 +1,7 @@ rootProject.name='conductor' -include 'client','common','contribs','core','es2-persistence','es5-persistence','jersey','mysql-persistence' -include 'redis-persistence','server','test-harness','ui' +include 'client','common','contribs','core', 'elasticsearch', 'es2-persistence','es5-persistence','jersey' +include 'mysql-persistence', 'redis-persistence','server','test-harness','ui' include 'protogen' include 'grpc', 'grpc-server', 'grpc-client' From 899e480dacab1cf4085ea2a1d3ffee3419b44043 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 21 Jun 2018 15:11:46 +0200 Subject: [PATCH 064/163] Re-wire embedded elastic search startup and create a specific configuration. 
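
The embedded server is no longer started from inside the persistence modules: it is resolved through the
new EmbeddedElasticSearchProvider, which yields an Optional<EmbeddedElasticSearch> so deployments that point
at an external cluster simply skip the start call, and all Elasticsearch settings now sit behind the new
ElasticSearchConfiguration interface. A minimal sketch of the intended startup flow, assuming a Guice
Injector has already been built by the bootstrap (the helper name and error handling below are illustrative
assumptions, not the exact code in Main):

    // Sketch only: start the in-process Elasticsearch node when one is configured.
    static void startEmbeddedElasticSearch(com.google.inject.Injector injector) throws Exception {
        java.util.Optional<EmbeddedElasticSearch> embedded =
                injector.getInstance(EmbeddedElasticSearchProvider.class).get();
        if (embedded.isPresent()) {
            // Lifecycle.start(); boots the local node before the index DAO is wired up.
            embedded.get().start();
        }
    }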
--- .../ElasticSearchConfiguration.java | 25 ++ .../elasticsearch/ElasticSearchModule.java | 10 + .../elasticsearch/EmbeddedElasticSearch.java | 47 +++ .../EmbeddedElasticSearchProvider.java | 8 + ...mPropertiesElasticSearchConfiguration.java | 7 + .../dao/es/EmbeddedElasticSearch.java | 141 ------- ...SearchDAO.java => ElasticSearchV2DAO.java} | 12 +- .../es2/ElasticSearchV2Module.java} | 13 +- .../es2/EmbeddedElasticSearchV2.java | 102 ++++++ .../es2/EmbeddedElasticSearchV2Provider.java | 28 ++ .../dao/es5/EmbeddedElasticSearchV5.java | 164 --------- .../es5/ElasticSearchV5Module.java} | 27 +- .../es5/EmbeddedElasticSearchV5.java | 123 +++++++ .../es5/EmbeddedElasticSearchV5Provider.java | 28 ++ .../com/netflix/conductor/bootstrap/Main.java | 10 + .../conductor/bootstrap/ModulesProvider.java | 39 +- test-harness/build.gradle | 41 ++- .../tests/integration/End2EndGrpcTests.java | 345 +++++++++--------- .../tests/integration/End2EndTests.java | 327 +++++++++-------- 19 files changed, 795 insertions(+), 702 deletions(-) create mode 100644 elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java create mode 100644 elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java create mode 100644 elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java create mode 100644 elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java create mode 100644 elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/EmbeddedElasticSearch.java rename es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/{ElasticSearchDAO.java => ElasticSearchV2DAO.java} (97%) rename es2-persistence/src/main/java/com/netflix/conductor/{dao/es/index/ElasticSearchModule.java => elasticsearch/es2/ElasticSearchV2Module.java} (81%) create mode 100644 es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java create mode 100644 es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java delete mode 100644 es5-persistence/src/main/java/com/netflix/conductor/dao/es5/EmbeddedElasticSearchV5.java rename es5-persistence/src/main/java/com/netflix/conductor/{dao/es5/index/ElasticSearchModuleV5.java => elasticsearch/es5/ElasticSearchV5Module.java} (84%) create mode 100644 es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java create mode 100644 es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java new file mode 100644 index 0000000000..e465cbfc30 --- /dev/null +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -0,0 +1,25 @@ +package com.netflix.conductor.elasticsearch; + +import com.netflix.conductor.core.config.Configuration; + +public interface ElasticSearchConfiguration extends Configuration { + String ELASTIC_SEARCH_VERSION_PROPERTY_NAME = "workflow.elasticsearch.version"; + int ELASTIC_SEARCH_VERSION_DEFAULT_VALUE = 2; + + String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url"; + String 
ELASTIC_SEARCH_URL_DEFAULT_VALUE = "localhost:9300"; + + String ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.index.name"; + String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; + + default int getVersion(){ + return getIntProperty(ELASTIC_SEARCH_VERSION_PROPERTY_NAME, ELASTIC_SEARCH_VERSION_DEFAULT_VALUE); + } + default String getURL(){ + return getProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, ELASTIC_SEARCH_URL_DEFAULT_VALUE); + } + + default String getIndexName(){ + return getProperty(ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME, ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE); + } +} diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java new file mode 100644 index 0000000000..91d5a5c304 --- /dev/null +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java @@ -0,0 +1,10 @@ +package com.netflix.conductor.elasticsearch; + +import com.google.inject.AbstractModule; + +public class ElasticSearchModule extends AbstractModule { + @Override + protected void configure() { + bind(ElasticSearchConfiguration.class).to(SystemPropertiesElasticSearchConfiguration.class); + } +} diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java new file mode 100644 index 0000000000..2317076761 --- /dev/null +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java @@ -0,0 +1,47 @@ +package com.netflix.conductor.elasticsearch; + +import com.netflix.conductor.service.Lifecycle; + +import org.apache.commons.io.FileUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; + +public interface EmbeddedElasticSearch extends Lifecycle { + Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearch.class); + String ES_PATH_DATA = "path.data"; + String ES_PATH_HOME = "path.home"; + + int DEFAULT_PORT = 9200; + String DEFAULT_CLUSTER_NAME = "elasticsearch_test"; + String DEFAULT_HOST = "127.0.0.1"; + String DEFAULT_SETTING_FILE = "embedded-es.yml"; + + default void cleanDataDir(String path) { + try { + logger.info("Deleting contents of data dir {}", path); + File f = new File(path); + if (f.exists()) { + FileUtils.cleanDirectory(new File(path)); + } + } catch (IOException e) { + logger.error("Failed to delete ES data dir"); + } + } + + default File createDataDir(String dataDirLoc) throws IOException { + Path dataDirPath = FileSystems.getDefault().getPath(dataDirLoc); + Files.createDirectories(dataDirPath); + return dataDirPath.toFile(); + } + + default File setupDataDir(String path) throws IOException { + cleanDataDir(path); + return createDataDir(path); + } +} diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java new file mode 100644 index 0000000000..8f22fbd633 --- /dev/null +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java @@ -0,0 +1,8 @@ +package com.netflix.conductor.elasticsearch; + +import java.util.Optional; + +import javax.inject.Provider; + +public interface EmbeddedElasticSearchProvider 
diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java
new file mode 100644
index 0000000000..8f22fbd633
--- /dev/null
+++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java
@@ -0,0 +1,8 @@
+package com.netflix.conductor.elasticsearch;
+
+import java.util.Optional;
+
+import javax.inject.Provider;
+
+public interface EmbeddedElasticSearchProvider extends Provider<Optional<EmbeddedElasticSearch>> {
+}
diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java
new file mode 100644
index 0000000000..33b59d982e
--- /dev/null
+++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java
@@ -0,0 +1,7 @@
+package com.netflix.conductor.elasticsearch;
+
+import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
+
+public class SystemPropertiesElasticSearchConfiguration
+        extends SystemPropertiesConfiguration implements ElasticSearchConfiguration {
+}
diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/EmbeddedElasticSearch.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/EmbeddedElasticSearch.java
deleted file mode 100644
index 31574a84b0..0000000000
--- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/EmbeddedElasticSearch.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/**
- * Copyright 2017 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.netflix.conductor.dao.es;
-
-import org.apache.commons.io.FileUtils;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.node.Node;
-import org.elasticsearch.node.NodeBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
-
-public class EmbeddedElasticSearch {
-
-    private static final String ES_PATH_DATA = "path.data";
-
-    private static final String ES_PATH_HOME = "path.home";
-
-    private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearch.class);
-
-    public static final int DEFAULT_PORT = 9200;
-    public static final String DEFAULT_CLUSTER_NAME = "elasticsearch_test";
-    public static final String DEFAULT_HOST = "127.0.0.1";
-    public static final String DEFAULT_SETTING_FILE = "embedded-es.yml";
-
-    private static Node instance;
-    private static Client client;
-    private static File dataDir;
-
-    public static void start() throws Exception {
-        start(DEFAULT_CLUSTER_NAME, DEFAULT_HOST, DEFAULT_PORT, true);
-    }
-
-    public static synchronized void start(String clusterName, String host, int port, boolean enableTransportClient) throws Exception{
-
-        if (instance != null && !instance.isClosed()) {
-            logger.info("Elastic Search is already running on port {}", getPort());
-            return;
-        }
-
-        final Settings settings = getSettings(clusterName, host, port, enableTransportClient);
-        setupDataDir(settings);
-
-        logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name"));
-        instance = NodeBuilder.nodeBuilder().data(true).local(enableTransportClient ? 
false : true).settings(settings).client(false).node(); - instance.start(); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - instance.close(); - } - }); - logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort()); - client = instance.client(); - } - - private static void setupDataDir(Settings settings) { - String path = settings.get(ES_PATH_DATA); - cleanDataDir(path); - createDataDir(path); - } - - public static void cleanDataDir(String path) { - try { - logger.info("Deleting contents of data dir {}", path); - File f = new File(path); - if (f.exists()) { - FileUtils.cleanDirectory(new File(path)); - } - } catch (IOException e) { - logger.error("Failed to delete ES data dir"); - } - } - - private static Settings getSettings(String clusterName, String host, int port, boolean enableTransportClient) throws IOException { - dataDir = Files.createTempDirectory(clusterName+"_"+System.currentTimeMillis()+"data").toFile(); - File homeDir = Files.createTempDirectory(clusterName+"_"+System.currentTimeMillis()+"-home").toFile(); - return Settings.builder() - .put("cluster.name", clusterName) - .put("http.host", host) - .put("http.port", port) - .put(ES_PATH_DATA, dataDir.getAbsolutePath()) - .put(ES_PATH_HOME, homeDir.getAbsolutePath()) - .put("http.enabled", true) - .put("script.inline", "on") - .put("script.indexed", "on") - .build(); - } - - private static void createDataDir(String dataDirLoc) { - try { - Path dataDirPath = FileSystems.getDefault().getPath(dataDirLoc); - Files.createDirectories(dataDirPath); - dataDir = dataDirPath.toFile(); - } catch (IOException e) { - logger.error("Failed to create data dir"); - } - } - - public static Client getClient() { - if (instance == null || instance.isClosed()) { - logger.error("Embedded ElasticSearch is not Initialized and started, please call start() method first"); - return null; - } - return client; - } - - private static String getPort() { - return instance.settings().get("http.port"); - } - - public static synchronized void stop() { - - if (instance != null && !instance.isClosed()) { - String port = getPort(); - logger.info("Stopping Elastic Search"); - instance.close(); - logger.info("Elastic Search on port {} stopped", port); - } - - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java similarity index 97% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java rename to es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java index cf73506aba..979402d6aa 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchDAO.java +++ b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java @@ -95,9 +95,9 @@ */ @Trace @Singleton -public class ElasticSearchDAO implements IndexDAO { +public class ElasticSearchV2DAO implements IndexDAO { - private static Logger logger = LoggerFactory.getLogger(ElasticSearchDAO.class); + private static Logger logger = LoggerFactory.getLogger(ElasticSearchV2DAO.class); private static final String WORKFLOW_DOC_TYPE = "workflow"; @@ -109,7 +109,7 @@ public class ElasticSearchDAO implements IndexDAO { private static final String MSG_DOC_TYPE = "message"; - private static final String className = ElasticSearchDAO.class.getSimpleName(); + private 
static final String className = ElasticSearchV2DAO.class.getSimpleName(); private static final int RETRY_COUNT = 3; @@ -135,7 +135,7 @@ public class ElasticSearchDAO implements IndexDAO { } @Inject - public ElasticSearchDAO(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { + public ElasticSearchV2DAO(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { this.objectMapper = objectMapper; this.elasticSearchClient = elasticSearchClient; this.indexName = config.getProperty("workflow.elasticsearch.index.name", null); @@ -185,7 +185,7 @@ private void initIndex() throws Exception { GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet(); if(result.getIndexTemplates().isEmpty()) { logger.info("Creating the index template 'wfe_template'"); - InputStream stream = ElasticSearchDAO.class.getResourceAsStream("/template.json"); + InputStream stream = ElasticSearchV2DAO.class.getResourceAsStream("/template.json"); byte[] templateSource = IOUtils.toByteArray(stream); try { @@ -208,7 +208,7 @@ private void initIndex() throws Exception { GetMappingsResponse response = elasticSearchClient.admin().indices().prepareGetMappings(indexName).addTypes(WORKFLOW_DOC_TYPE).execute().actionGet(); if(response.mappings().isEmpty()) { logger.info("Adding the workflow type mappings"); - InputStream stream = ElasticSearchDAO.class.getResourceAsStream("/wfe_type.json"); + InputStream stream = ElasticSearchV2DAO.class.getResourceAsStream("/wfe_type.json"); byte[] bytes = IOUtils.toByteArray(stream); String source = new String(bytes); try { diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchModule.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java similarity index 81% rename from es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchModule.java rename to es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java index 3b37a550c0..141c2da2c4 100644 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchModule.java +++ b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java @@ -13,13 +13,16 @@ /** * */ -package com.netflix.conductor.dao.es.index; +package com.netflix.conductor.elasticsearch.es2; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.dao.es.index.ElasticSearchV2DAO; +import com.netflix.conductor.elasticsearch.ElasticSearchModule; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -36,9 +39,9 @@ /** * @author Viren Provider for the elasticsearch transport client */ -public class ElasticSearchModule extends AbstractModule { +public class ElasticSearchV2Module extends AbstractModule { - private static Logger log = LoggerFactory.getLogger(ElasticSearchModule.class); + private static Logger log = LoggerFactory.getLogger(ElasticSearchV2Module.class); @Provides @Singleton @@ -68,6 +71,8 @@ public Client getClient(Configuration config) throws Exception { @Override protected void configure() { - bind(IndexDAO.class).to(ElasticSearchDAO.class); + install(new ElasticSearchModule()); + 
bind(IndexDAO.class).to(ElasticSearchV2DAO.class); + bind(EmbeddedElasticSearchProvider.class).to(EmbeddedElasticSearchV2Provider.class); } } diff --git a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java new file mode 100644 index 0000000000..71b6ee72c2 --- /dev/null +++ b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java @@ -0,0 +1,102 @@ +/** + * Copyright 2017 Netflix, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package com.netflix.conductor.elasticsearch.es2; + +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; + +import org.elasticsearch.client.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.node.Node; +import org.elasticsearch.node.NodeBuilder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; + +public class EmbeddedElasticSearchV2 implements EmbeddedElasticSearch { + + private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV2.class); + + private Node instance; + private Client client; + private File dataDir; + + @Override + public void start() throws Exception { + start(DEFAULT_CLUSTER_NAME, DEFAULT_HOST, DEFAULT_PORT, true); + } + + public synchronized void start(String clusterName, String host, int port, boolean enableTransportClient) throws Exception { + + if (instance != null && !instance.isClosed()) { + logger.info("Elastic Search is already running on port {}", getPort()); + return; + } + + final Settings settings = getSettings(clusterName, host, port, enableTransportClient); + setupDataDir(settings.get(ES_PATH_DATA)); + + logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name")); + instance = NodeBuilder.nodeBuilder().data(true).local(enableTransportClient ? 
false : true).settings(settings).client(false).node();
+        instance.start();
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
+            public void run() {
+                instance.close();
+            }
+        });
+        logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort());
+        client = instance.client();
+    }
+
+    private Settings getSettings(String clusterName, String host, int port, boolean enableTransportClient) throws IOException {
+        dataDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "data").toFile();
+        File homeDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "-home").toFile();
+        return Settings.builder()
+                .put("cluster.name", clusterName)
+                .put("http.host", host)
+                .put("http.port", port)
+                .put(ES_PATH_DATA, dataDir.getAbsolutePath())
+                .put(ES_PATH_HOME, homeDir.getAbsolutePath())
+                .put("http.enabled", true)
+                .put("script.inline", "on")
+                .put("script.indexed", "on")
+                .build();
+    }
+
+    public Client getClient() {
+        if (instance == null || instance.isClosed()) {
+            logger.error("Embedded ElasticSearch is not initialized and started, please call start() method first");
+            return null;
+        }
+        return client;
+    }
+
+    private String getPort() {
+        return instance.settings().get("http.port");
+    }
+
+    @Override
+    public synchronized void stop() {
+
+        if (instance != null && !instance.isClosed()) {
+            String port = getPort();
+            logger.info("Stopping Elastic Search");
+            instance.close();
+            logger.info("Elastic Search on port {} stopped", port);
+        }
+
+    }
+}
diff --git a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java
new file mode 100644
index 0000000000..11f2cc2c68
--- /dev/null
+++ b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java
@@ -0,0 +1,28 @@
+package com.netflix.conductor.elasticsearch.es2;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider;
+
+import java.util.Optional;
+
+import javax.inject.Inject;
+
+public class EmbeddedElasticSearchV2Provider implements EmbeddedElasticSearchProvider {
+    private final ElasticSearchConfiguration configuration;
+
+    @Inject
+    public EmbeddedElasticSearchV2Provider(ElasticSearchConfiguration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public Optional<EmbeddedElasticSearch> get() {
+        return isMemoryAndVersion() ? Optional.of(new EmbeddedElasticSearchV2()) : Optional.empty();
+    }
+
+    private boolean isMemoryAndVersion() {
+        return configuration.getVersion() == 2 && configuration.getDB().equals(Configuration.DB.MEMORY);
+    }
+}
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/EmbeddedElasticSearchV5.java
deleted file mode 100644
index c106ff006d..0000000000
--- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/EmbeddedElasticSearchV5.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/**
- * Copyright 2017 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package com.netflix.conductor.dao.es5; - -import org.apache.commons.io.FileUtils; -import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.Node; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; - -import java.util.Collection; - -import org.elasticsearch.node.InternalSettingsPreparer; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.transport.Netty4Plugin; - -import static java.util.Collections.singletonList; - - -public class EmbeddedElasticSearchV5 { - - private static final String ES_PATH_DATA = "path.data"; - - private static final String ES_PATH_HOME = "path.home"; - - private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV5.class); - - public static final int DEFAULT_PORT = 9200; - public static final String DEFAULT_CLUSTER_NAME = "elasticsearch_test"; - public static final String DEFAULT_HOST = "127.0.0.1"; - public static final String DEFAULT_SETTING_FILE = "embedded-es.yml"; - - private static Node instance; - private static Client client; - private static File dataDir; - - private static class PluginConfigurableNode extends Node { - public PluginConfigurableNode(Settings preparedSettings, Collection> classpathPlugins) { - super(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), classpathPlugins); - } - } - - public static void start() throws Exception { - start(DEFAULT_CLUSTER_NAME, DEFAULT_HOST, DEFAULT_PORT, true); - } - - public static synchronized void start(String clusterName, String host, int port, boolean enableTransportClient) throws Exception { - - if (instance != null && !instance.isClosed()) { - logger.info("Elastic Search is already running on port {}", getPort()); - return; - } - - final Settings settings = getSettings(clusterName, host, port, enableTransportClient); - setupDataDir(settings); - - logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name")); - instance = new PluginConfigurableNode(settings, singletonList(Netty4Plugin.class)); - instance.start(); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - try { - instance.close(); - } catch (IOException e) { - logger.error("Error closing ElasticSearch"); - } - } - }); - logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort()); - client = instance.client(); - } - - private static void setupDataDir(Settings settings) { - String path = settings.get(ES_PATH_DATA); - cleanDataDir(path); - createDataDir(path); - } - - public static void cleanDataDir(String path) { - try { - logger.info("Deleting contents of data dir {}", path); - File f = new File(path); - if (f.exists()) { - FileUtils.cleanDirectory(new File(path)); - } - } catch (IOException e) { - logger.error("Failed to delete ES data dir"); - } - } - - private static Settings getSettings(String clusterName, String host, int port, 
boolean enableTransportClient) throws IOException { - dataDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "data").toFile(); - File homeDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "-home").toFile(); - Settings.Builder settingsBuilder = Settings.builder() - .put("cluster.name", clusterName) - .put("http.host", host) - .put("http.port", port) - .put(ES_PATH_DATA, dataDir.getAbsolutePath()) - .put(ES_PATH_HOME, homeDir.getAbsolutePath()) - .put("http.enabled", true) - .put("script.inline", true) - .put("script.stored", true) - .put("node.data", true) - .put("http.enabled", true) - .put("http.type", "netty4") - .put("transport.type", "netty4"); - - return settingsBuilder.build(); - } - - private static void createDataDir(String dataDirLoc) { - try { - Path dataDirPath = FileSystems.getDefault().getPath(dataDirLoc); - Files.createDirectories(dataDirPath); - dataDir = dataDirPath.toFile(); - } catch (IOException e) { - logger.error("Failed to create data dir"); - } - } - - public static Client getClient() { - if (instance == null || instance.isClosed()) { - logger.error("Embedded ElasticSearch is not Initialized and started, please call start() method first"); - return null; - } - return client; - } - - private static String getPort() { - return instance.settings().get("http.port"); - } - - public static synchronized void stop() throws Exception { - - if (instance != null && !instance.isClosed()) { - String port = getPort(); - logger.info("Stopping Elastic Search"); - instance.close(); - logger.info("Elastic Search on port {} stopped", port); - } - - } -} \ No newline at end of file diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchModuleV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java similarity index 84% rename from es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchModuleV5.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java index ac63184934..d543691767 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchModuleV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java @@ -16,32 +16,37 @@ /** * */ -package com.netflix.conductor.dao.es5.index; +package com.netflix.conductor.elasticsearch.es5; -import java.net.InetAddress; - -import javax.inject.Singleton; +import com.google.inject.AbstractModule; +import com.google.inject.Provides; +import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.dao.es5.index.ElasticSearchDAOV5; +import com.netflix.conductor.elasticsearch.ElasticSearchModule; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; + import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.InetSocketTransportAddress; +import org.elasticsearch.transport.client.PreBuiltTransportClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.elasticsearch.transport.client.PreBuiltTransportClient; -import com.google.inject.AbstractModule; -import com.google.inject.Provides; -import com.netflix.conductor.core.config.Configuration; + +import java.net.InetAddress; + +import javax.inject.Singleton; /** * @author 
Viren
 * Provider for the elasticsearch transport client
 */
-public class ElasticSearchModuleV5 extends AbstractModule {
+public class ElasticSearchV5Module extends AbstractModule {
 
-    private static Logger log = LoggerFactory.getLogger(ElasticSearchModuleV5.class);
+    private static Logger log = LoggerFactory.getLogger(ElasticSearchV5Module.class);
 
     @Provides
     @Singleton
@@ -72,6 +77,8 @@ public Client getClient(Configuration config) throws Exception {
 
     @Override
     protected void configure() {
+        install(new ElasticSearchModule());
         bind(IndexDAO.class).to(ElasticSearchDAOV5.class);
+        bind(EmbeddedElasticSearchProvider.class).to(EmbeddedElasticSearchV5Provider.class);
     }
 }
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
new file mode 100644
index 0000000000..73a3b3db7c
--- /dev/null
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
@@ -0,0 +1,122 @@
+/**
+ * Copyright 2017 Netflix, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
+ */
+package com.netflix.conductor.elasticsearch.es5;
+
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;
+
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.InternalSettingsPreparer;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.transport.Netty4Plugin;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Collection;
+
+import static java.util.Collections.singletonList;
+
+
+public class EmbeddedElasticSearchV5 implements EmbeddedElasticSearch {
+
+    private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV5.class);
+
+    private Node instance;
+    private Client client;
+    private File dataDir;
+
+    private class PluginConfigurableNode extends Node {
+        public PluginConfigurableNode(Settings preparedSettings, Collection<Class<? extends Plugin>> classpathPlugins) {
+            super(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), classpathPlugins);
+        }
+    }
+
+    @Override
+    public void start() throws Exception {
+        start(DEFAULT_CLUSTER_NAME, DEFAULT_HOST, DEFAULT_PORT, true);
+    }
+
+    public synchronized void start(String clusterName, String host, int port, boolean enableTransportClient) throws Exception {
+
+        if (instance != null && !instance.isClosed()) {
+            logger.info("Elastic Search is already running on port {}", getPort());
+            return;
+        }
+
+        final Settings settings = getSettings(clusterName, host, port, enableTransportClient);
+        dataDir = setupDataDir(settings.get(ES_PATH_DATA));
+
+        logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name"));
+        instance = new PluginConfigurableNode(settings, singletonList(Netty4Plugin.class));
+        instance.start();
+        Runtime.getRuntime().addShutdownHook(new Thread() {
+            @Override
+            public void run() {
+                try {
+                    instance.close();
+                } catch (IOException e) {
+                    logger.error("Error closing ElasticSearch", e);
+                }
+            }
+        });
+        logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort());
+        client = instance.client();
+    }
+
+    private Settings getSettings(String clusterName, String host, int port, boolean enableTransportClient) throws IOException {
+        dataDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "data").toFile();
+        File homeDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "-home").toFile();
+        Settings.Builder settingsBuilder = Settings.builder()
+                .put("cluster.name", clusterName)
+                .put("http.host", host)
+                .put("http.port", port)
+                .put(ES_PATH_DATA, dataDir.getAbsolutePath())
+                .put(ES_PATH_HOME, homeDir.getAbsolutePath())
+                .put("http.enabled", true)
+                .put("script.inline", true)
+                .put("script.stored", true)
+                .put("node.data", true)
+                .put("http.type", "netty4")
+                .put("transport.type", "netty4");
+
+        return settingsBuilder.build();
+    }
+
+    public Client getClient() {
+        if (instance == null || instance.isClosed()) {
+            logger.error("Embedded ElasticSearch is not initialized and started, please call start() method first");
+            return null;
+        }
+        return client;
+    }
+
+    private String getPort() {
+        return instance.settings().get("http.port");
+    }
+
+    @Override
+    public synchronized void stop() throws Exception {
+
+        if (instance != null && !instance.isClosed()) {
+            String port = getPort();
+            logger.info("Stopping Elastic Search");
+            instance.close();
+            logger.info("Elastic Search on port {} stopped", port);
+        }
+
+    }
+}
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java
new file mode 100644
index 0000000000..486e4a7498
--- /dev/null
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java
@@ -0,0 +1,28 @@
+package com.netflix.conductor.elasticsearch.es5;
+
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider;
+
+import java.util.Optional;
+
+import javax.inject.Inject;
+
+public class EmbeddedElasticSearchV5Provider implements EmbeddedElasticSearchProvider {
+    private final ElasticSearchConfiguration configuration;
+
+    @Inject
+    public EmbeddedElasticSearchV5Provider(ElasticSearchConfiguration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public Optional<EmbeddedElasticSearch> get() {
+        return isMemoryAndVersion() ? Optional.of(new EmbeddedElasticSearchV5()) : Optional.empty();
+    }
+
+    private boolean isMemoryAndVersion() {
+        return configuration.getVersion() == 5 && configuration.getDB().equals(Configuration.DB.MEMORY);
+    }
+}
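Both embedded providers apply the same gate: a node is only materialized when the store is in-memory and the configured ES major version matches the module on the classpath; otherwise the Optional is empty. The Main change below consumes that Optional; a condensed sketch of the call pattern (injector construction elided):

    // Sketch only: start the embedded node if the configuration asked for one.
    serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get()
            .ifPresent(es -> {
                try {
                    es.start();
                } catch (Exception e) {
                    throw new IllegalStateException("Embedded ElasticSearch failed to start", e);
                }
            });
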
diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java
index a517b38385..9cbd9d4d8c 100644
--- a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java
+++ b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java
@@ -18,6 +18,7 @@
 import com.google.inject.Guice;
 import com.google.inject.Injector;
+import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider;
 import com.netflix.conductor.grpc.server.GRPCServerProvider;
 import com.netflix.conductor.jetty.server.JettyServerProvider;
 
@@ -46,6 +47,16 @@ public static void main(String[] args) throws Exception {
 
         ModulesProvider modulesProvider = bootstrapInjector.getInstance(ModulesProvider.class);
         Injector serverInjector = Guice.createInjector(modulesProvider.get());
+
+        serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().ifPresent(search -> {
+            try {
+                search.start();
+            } catch (Exception e) {
+                System.err.println("Error starting embedded ElasticSearch: " + e.getMessage());
+                System.exit(3);
+            }
+        });
+
         System.out.println("\n\n\n");
         System.out.println(" _ _ ");
         System.out.println(" ___ ___ _ __ __| |_ _ ___| |_ ___ _ __ ");
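With startup handled in Main, the ModulesProvider change below shrinks to pure module selection, keyed off the configuration constant rather than a raw property string. The decision reduces to roughly this (a paraphrase of the hunk that follows, not extra behavior):

    int esVersion = configuration.getIntProperty(
            ElasticSearchConfiguration.ELASTIC_SEARCH_VERSION_PROPERTY_NAME, 2);
    // ES2 remains the default when the property is unset.
    modules.add(esVersion == 5 ? new ElasticSearchV5Module() : new ElasticSearchV2Module());
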
diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java
index fac24e6860..8738ed7fb3 100644
--- a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java
+++ b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java
@@ -5,10 +5,9 @@
 import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.dao.RedisWorkflowModule;
-import com.netflix.conductor.dao.es.EmbeddedElasticSearch;
-import com.netflix.conductor.dao.es.index.ElasticSearchModule;
-import com.netflix.conductor.dao.es5.EmbeddedElasticSearchV5;
-import com.netflix.conductor.dao.es5.index.ElasticSearchModuleV5;
+import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
+import com.netflix.conductor.elasticsearch.es2.ElasticSearchV2Module;
+import com.netflix.conductor.elasticsearch.es5.ElasticSearchV5Module;
 import com.netflix.conductor.mysql.MySQLWorkflowModule;
 import com.netflix.conductor.server.DynomiteClusterModule;
 import com.netflix.conductor.server.JerseyModule;
@@ -74,33 +73,10 @@ private List<AbstractModule> selectModulesToLoad() {
                 logger.info("Starting conductor server using MySQL data store", database);
                 break;
             case MEMORY:
-                // TODO This ES logic should probably live elsewhere.
-                try {
-                    if (
-                            configuration.getIntProperty(
-                                    "workflow.elasticsearch.version",
-                                    2
-                            ) == 5) {
-                        EmbeddedElasticSearchV5.start();
-                    } else {
-                        // Use ES2 as default.
-                        EmbeddedElasticSearch.start();
-                    }
-                    if (System.getProperty("workflow.elasticsearch.url") == null) {
-                        System.setProperty("workflow.elasticsearch.url", "localhost:9300");
-                    }
-                    if (System.getProperty("workflow.elasticsearch.index.name") == null) {
-                        System.setProperty("workflow.elasticsearch.index.name", "conductor");
-                    }
-                } catch (Exception e) {
-                    logger.error("Error starting embedded elasticsearch. Search functionality will be impacted: " + e.getMessage(), e);
-                }
-
                 modules.add(new LocalRedisModule());
                 modules.add(new RedisWorkflowModule());
                 logger.info("Starting conductor server using in memory data store");
                 break;
-
             case REDIS_CLUSTER:
                 modules.add(new RedisClusterModule());
                 modules.add(new RedisWorkflowModule());
@@ -108,10 +84,13 @@ private List<AbstractModule> selectModulesToLoad() {
                 break;
         }
 
-        if (configuration.getIntProperty("workflow.elasticsearch.version", 2) == 5) {
-            modules.add(new ElasticSearchModuleV5());
+        if (configuration.getIntProperty(
+                ElasticSearchConfiguration.ELASTIC_SEARCH_VERSION_PROPERTY_NAME,
+                2
+        ) == 5) {
+            modules.add(new ElasticSearchV5Module());
         } else {
-            modules.add(new ElasticSearchModule());
+            modules.add(new ElasticSearchV2Module());
         }
 
         if (configuration.getJerseyEnabled()) {
diff --git a/test-harness/build.gradle b/test-harness/build.gradle
index 06cddb16e8..9f37bf8410 100644
--- a/test-harness/build.gradle
+++ b/test-harness/build.gradle
@@ -1,28 +1,33 @@
 configurations.all {
     resolutionStrategy {
-	force 'com.fasterxml.jackson.core:jackson-core:2.7.5'
+        force 'com.fasterxml.jackson.core:jackson-core:2.7.5'
     }
 }
 
 dependencies {
-	testCompile project(':conductor-core')
-	testCompile project(':conductor-jersey')
-	testCompile project(':conductor-redis-persistence').sourceSets.test.output
-	testCompile project(':conductor-client')
-	testCompile project(':conductor-server')
-	testCompile project(':conductor-grpc-client')
-	testCompile project(':conductor-grpc-server')
-	testCompile "org.elasticsearch:elasticsearch:${revElasticSearch5}"
-	testCompile "org.eclipse.jetty:jetty-server:${revJetteyServer}"
-	testCompile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}"
-	testCompile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}"
-	testCompile "com.sun.jersey.contribs:jersey-guice:${revJerseyGuice}"
+    testCompile project(':conductor-core')
+    testCompile project(':conductor-jersey')
+    testCompile project(':conductor-redis-persistence').sourceSets.test.output
+    testCompile project(':conductor-client')
+    testCompile project(':conductor-server')
+    testCompile project(':conductor-grpc-client')
+    testCompile project(':conductor-grpc-server')
+    testCompile "org.elasticsearch:elasticsearch:${revElasticSearch5}"
+    testCompile "org.eclipse.jetty:jetty-server:${revJetteyServer}"
+    testCompile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}"
+    testCompile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}"
+    testCompile "com.sun.jersey.contribs:jersey-guice:${revJerseyGuice}"
 
-	testCompile "com.google.inject.extensions:guice-servlet:${revGuiceServlet}"
-	testCompile "io.swagger:swagger-jersey-jaxrs:${revSwaggerJersey}"
+    testCompile "com.google.inject.extensions:guice-servlet:${revGuiceServlet}"
+    testCompile "io.swagger:swagger-jersey-jaxrs:${revSwaggerJersey}"
+}
+
+test {
+    // Because tests in this module bind to ports, they shouldn't be executed in parallel.
+ maxParallelForks = 1 } task server(type: JavaExec) { - main = 'com.netflix.conductor.demo.Main' - classpath = sourceSets.test.runtimeClasspath -} \ No newline at end of file + main = 'com.netflix.conductor.demo.Main' + classpath = sourceSets.test.runtimeClasspath +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 3d96177da7..98f162586a 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -1,25 +1,23 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.tests.integration; import com.google.inject.Guice; import com.google.inject.Injector; + import com.netflix.conductor.bootstrap.BootstrapModule; import com.netflix.conductor.bootstrap.ModulesProvider; import com.netflix.conductor.client.grpc.MetadataClient; @@ -38,9 +36,12 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import com.netflix.conductor.grpc.server.GRPCServer; import com.netflix.conductor.grpc.server.GRPCServerProvider; import com.netflix.conductor.tests.utils.TestEnvironment; + import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -49,168 +50,176 @@ import java.util.List; import java.util.Optional; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; /** * @author Viren * */ public class End2EndGrpcTests { - private static TaskClient tc; - private static WorkflowClient wc; - private static MetadataClient mc; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - System.setProperty("conductor.grpc.server.enabled", "true"); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - Optional server = serverInjector.getInstance(GRPCServerProvider.class).get(); - assertTrue("failed to instantiate GRPCServer", server.isPresent()); - server.get().start(); - - tc = new TaskClient("localhost", 8090); - wc = new WorkflowClient("localhost", 8090); - mc = new MetadataClient("localhost", 8090); - } - - @AfterClass - public static void teardown() { - TestEnvironment.teardown(); - } - - @Test - public void testAll() throws Exception { - assertNotNull(tc); - List defs = new LinkedList<>(); - for(int i = 0; i < 5; i++) { - TaskDef def = new TaskDef("t" + i, "task " + i); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - defs.add(def); - } - mc.registerTaskDefs(defs); - - for(int i = 0; i < 5; i++) { - final String taskName = "t" + i; - TaskDef def = mc.getTaskDef(taskName); - assertNotNull(def); - assertEquals(taskName, def.getName()); - } - - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - WorkflowTask t0 = new WorkflowTask(); - t0.setName("t0"); - t0.setWorkflowTaskType(Type.SIMPLE); - t0.setTaskReferenceName("t0"); - - WorkflowTask t1 = new WorkflowTask(); - t1.setName("t1"); - t1.setWorkflowTaskType(Type.SIMPLE); - t1.setTaskReferenceName("t1"); - - - def.getTasks().add(t0); - def.getTasks().add(t1); - - mc.registerWorkflowDef(def); - WorkflowDef foundd = mc.getWorkflowDef(def.getName(), null); - assertNotNull(foundd); - assertEquals(def.getName(), foundd.getName()); - assertEquals(def.getVersion(), foundd.getVersion()); - - String correlationId = "test_corr_id"; - StartWorkflowRequest startWf = new StartWorkflowRequest(); - startWf.setName(def.getName()); - startWf.setCorrelationId(correlationId); - - String workflowId = wc.startWorkflow(startWf); - assertNotNull(workflowId); - System.out.println("Started workflow id="+workflowId); - - Workflow wf = wc.getWorkflow(workflowId, false); - assertEquals(0, wf.getTasks().size()); - assertEquals(workflowId, 
wf.getWorkflowId()); - - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(workflowId, wf.getWorkflowId()); - - List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); - assertNotNull(runningIds); - assertEquals(1, runningIds.size()); - assertEquals(workflowId, runningIds.get(0)); - - List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); - assertNotNull(polled); - assertEquals(0, polled.size()); - - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertEquals(1, polled.size()); - assertEquals(t0.getName(), polled.get(0).getTaskDefName()); - Task task = polled.get(0); - - Boolean acked = tc.ack(task.getTaskId(), "test"); - assertNotNull(acked); - assertTrue(acked); - - task.getOutputData().put("key1", "value1"); - task.setStatus(Status.COMPLETED); - tc.updateTask(new TaskResult(task)); - - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertTrue(polled.toString(), polled.isEmpty()); - - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(2, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); - assertEquals(Status.COMPLETED, wf.getTasks().get(0).getStatus()); - assertEquals(Status.SCHEDULED, wf.getTasks().get(1).getStatus()); - - Task taskById = tc.getTaskDetails(task.getTaskId()); - assertNotNull(taskById); - assertEquals(task.getTaskId(), taskById.getTaskId()); - - - List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); - assertNotNull(getTasks); - assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - - - getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); - assertNotNull(getTasks); - assertEquals(1, getTasks.size()); - - - Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); - assertNotNull(pending); - assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); - assertEquals(workflowId, pending.getWorkflowInstanceId()); - - Thread.sleep(1000); - SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); - assertNotNull(searchResult); - assertEquals(1, searchResult.getTotalHits()); - - wc.terminateWorkflow(workflowId, "terminate reason"); - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); - - wc.restart(workflowId); - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); - } + private static TaskClient tc; + private static WorkflowClient wc; + private static MetadataClient mc; + private static EmbeddedElasticSearch search; + + @BeforeClass + public static void setup() throws Exception { + TestEnvironment.setup(); + System.setProperty("conductor.grpc.server.enabled", "true"); + + Injector bootInjector = Guice.createInjector(new BootstrapModule()); + Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); + + search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); + search.start(); 
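+        // The provider chain yields Optional<EmbeddedElasticSearch>; the double get() assumes an
+        // in-memory configuration, where an embedded node is always present. An isPresent() check
+        // here would fail faster with a clearer message if the config ever changes.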
+ + Optional server = serverInjector.getInstance(GRPCServerProvider.class).get(); + assertTrue("failed to instantiate GRPCServer", server.isPresent()); + server.get().start(); + + tc = new TaskClient("localhost", 8090); + wc = new WorkflowClient("localhost", 8090); + mc = new MetadataClient("localhost", 8090); + } + + @AfterClass + public static void teardown() throws Exception { + TestEnvironment.teardown(); + search.stop(); + } + + @Test + public void testAll() throws Exception { + assertNotNull(tc); + List defs = new LinkedList<>(); + for (int i = 0; i < 5; i++) { + TaskDef def = new TaskDef("t" + i, "task " + i); + def.setTimeoutPolicy(TimeoutPolicy.RETRY); + defs.add(def); + } + mc.registerTaskDefs(defs); + + for (int i = 0; i < 5; i++) { + final String taskName = "t" + i; + TaskDef def = mc.getTaskDef(taskName); + assertNotNull(def); + assertEquals(taskName, def.getName()); + } + + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + WorkflowTask t0 = new WorkflowTask(); + t0.setName("t0"); + t0.setWorkflowTaskType(Type.SIMPLE); + t0.setTaskReferenceName("t0"); + + WorkflowTask t1 = new WorkflowTask(); + t1.setName("t1"); + t1.setWorkflowTaskType(Type.SIMPLE); + t1.setTaskReferenceName("t1"); + + + def.getTasks().add(t0); + def.getTasks().add(t1); + + mc.registerWorkflowDef(def); + WorkflowDef foundd = mc.getWorkflowDef(def.getName(), null); + assertNotNull(foundd); + assertEquals(def.getName(), foundd.getName()); + assertEquals(def.getVersion(), foundd.getVersion()); + + String correlationId = "test_corr_id"; + StartWorkflowRequest startWf = new StartWorkflowRequest(); + startWf.setName(def.getName()); + startWf.setCorrelationId(correlationId); + + String workflowId = wc.startWorkflow(startWf); + assertNotNull(workflowId); + System.out.println("Started workflow id=" + workflowId); + + Workflow wf = wc.getWorkflow(workflowId, false); + assertEquals(0, wf.getTasks().size()); + assertEquals(workflowId, wf.getWorkflowId()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(workflowId, wf.getWorkflowId()); + + List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); + assertNotNull(runningIds); + assertEquals(1, runningIds.size()); + assertEquals(workflowId, runningIds.get(0)); + + List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); + assertNotNull(polled); + assertEquals(0, polled.size()); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertEquals(1, polled.size()); + assertEquals(t0.getName(), polled.get(0).getTaskDefName()); + Task task = polled.get(0); + + Boolean acked = tc.ack(task.getTaskId(), "test"); + assertNotNull(acked); + assertTrue(acked); + + task.getOutputData().put("key1", "value1"); + task.setStatus(Status.COMPLETED); + tc.updateTask(new TaskResult(task)); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertTrue(polled.toString(), polled.isEmpty()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(2, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); + 
assertEquals(Status.COMPLETED, wf.getTasks().get(0).getStatus()); + assertEquals(Status.SCHEDULED, wf.getTasks().get(1).getStatus()); + + Task taskById = tc.getTaskDetails(task.getTaskId()); + assertNotNull(taskById); + assertEquals(task.getTaskId(), taskById.getTaskId()); + + + List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); + assertNotNull(getTasks); + assertEquals(0, getTasks.size()); //getTasks only gives pending tasks + + + getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); + assertNotNull(getTasks); + assertEquals(1, getTasks.size()); + + + Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); + assertNotNull(pending); + assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); + assertEquals(workflowId, pending.getWorkflowInstanceId()); + + Thread.sleep(1000); + SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); + assertNotNull(searchResult); + assertEquals(1, searchResult.getTotalHits()); + + wc.terminateWorkflow(workflowId, "terminate reason"); + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); + + wc.restart(workflowId); + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index c7752102c6..2d3c518b8d 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -1,20 +1,17 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.tests.integration; @@ -37,9 +34,11 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import com.netflix.conductor.jetty.server.JettyServer; - import com.netflix.conductor.tests.utils.TestEnvironment; + import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -57,154 +56,160 @@ * */ public class End2EndTests { - private static TaskClient tc; - private static WorkflowClient wc; - - @BeforeClass - public static void setup() throws Exception { - TestEnvironment.setup(); - - Injector bootInjector = Guice.createInjector(new BootstrapModule()); - Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); - JettyServer server = new JettyServer(8080, false); - server.start(); - - tc = new TaskClient(); - tc.setRootURI("http://localhost:8080/api/"); - - wc = new WorkflowClient(); - wc.setRootURI("http://localhost:8080/api/"); - } - - @AfterClass - public static void teardown() { - TestEnvironment.teardown(); - } - - @Test - public void testAll() throws Exception { - assertNotNull(tc); - List defs = new LinkedList<>(); - for(int i = 0; i < 5; i++) { - TaskDef def = new TaskDef("t" + i, "task " + i); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - defs.add(def); - } - tc.registerTaskDefs(defs); - List found = tc.getTaskDef(); - assertNotNull(found); - assertEquals(defs.size(), found.size()); - - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - WorkflowTask t0 = new WorkflowTask(); - t0.setName("t0"); - t0.setWorkflowTaskType(Type.SIMPLE); - t0.setTaskReferenceName("t0"); - - WorkflowTask t1 = new WorkflowTask(); - t1.setName("t1"); - t1.setWorkflowTaskType(Type.SIMPLE); - t1.setTaskReferenceName("t1"); - - - def.getTasks().add(t0); - def.getTasks().add(t1); - - wc.registerWorkflow(def); - WorkflowDef foundd = wc.getWorkflowDef(def.getName(), null); - assertNotNull(foundd); - assertEquals(def.getName(), foundd.getName()); - assertEquals(def.getVersion(), foundd.getVersion()); - - String correlationId = "test_corr_id"; - String workflowId = wc.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); - assertNotNull(workflowId); - System.out.println(workflowId); - - Workflow wf = wc.getWorkflow(workflowId, false); - assertEquals(0, wf.getTasks().size()); - assertEquals(workflowId, wf.getWorkflowId()); - - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(workflowId, wf.getWorkflowId()); - - List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); - assertNotNull(runningIds); - assertEquals(1, runningIds.size()); - assertEquals(workflowId, runningIds.get(0)); - - List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); - assertNotNull(polled); - assertEquals(0, polled.size()); - - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertEquals(1, polled.size()); - assertEquals(t0.getName(), polled.get(0).getTaskDefName()); - Task task = polled.get(0); - - Boolean acked = tc.ack(task.getTaskId(), "test"); - 
assertNotNull(acked); - assertTrue(acked.booleanValue()); - - task.getOutputData().put("key1", "value1"); - task.setStatus(Status.COMPLETED); - tc.updateTask(new TaskResult(task), task.getTaskType()); - - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); - assertNotNull(polled); - assertTrue(polled.toString(), polled.isEmpty()); - - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(2, wf.getTasks().size()); - assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); - assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); - assertEquals(Task.Status.COMPLETED, wf.getTasks().get(0).getStatus()); - assertEquals(Task.Status.SCHEDULED, wf.getTasks().get(1).getStatus()); - - Task taskById = tc.getTaskDetails(task.getTaskId()); - assertNotNull(taskById); - assertEquals(task.getTaskId(), taskById.getTaskId()); - - - List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); - assertNotNull(getTasks); - assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - - - getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); - assertNotNull(getTasks); - assertEquals(1, getTasks.size()); - - - Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); - assertNotNull(pending); - assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); - assertEquals(workflowId, pending.getWorkflowInstanceId()); - - Thread.sleep(1000); - SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); - assertNotNull(searchResult); - assertEquals(1, searchResult.getTotalHits()); - - wc.terminateWorkflow(workflowId, "terminate reason"); - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); - - wc.restart(workflowId); - wf = wc.getWorkflow(workflowId, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - assertEquals(1, wf.getTasks().size()); - - } - + private static TaskClient tc; + private static WorkflowClient wc; + private static EmbeddedElasticSearch search; + + @BeforeClass + public static void setup() throws Exception { + TestEnvironment.setup(); + + Injector bootInjector = Guice.createInjector(new BootstrapModule()); + Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); + + search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); + search.start(); + + JettyServer server = new JettyServer(8080, false); + server.start(); + + tc = new TaskClient(); + tc.setRootURI("http://localhost:8080/api/"); + + wc = new WorkflowClient(); + wc.setRootURI("http://localhost:8080/api/"); + } + + @AfterClass + public static void teardown() throws Exception { + TestEnvironment.teardown(); + search.stop(); + } + + @Test + public void testAll() throws Exception { + assertNotNull(tc); + List defs = new LinkedList<>(); + for (int i = 0; i < 5; i++) { + TaskDef def = new TaskDef("t" + i, "task " + i); + def.setTimeoutPolicy(TimeoutPolicy.RETRY); + defs.add(def); + } + tc.registerTaskDefs(defs); + List found = tc.getTaskDef(); + assertNotNull(found); + assertEquals(defs.size(), found.size()); + + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + WorkflowTask t0 = new WorkflowTask(); + t0.setName("t0"); + t0.setWorkflowTaskType(Type.SIMPLE); + t0.setTaskReferenceName("t0"); + + WorkflowTask t1 = new WorkflowTask(); + 
t1.setName("t1"); + t1.setWorkflowTaskType(Type.SIMPLE); + t1.setTaskReferenceName("t1"); + + + def.getTasks().add(t0); + def.getTasks().add(t1); + + wc.registerWorkflow(def); + WorkflowDef foundd = wc.getWorkflowDef(def.getName(), null); + assertNotNull(foundd); + assertEquals(def.getName(), foundd.getName()); + assertEquals(def.getVersion(), foundd.getVersion()); + + String correlationId = "test_corr_id"; + String workflowId = wc.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); + assertNotNull(workflowId); + System.out.println(workflowId); + + Workflow wf = wc.getWorkflow(workflowId, false); + assertEquals(0, wf.getTasks().size()); + assertEquals(workflowId, wf.getWorkflowId()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(workflowId, wf.getWorkflowId()); + + List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); + assertNotNull(runningIds); + assertEquals(1, runningIds.size()); + assertEquals(workflowId, runningIds.get(0)); + + List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); + assertNotNull(polled); + assertEquals(0, polled.size()); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertEquals(1, polled.size()); + assertEquals(t0.getName(), polled.get(0).getTaskDefName()); + Task task = polled.get(0); + + Boolean acked = tc.ack(task.getTaskId(), "test"); + assertNotNull(acked); + assertTrue(acked.booleanValue()); + + task.getOutputData().put("key1", "value1"); + task.setStatus(Status.COMPLETED); + tc.updateTask(new TaskResult(task), task.getTaskType()); + + polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + assertNotNull(polled); + assertTrue(polled.toString(), polled.isEmpty()); + + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(2, wf.getTasks().size()); + assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); + assertEquals(t1.getTaskReferenceName(), wf.getTasks().get(1).getReferenceTaskName()); + assertEquals(Task.Status.COMPLETED, wf.getTasks().get(0).getStatus()); + assertEquals(Task.Status.SCHEDULED, wf.getTasks().get(1).getStatus()); + + Task taskById = tc.getTaskDetails(task.getTaskId()); + assertNotNull(taskById); + assertEquals(task.getTaskId(), taskById.getTaskId()); + + + List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); + assertNotNull(getTasks); + assertEquals(0, getTasks.size()); //getTasks only gives pending tasks + + + getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); + assertNotNull(getTasks); + assertEquals(1, getTasks.size()); + + + Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); + assertNotNull(pending); + assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); + assertEquals(workflowId, pending.getWorkflowInstanceId()); + + Thread.sleep(1000); + SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); + assertNotNull(searchResult); + assertEquals(1, searchResult.getTotalHits()); + + wc.terminateWorkflow(workflowId, "terminate reason"); + wf = wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); + + wc.restart(workflowId); + wf = 
wc.getWorkflow(workflowId, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(1, wf.getTasks().size()); + + } + } From 4ce3b8360f240842c5348beb5b8b05c2a36f1015 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 22 Jun 2018 11:41:21 +0200 Subject: [PATCH 065/163] Remove support for Elastic Search 2.x Delete the ES 2.x module and related configuration items. --- .../config/config-mysql-grpc.properties | 1 - .../ElasticSearchConfiguration.java | 5 - es2-persistence/README.md | 3 - es2-persistence/build.gradle | 37 - es2-persistence/dependencies.lock | 854 ------------------ .../dao/es/index/ElasticSearchV2DAO.java | 504 ----------- .../dao/es/index/query/parser/Expression.java | 126 --- .../es/index/query/parser/FilterProvider.java | 35 - .../index/query/parser/GroupedExpression.java | 70 -- .../dao/es/index/query/parser/NameValue.java | 125 --- .../conductor/dao/es/utils/RetryUtil.java | 126 --- .../es2/ElasticSearchV2Module.java | 78 -- .../es2/EmbeddedElasticSearchV2.java | 102 --- .../es2/EmbeddedElasticSearchV2Provider.java | 28 - .../es/index/query/parser/TestExpression.java | 159 ---- .../query/parser/TestGroupedExpression.java | 32 - es5-persistence/README.md | 1 - .../es5/EmbeddedElasticSearchV5Provider.java | 6 +- server/README.md | 1 - server/build.gradle | 2 - .../conductor/bootstrap/ModulesProvider.java | 19 +- server/src/main/resources/server.properties | 5 +- settings.gradle | 2 +- .../tests/utils/TestEnvironment.java | 1 - versionsOfDependencies.gradle | 1 - 25 files changed, 10 insertions(+), 2313 deletions(-) delete mode 100644 es2-persistence/README.md delete mode 100644 es2-persistence/build.gradle delete mode 100644 es2-persistence/dependencies.lock delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FilterProvider.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/dao/es/utils/RetryUtil.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java delete mode 100644 es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java delete mode 100644 es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestGroupedExpression.java diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties index dc193c28bb..57de7df2d3 100755 --- a/docker/server/config/config-mysql-grpc.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -13,7 +13,6 @@ db=mysql jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch -workflow.elasticsearch.version=5 workflow.elasticsearch.url=elasticsearch:9300 # Name of the elasticsearch cluster diff --git 
a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index e465cbfc30..8a187f7821 100644 --- a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -3,8 +3,6 @@ import com.netflix.conductor.core.config.Configuration; public interface ElasticSearchConfiguration extends Configuration { - String ELASTIC_SEARCH_VERSION_PROPERTY_NAME = "workflow.elasticsearch.version"; - int ELASTIC_SEARCH_VERSION_DEFAULT_VALUE = 2; String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url"; String ELASTIC_SEARCH_URL_DEFAULT_VALUE = "localhost:9300"; @@ -12,9 +10,6 @@ public interface ElasticSearchConfiguration extends Configuration { String ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.index.name"; String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; - default int getVersion(){ - return getIntProperty(ELASTIC_SEARCH_VERSION_PROPERTY_NAME, ELASTIC_SEARCH_VERSION_DEFAULT_VALUE); - } default String getURL(){ return getProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, ELASTIC_SEARCH_URL_DEFAULT_VALUE); } diff --git a/es2-persistence/README.md b/es2-persistence/README.md deleted file mode 100644 index 2e7c86296e..0000000000 --- a/es2-persistence/README.md +++ /dev/null @@ -1,3 +0,0 @@ -## Usage - -Set `workflow.elasticsearch.version=2` in Server module's configuration options. diff --git a/es2-persistence/build.gradle b/es2-persistence/build.gradle deleted file mode 100644 index 23b8f117cd..0000000000 --- a/es2-persistence/build.gradle +++ /dev/null @@ -1,37 +0,0 @@ -plugins { - id 'com.github.johnrengelman.plugin-shadow' version '2.0.3' -} - -configurations { - // Prevent shaded dependencies from being published, while keeping them available to tests - shadow.extendsFrom compileOnly - testRuntime.extendsFrom compileOnly -} - -dependencies { - compile project(':conductor-elasticsearch') - - compileOnly "org.elasticsearch:elasticsearch:${revElasticSearch2}" - compileOnly "com.github.rholder:guava-retrying:${revGuavaRetrying}" - - testCompile project(':conductor-elasticsearch').sourceSets.test.output -} - -// Drop the classifier and delete jar task actions to replace the regular jar artifact with the shadow artifact -shadowJar { - configurations = [project.configurations.shadow] - classifier = null - - // Service files are not included by default. 
- mergeServiceFiles { - include 'META-INF/services/*' - include 'META-INF/maven/*' - } -} - -jar.enabled = false -jar.dependsOn shadowJar - -configureRelocationShadowJar { - prefix = 'conductor' -} diff --git a/es2-persistence/dependencies.lock b/es2-persistence/dependencies.lock deleted file mode 100644 index 056e4db432..0000000000 --- a/es2-persistence/dependencies.lock +++ /dev/null @@ -1,854 +0,0 @@ -{ - "compile": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "compileClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.8.1" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0", - "requested": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch:elasticsearch": { - "locked": "2.4.6", - "requested": "2.4.6" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "compileOnly": { - "com.github.rholder:guava-retrying": { - "locked": "2.0.0", - "requested": "2.0.0" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "org.elasticsearch:elasticsearch": { - "locked": "2.4.6", - "requested": "2.4.6" - } - }, - "default": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "runtime": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - 
"com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "runtimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "shadow": { - 
"com.github.rholder:guava-retrying": { - "locked": "2.0.0", - "requested": "2.0.0" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "org.elasticsearch:elasticsearch": { - "locked": "2.4.6", - "requested": "2.4.6" - } - }, - "testCompile": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "junit:junit-dep": { - "locked": "4.10", - "requested": "4.10" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.mockito:mockito-all": { - "locked": "1.10.0", - "requested": "1.10.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "testCompileClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" 
- ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "junit:junit-dep": { - "locked": "4.10", - "requested": "4.10" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.mockito:mockito-all": { - "locked": "1.10.0", - "requested": "1.10.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "testRuntime": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.8.1" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0", - "requested": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "junit:junit-dep": { - "locked": "4.10", - "requested": "4.10" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch:elasticsearch": { - "locked": "2.4.6", - "requested": "2.4.6" - }, - "org.mockito:mockito-all": { - "locked": "1.10.0", - "requested": "1.10.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - }, - "testRuntimeClasspath": { - "com.fasterxml.jackson.core:jackson-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.8.1" - }, - "com.fasterxml.jackson.core:jackson-databind": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.7.5" - }, - "com.github.rholder:guava-retrying": 
{ - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "2.0.0", - "requested": "2.0.0" - }, - "com.google.inject.extensions:guice-multibindings": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.google.inject:guice": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "4.1.0" - }, - "com.jayway.jsonpath:json-path": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "2.2.0" - }, - "com.netflix.conductor:conductor-common": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "project": true - }, - "com.netflix.conductor:conductor-core": { - "project": true - }, - "com.netflix.servo:servo-core": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.12.17" - }, - "com.netflix.spectator:spectator-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "0.68.0" - }, - "commons-io:commons-io": { - "locked": "2.4", - "requested": "2.4" - }, - "io.reactivex:rxjava": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "1.2.2" - }, - "junit:junit-dep": { - "locked": "4.10", - "requested": "4.10" - }, - "org.apache.commons:commons-lang3": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-core" - ], - "locked": "3.0" - }, - "org.apache.logging.log4j:log4j-api": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.apache.logging.log4j:log4j-core": { - "locked": "2.9.1", - "requested": "2.9.1" - }, - "org.elasticsearch:elasticsearch": { - "locked": "2.4.6", - "requested": "2.4.6" - }, - "org.mockito:mockito-all": { - "locked": "1.10.0", - "requested": "1.10.0" - }, - "org.slf4j:slf4j-api": { - "firstLevelTransitive": [ - "com.netflix.conductor:conductor-common" - ], - "locked": "1.7.25" - } - } -} \ No newline at end of file diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java deleted file mode 100644 index 979402d6aa..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/ElasticSearchV2DAO.java +++ /dev/null @@ -1,504 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es.index; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.annotations.Trace; -import com.netflix.conductor.common.metadata.events.EventExecution; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.TaskExecLog; -import com.netflix.conductor.common.run.SearchResult; -import com.netflix.conductor.common.run.TaskSummary; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.WorkflowSummary; -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.core.events.queue.Message; -import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.core.execution.ApplicationException.Code; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.es.index.query.parser.Expression; -import com.netflix.conductor.dao.es.utils.RetryUtil; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import com.netflix.conductor.metrics.Monitors; - -import org.apache.commons.io.IOUtils; -import org.apache.commons.lang3.StringUtils; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.delete.DeleteRequest; -import org.elasticsearch.action.delete.DeleteResponse; -import org.elasticsearch.action.get.GetRequest; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequestBuilder; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.update.UpdateRequest; -import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.Client; -import org.elasticsearch.index.IndexNotFoundException; -import org.elasticsearch.index.get.GetField; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.indices.IndexAlreadyExistsException; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.sort.SortBuilders; -import org.elasticsearch.search.sort.SortOrder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.InputStream; -import java.text.SimpleDateFormat; -import java.time.LocalDate; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.TimeZone; -import java.util.concurrent.CompletableFuture; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.LinkedBlockingQueue; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; -import java.util.stream.IntStream; - -import javax.inject.Inject; -import javax.inject.Singleton; - -/** - * @author Viren - * - */ -@Trace -@Singleton -public class ElasticSearchV2DAO implements IndexDAO { - - private static Logger logger = LoggerFactory.getLogger(ElasticSearchV2DAO.class); - - private 
static final String WORKFLOW_DOC_TYPE = "workflow"; - - private static final String TASK_DOC_TYPE = "task"; - - private static final String LOG_DOC_TYPE = "task"; - - private static final String EVENT_DOC_TYPE = "event"; - - private static final String MSG_DOC_TYPE = "message"; - - private static final String className = ElasticSearchV2DAO.class.getSimpleName(); - - private static final int RETRY_COUNT = 3; - - private String indexName; - - private String logIndexName; - - private String logIndexPrefix; - - private ObjectMapper objectMapper; - - private Client elasticSearchClient; - - - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); - - private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMWW"); - - private final ExecutorService executorService; - - static { - SIMPLE_DATE_FORMAT.setTimeZone(GMT); - } - - @Inject - public ElasticSearchV2DAO(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { - this.objectMapper = objectMapper; - this.elasticSearchClient = elasticSearchClient; - this.indexName = config.getProperty("workflow.elasticsearch.index.name", null); - - try { - - initIndex(); - updateIndexName(config); - Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(config), 0, 1, TimeUnit.HOURS); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - int corePoolSize = 6; - int maximumPoolSize = 12; - long keepAliveTime = 1L; - this.executorService = new ThreadPoolExecutor(corePoolSize, - maximumPoolSize, - keepAliveTime, - TimeUnit.MINUTES, - new LinkedBlockingQueue<>()); - } - - private void updateIndexName(Configuration config) { - this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log"); - this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - try { - elasticSearchClient.admin().indices().prepareGetIndex().addIndices(logIndexName).execute().actionGet(); - } catch (IndexNotFoundException infe) { - try { - elasticSearchClient.admin().indices().prepareCreate(logIndexName).execute().actionGet(); - } catch (IndexAlreadyExistsException ignored) { - - } catch (Exception e) { - logger.error(e.getMessage(), e); - } - } - } - - /** - * Initializes the index with required templates and mappings. - */ - private void initIndex() throws Exception { - - //0. Add the index template - GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet(); - if(result.getIndexTemplates().isEmpty()) { - logger.info("Creating the index template 'wfe_template'"); - InputStream stream = ElasticSearchV2DAO.class.getResourceAsStream("/template.json"); - byte[] templateSource = IOUtils.toByteArray(stream); - - try { - elasticSearchClient.admin().indices().preparePutTemplate("wfe_template").setSource(templateSource).execute().actionGet(); - }catch(Exception e) { - logger.error(e.getMessage(), e); - } - } - - //1. Create the required index - try { - elasticSearchClient.admin().indices().prepareGetIndex().addIndices(indexName).execute().actionGet(); - }catch(IndexNotFoundException infe) { - try { - elasticSearchClient.admin().indices().prepareCreate(indexName).execute().actionGet(); - }catch(IndexAlreadyExistsException ignored) {} - } - - //2. 
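A worked example of the rolling log-index name computed in updateIndexName() above: "yyyyMMWW" is year, month, and week-of-month, evaluated in GMT, so the task-log index rotates weekly. Under default (US-style) week rules:

    // Same pattern, time zone, and prefix default as the fields above.
    SimpleDateFormat format = new SimpleDateFormat("yyyyMMWW");
    format.setTimeZone(TimeZone.getTimeZone("GMT"));

    // A date in the fourth week of June 2018 formats as "20180604", so the
    // index created and written to that week is "task_log_20180604".
    String logIndexName = "task_log" + "_" + format.format(new Date());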
Mapping for the workflow document type - GetMappingsResponse response = elasticSearchClient.admin().indices().prepareGetMappings(indexName).addTypes(WORKFLOW_DOC_TYPE).execute().actionGet(); - if(response.mappings().isEmpty()) { - logger.info("Adding the workflow type mappings"); - InputStream stream = ElasticSearchV2DAO.class.getResourceAsStream("/wfe_type.json"); - byte[] bytes = IOUtils.toByteArray(stream); - String source = new String(bytes); - try { - elasticSearchClient.admin().indices().preparePutMapping(indexName).setType(WORKFLOW_DOC_TYPE).setSource(source).execute().actionGet(); - }catch(Exception e) { - logger.error(e.getMessage(), e); - } - } - } - - @Override - public void indexWorkflow(Workflow workflow) { - try { - - String id = workflow.getWorkflowId(); - WorkflowSummary summary = new WorkflowSummary(workflow); - byte[] doc = objectMapper.writeValueAsBytes(summary); - UpdateRequest req = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, id); - req.doc(doc); - req.upsert(doc); - req.retryOnConflict(5); - updateWithRetry(req,"Index workflow into doc_type workflow"); - } catch (Throwable e) { - logger.error("Indexing failed {}", e.getMessage(), e); - } - } - - @Override - public CompletableFuture asyncIndexWorkflow(Workflow workflow) { - return CompletableFuture.runAsync(() -> indexWorkflow(workflow), executorService); - } - - @Override - public void indexTask(Task task) { - try { - String id = task.getTaskId(); - TaskSummary summary = new TaskSummary(task); - byte[] doc = objectMapper.writeValueAsBytes(summary); - - UpdateRequest req = new UpdateRequest(indexName, TASK_DOC_TYPE, id); - req.doc(doc); - req.upsert(doc); - updateWithRetry(req, "Index task into doc_type of task"); - } catch (Throwable e) { - logger.error("Indexing failed {}", e.getMessage(), e); - } - } - - @Override - public CompletableFuture asyncIndexTask(Task task) { - return CompletableFuture.runAsync(() -> indexTask(task), executorService); - } - - @Override - public void addTaskExecutionLogs(List taskExecLogs) { - - if (taskExecLogs.isEmpty()) { - return; - } - try { - BulkRequestBuilder bulkRequestBuilder = elasticSearchClient.prepareBulk(); - for (TaskExecLog taskExecLog : taskExecLogs) { - IndexRequest request = new IndexRequest(logIndexName, LOG_DOC_TYPE); - request.source(objectMapper.writeValueAsBytes(taskExecLog)); - bulkRequestBuilder.add(request); - } - new RetryUtil().retryOnException(() -> bulkRequestBuilder.execute().actionGet(), - null, BulkResponse::hasFailures, RETRY_COUNT,"Indexing all execution logs into doc_type task", "addTaskExecutionLogs"); - } catch (Throwable e) { - logger.error("Indexing failed {}", e.getMessage(), e); - } - - } - - @Override - public CompletableFuture asyncAddTaskExecutionLogs(List logs) { - return CompletableFuture.runAsync(() -> addTaskExecutionLogs(logs), executorService); - } - - @Override - public List getTaskExecutionLogs(String taskId) { - - try { - - QueryBuilder qf; - Expression expression = Expression.fromString("taskId='" + taskId + "'"); - qf = expression.getFilterBuilder(); - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(qf); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery("*"); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(logIndexPrefix + "*").setQuery(fq).setTypes(TASK_DOC_TYPE).addSort(SortBuilders.fieldSort("createdTime").order(SortOrder.ASC).unmappedType("long")); - SearchResponse response = 
srb.execute().actionGet(); - SearchHit[] hits = response.getHits().getHits(); - List logs = new ArrayList<>(hits.length); - for(SearchHit hit : hits) { - String source = hit.getSourceAsString(); - TaskExecLog tel = objectMapper.readValue(source, TaskExecLog.class); - logs.add(tel); - } - - return logs; - - }catch(Exception e) { - logger.error(e.getMessage(), e); - } - - return null; - } - - @Override - public void addMessage(String queue, Message msg) { - - // Run all indexing other than workflow indexing in a separate threadpool - Map doc = new HashMap<>(); - doc.put("messageId", msg.getId()); - doc.put("payload", msg.getPayload()); - doc.put("queue", queue); - doc.put("created", System.currentTimeMillis()); - IndexRequest request = new IndexRequest(logIndexName, MSG_DOC_TYPE); - request.source(doc); - new RetryUtil<>().retryOnException(() -> elasticSearchClient.index(request).actionGet(), null, - null, RETRY_COUNT, "Indexing document in for docType: message", "addMessage"); - - } - - @Override - public void addEventExecution(EventExecution eventExecution) { - try { - byte[] doc = objectMapper.writeValueAsBytes(eventExecution); - String id = eventExecution.getName() + "." + eventExecution.getEvent() + "." + eventExecution.getMessageId() + "." + eventExecution.getId(); - UpdateRequest req = new UpdateRequest(logIndexName, EVENT_DOC_TYPE, id); - req.doc(doc); - req.upsert(doc); - req.retryOnConflict(5); - updateWithRetry(req,"Update Event execution for doc_type event"); - } catch (Throwable e) { - logger.error("Indexing failed {}", e.getMessage(), e); - } - } - - - @Override - public CompletableFuture asyncAddEventExecution(EventExecution eventExecution) { - return CompletableFuture.runAsync(() -> addEventExecution(eventExecution), executorService); - } - - private void updateWithRetry(UpdateRequest request, String operationDescription) { - try { - new RetryUtil().retryOnException( - () -> elasticSearchClient.update(request).actionGet(), - null, - null, - RETRY_COUNT, - operationDescription, - "updateWithRetry" - ); - } catch (Exception e) { - Monitors.error(className, "index"); - logger.error("Indexing failed for {}, {}", request.index(), request.type(), e); - } - } - - @Override - public SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort) { - try { - return search(query, start, count, sort, freeText, WORKFLOW_DOC_TYPE); - } catch (ParserException e) { - throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public SearchResult searchTasks(String query, String freeText, int start, int count, List sort) { - try { - return search(query, start, count, sort, freeText, TASK_DOC_TYPE); - } catch (ParserException e) { - throw new ApplicationException(Code.BACKEND_ERROR, e.getMessage(), e); - } - } - - @Override - public void removeWorkflow(String workflowId) { - try { - - DeleteRequest req = new DeleteRequest(indexName, WORKFLOW_DOC_TYPE, workflowId); - DeleteResponse response = elasticSearchClient.delete(req).actionGet(); - if (!response.isFound()) { - logger.error("Index removal failed - document not found by id " + workflowId); - } - } catch (Throwable e) { - logger.error("Index removal failed failed {}", e.getMessage(), e); - Monitors.error(className, "remove"); - } - } - - @Override - public CompletableFuture asyncRemoveWorkflow(String workflowId) { - return CompletableFuture.runAsync(() -> removeWorkflow(workflowId), executorService); - } - - @Override - public void updateWorkflow(String workflowInstanceId, 
String[] keys, Object[] values) { - if (keys.length != values.length) { - throw new IllegalArgumentException("Number of keys and values should be same."); - } - - UpdateRequest request = new UpdateRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId); - Map source = IntStream.range(0, keys.length).boxed() - .collect(Collectors.toMap(i -> keys[i], i -> values[i])); - request.doc(source); - logger.debug("Updating workflow {} with {}", workflowInstanceId, source); - new RetryUtil<>().retryOnException(() -> elasticSearchClient.update(request).actionGet(), null, null, - RETRY_COUNT, "Updating index for doc_type workflow", "updateWorkflow"); - - } - - @Override - public CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values) { - return CompletableFuture.runAsync(() -> updateWorkflow(workflowInstanceId, keys, values), executorService); - } - - @Override - public String get(String workflowInstanceId, String fieldToGet) { - Object value = null; - GetRequest request = new GetRequest(indexName, WORKFLOW_DOC_TYPE, workflowInstanceId).fields(fieldToGet); - GetResponse response = elasticSearchClient.get(request).actionGet(); - Map fields = response.getFields(); - if(fields == null) { - return null; - } - GetField field = fields.get(fieldToGet); - if(field != null) value = field.getValue(); - if(value != null) { - return value.toString(); - } - return null; - } - - private SearchResult search(String structuredQuery, int start, int size, List sortOptions, String freeTextQuery, String docType) throws ParserException { - QueryBuilder qf = QueryBuilders.matchAllQuery(); - if(StringUtils.isNotEmpty(structuredQuery)) { - Expression expression = Expression.fromString(structuredQuery); - qf = expression.getFilterBuilder(); - } - - BoolQueryBuilder filterQuery = QueryBuilders.boolQuery().must(qf); - QueryStringQueryBuilder stringQuery = QueryBuilders.queryStringQuery(freeTextQuery); - BoolQueryBuilder fq = QueryBuilders.boolQuery().must(stringQuery).must(filterQuery); - final SearchRequestBuilder srb = elasticSearchClient.prepareSearch(indexName).setQuery(fq).setTypes(docType).setNoFields().setFrom(start).setSize(size); - if(sortOptions != null){ - sortOptions.forEach(sortOption -> { - SortOrder order = SortOrder.ASC; - String field = sortOption; - int indx = sortOption.indexOf(':'); - if(indx > 0){ //Can't be 0, need the field name at-least - field = sortOption.substring(0, indx); - order = SortOrder.valueOf(sortOption.substring(indx+1)); - } - srb.addSort(field, order); - }); - } - List result = new LinkedList<>(); - SearchResponse response = srb.execute().actionGet(); - response.getHits().forEach(hit -> result.add(hit.getId())); - long count = response.getHits().getTotalHits(); - return new SearchResult<>(count, result); - } - - - @Override - public List searchArchivableWorkflows(String indexName, long archiveTtlDays) { - QueryBuilder q = QueryBuilders.boolQuery() - .must(QueryBuilders.rangeQuery("endTime").lt(LocalDate.now().minusDays(archiveTtlDays))) - .should(QueryBuilders.termQuery("status", "COMPLETED")) - .should(QueryBuilders.termQuery("status", "FAILED")) - .mustNot(QueryBuilders.existsQuery("archived")) - .minimumNumberShouldMatch(1); - SearchRequestBuilder s = elasticSearchClient.prepareSearch(indexName) - .setTypes("workflow") - .setQuery(q) - .setSize(1000); - - SearchResponse response = s.execute().actionGet(); - SearchHits hits = response.getHits(); - List ids = new LinkedList<>(); - for (SearchHit hit : hits.getHits()) { - ids.add(hit.getId()); - } - 
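The sortOptions convention parsed in search() above packs an optional order after a colon; the field name must come first, hence the indx > 0 check. A worked example of the same substring logic, with an illustrative option string:

    String sortOption = "startTime:DESC";
    SortOrder order = SortOrder.ASC;     // default when no ':' is present
    String field = sortOption;
    int indx = sortOption.indexOf(':');
    if (indx > 0) {
        field = sortOption.substring(0, indx);                      // "startTime"
        order = SortOrder.valueOf(sortOption.substring(indx + 1));  // SortOrder.DESC
    }
    srb.addSort(field, order); // srb is the SearchRequestBuilder from search()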
return ids; - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java deleted file mode 100644 index 63bae25b10..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/Expression.java +++ /dev/null @@ -1,126 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class Expression extends AbstractNode implements FilterProvider { - - private NameValue nameVal; - - private GroupedExpression ge; - - private BooleanOp op; - - private Expression rhs; - - public Expression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = peek(1); - - if(peeked[0] == '('){ - this.ge = new GroupedExpression(is); - }else{ - this.nameVal = new NameValue(is); - } - - peeked = peek(3); - if( isBoolOpr(peeked) ){ - //we have an expression next - this.op = new BooleanOp(is); - this.rhs = new Expression(is); - } - } - - public boolean isBinaryExpr(){ - return this.op != null; - } - - public BooleanOp getOperator(){ - return this.op; - } - - public Expression getRightHandSide(){ - return this.rhs; - } - - public boolean isNameValue(){ - return this.nameVal != null; - } - - public NameValue getNameValue(){ - return this.nameVal; - } - - public GroupedExpression getGroupedExpression(){ - return this.ge; - } - - @Override - public QueryBuilder getFilterBuilder(){ - QueryBuilder lhs = null; - if(nameVal != null){ - lhs = nameVal.getFilterBuilder(); - }else{ - lhs = ge.getFilterBuilder(); - } - - if(this.isBinaryExpr()){ - QueryBuilder rhsFilter = rhs.getFilterBuilder(); - if(this.op.isAnd()){ - return QueryBuilders.boolQuery().must(lhs).must(rhsFilter); - }else{ - return QueryBuilders.boolQuery().should(lhs).should(rhsFilter); - } - }else{ - return lhs; - } - - } - - @Override - public String toString(){ - if(isBinaryExpr()){ - return "" + (nameVal==null?ge:nameVal) + op + rhs; - }else{ - return "" + (nameVal==null?ge:nameVal); - } - } - - public static Expression fromString(String value) throws ParserException{ - return new Expression(new BufferedInputStream(new ByteArrayInputStream(value.getBytes()))); - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FilterProvider.java 
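Expression above is a small recursive-descent parser: an expression is either a name/value comparison or a parenthesized group, optionally followed by a boolean operator and another expression. A sketch of how a caller turns a query string into an Elasticsearch filter; the query text is illustrative, and fromString(...) throws ParserException for malformed input:

    Expression expression = Expression.fromString(
            "workflowType='test' AND (status='FAILED' OR status='TERMINATED')");
    // AND maps to bool/must, OR to bool/should, per getFilterBuilder() above.
    QueryBuilder query = expression.getFilterBuilder();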
b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FilterProvider.java deleted file mode 100644 index fd0a809a41..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/FilterProvider.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import org.elasticsearch.index.query.QueryBuilder; - -/** - * @author Viren - * - */ -public interface FilterProvider { - - /** - * - * @return FilterBuilder for elasticsearch - */ - public QueryBuilder getFilterBuilder(); - -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java deleted file mode 100644 index e5dfa94357..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/GroupedExpression.java +++ /dev/null @@ -1,70 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; - -import org.elasticsearch.index.query.QueryBuilder; - -import java.io.InputStream; - -/** - * @author Viren - * - */ -public class GroupedExpression extends AbstractNode implements FilterProvider { - - private Expression expression; - - public GroupedExpression(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - byte[] peeked = read(1); - assertExpected(peeked, "("); - - this.expression = new Expression(is); - - peeked = read(1); - assertExpected(peeked, ")"); - - } - - @Override - public String toString(){ - return "(" + expression + ")"; - } - - /** - * @return the expression - */ - public Expression getExpression() { - return expression; - } - - @Override - public QueryBuilder getFilterBuilder() { - return expression.getFilterBuilder(); - } - - -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java deleted file mode 100644 index 5e5edc5199..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/index/query/parser/NameValue.java +++ /dev/null @@ -1,125 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ -package com.netflix.conductor.dao.es.index.query.parser; - -import com.netflix.conductor.elasticsearch.query.parser.AbstractNode; -import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; -import com.netflix.conductor.elasticsearch.query.parser.ConstValue; -import com.netflix.conductor.elasticsearch.query.parser.ListConst; -import com.netflix.conductor.elasticsearch.query.parser.Name; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import com.netflix.conductor.elasticsearch.query.parser.Range; - -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.index.query.QueryBuilders; - -import java.io.InputStream; - -/** - * @author Viren - *
- * Represents an expression of the form as below: <br/>
- * key OPR value <br/>
- * OPR is the comparison operator, which can be one of the following: <br/>
- * 	>, <, =, !=, IN, BETWEEN <br/>
- * <br/>
- */ -public class NameValue extends AbstractNode implements FilterProvider { - - private Name name; - - private ComparisonOp op; - - private ConstValue value; - - private Range range; - - private ListConst valueList; - - public NameValue(InputStream is) throws ParserException { - super(is); - } - - @Override - protected void _parse() throws Exception { - this.name = new Name(is); - this.op = new ComparisonOp(is); - - if(this.op.getOperator().equals(ComparisonOp.Operators.BETWEEN.value())){ - this.range = new Range(is); - }if(this.op.getOperator().equals(ComparisonOp.Operators.IN.value())){ - this.valueList = new ListConst(is); - }else{ - this.value = new ConstValue(is); - } - } - - @Override - public String toString() { - return "" + name + op + value; - } - - /** - * @return the name - */ - public Name getName() { - return name; - } - - /** - * @return the op - */ - public ComparisonOp getOp() { - return op; - } - - /** - * @return the value - */ - public ConstValue getValue() { - return value; - } - - @Override - public QueryBuilder getFilterBuilder(){ - if(op.getOperator().equals(ComparisonOp.Operators.EQUALS.value())){ - return QueryBuilders.queryStringQuery(name.getName() + ":" + value.getValue().toString()); - }else if(op.getOperator().equals(ComparisonOp.Operators.BETWEEN.value())){ - return QueryBuilders.rangeQuery(name.getName()).from(range.getLow()).to(range.getHigh()); - }else if(op.getOperator().equals(ComparisonOp.Operators.IN.value())){ - return QueryBuilders.termsQuery(name.getName(), valueList.getList()); - }else if(op.getOperator().equals(ComparisonOp.Operators.NOT_EQUALS.value())){ - return QueryBuilders.queryStringQuery("NOT " + name.getName() + ":" + value.getValue().toString()); - }else if(op.getOperator().equals(ComparisonOp.Operators.GREATER_THAN.value())){ - return QueryBuilders.rangeQuery(name.getName()).from(value.getValue()).includeLower(false).includeUpper(false); - }else if(op.getOperator().equals(ComparisonOp.Operators.IS.value())){ - if(value.getSysConstant().equals(ConstValue.SystemConsts.NULL)){ - return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).mustNot(QueryBuilders.existsQuery(name.getName()))); - } else if(value.getSysConstant().equals(ConstValue.SystemConsts.NOT_NULL)){ - return QueryBuilders.boolQuery().mustNot(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).must(QueryBuilders.existsQuery(name.getName()))); - } - }else if(op.getOperator().equals(ComparisonOp.Operators.LESS_THAN.value())){ - return QueryBuilders.rangeQuery(name.getName()).to(value.getValue()).includeLower(false).includeUpper(false); - } - - throw new IllegalStateException("Incorrect/unsupported operators"); - } - - -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/utils/RetryUtil.java b/es2-persistence/src/main/java/com/netflix/conductor/dao/es/utils/RetryUtil.java deleted file mode 100644 index e5d8c9c161..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/dao/es/utils/RetryUtil.java +++ /dev/null @@ -1,126 +0,0 @@ -/** - * Copyright 2018 Netflix, Inc. - *
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - *
- * http://www.apache.org/licenses/LICENSE-2.0 - *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.netflix.conductor.dao.es.utils; - -import com.github.rholder.retry.Attempt; -import com.github.rholder.retry.BlockStrategies; -import com.github.rholder.retry.RetryException; -import com.github.rholder.retry.RetryListener; -import com.github.rholder.retry.Retryer; -import com.github.rholder.retry.RetryerBuilder; -import com.github.rholder.retry.StopStrategies; -import com.github.rholder.retry.WaitStrategies; -import com.google.common.base.Predicate; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Optional; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; - -/** - * Utility class that deals with retries in case of transient failures. - * - * Note: - * Create a new {@link RetryUtil} for every operation that needs to retried for the stated retries. - * - * Limitations: - *
- * <ul>
- * <li>
- * The current implementation does not make a distinction between transient and non-transient errors.
- * There is no categorization of transient and non-transient failure in Conductor.
- * Once the exception hierarchy is available in Conductor, this class implementation can be changed to
- * short circuit the non-transient errors.
- * </li>
- * <li>
- * Currently only a couple of wait strategies are implemented: {@link WaitStrategies#exponentialWait()} and
- * {@link WaitStrategies#randomWait(long, TimeUnit)}, with fixed attributes for each of the strategies.
- * </li>
- * <li>
- * The retry limit is not configurable and is hard coded to 3.
- * </li>
- * </ul>
- * - * @param The type of the object that will be returned by the flaky supplier function - */ -public class RetryUtil { - - private static final Logger logger = LoggerFactory.getLogger(RetryUtil.class); - - private AtomicInteger internalNumberOfRetries = new AtomicInteger(); - - /** - * A helper method which has the ability to execute a flaky supplier function and retry in case of failures. - * - * @param supplierCommand: Any function that is flaky and needs multiple retries. - * @param throwablePredicate: A Guava {@link Predicate} housing the exceptional - * criteria to perform informed filtering before retrying. - * @param resultRetryPredicate: a predicate to be evaluated for a valid condition of the expected result - * @param retryCount: Number of times the function is to be retried before failure - * @param shortDescription: A short description of the function that will be used in logging and error propagation. - * The intention of this description is to provide context for Operability. - * @param operationName: The name of the function for traceability in logs - * @return an instance of return type of the supplierCommand - * @throws RuntimeException in case of failed attempts to get T, which needs to be returned by the supplierCommand. - * The instance of the returned exception has: - *
- * <ul>
- * <li>A message with shortDescription and operationName with the number of retries made</li>
- * <li>And a reference to the original exception generated during the last {@link Attempt} of the retry</li>
- * </ul>
- */ - public T retryOnException(Supplier supplierCommand, - Predicate throwablePredicate, - Predicate resultRetryPredicate, - int retryCount, - String shortDescription, String operationName) throws RuntimeException { - - Retryer retryer = RetryerBuilder.newBuilder() - .retryIfException(Optional.ofNullable(throwablePredicate).orElse(exception -> true)) - .retryIfResult(Optional.ofNullable(resultRetryPredicate).orElse(result -> false)) - .withWaitStrategy(WaitStrategies.join( - WaitStrategies.exponentialWait(1000, 90, TimeUnit.SECONDS), - WaitStrategies.randomWait(100, TimeUnit.MILLISECONDS, 500, TimeUnit.MILLISECONDS) - )) - .withStopStrategy(StopStrategies.stopAfterAttempt(retryCount)) - .withBlockStrategy(BlockStrategies.threadSleepStrategy()) - .withRetryListener(new RetryListener() { - @Override - public void onRetry(Attempt attempt) { - logger.debug("Attempt # {}, {} millis since first attempt. Operation: {}, description:{}", - attempt.getAttemptNumber(), attempt.getDelaySinceFirstAttempt(), operationName, shortDescription); - internalNumberOfRetries.incrementAndGet(); - } - }) - .build(); - - try { - return retryer.call(supplierCommand::get); - } catch (ExecutionException executionException) { - String errorMessage = String.format("Operation '%s:%s' failed for the %d time in RetryUtil", operationName, - shortDescription, internalNumberOfRetries.get()); - logger.debug(errorMessage); - throw new RuntimeException(errorMessage, executionException); - } catch (RetryException retryException) { - String errorMessage = String.format("Operation '%s:%s' failed after retrying %d times, retry limit %d", operationName, - shortDescription, internalNumberOfRetries.get(), 3); - logger.debug(errorMessage, retryException.getLastFailedAttempt().getExceptionCause()); - throw new RuntimeException(errorMessage, retryException.getLastFailedAttempt().getExceptionCause()); - } - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java deleted file mode 100644 index 141c2da2c4..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/ElasticSearchV2Module.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -/** - * - */ -package com.netflix.conductor.elasticsearch.es2; - -import com.google.inject.AbstractModule; -import com.google.inject.Provides; - -import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.es.index.ElasticSearchV2DAO; -import com.netflix.conductor.elasticsearch.ElasticSearchModule; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; - -import org.elasticsearch.client.Client; -import org.elasticsearch.client.transport.TransportClient; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.transport.InetSocketTransportAddress; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.net.InetAddress; - -import javax.inject.Singleton; - - -/** - * @author Viren Provider for the elasticsearch transport client - */ -public class ElasticSearchV2Module extends AbstractModule { - - private static Logger log = LoggerFactory.getLogger(ElasticSearchV2Module.class); - - @Provides - @Singleton - public Client getClient(Configuration config) throws Exception { - - String clusterAddress = config.getProperty("workflow.elasticsearch.url", ""); - if (clusterAddress.equals("")) { - log.warn("workflow.elasticsearch.url is not set. Indexing will remain DISABLED."); - } - - Settings.Builder settings = Settings.settingsBuilder(); - settings.put("client.transport.ignore_cluster_name", true); - settings.put("client.transport.sniff", true); - - TransportClient tc = TransportClient.builder().settings(settings).build(); - String[] hosts = clusterAddress.split(","); - for (String host : hosts) { - String[] hostparts = host.split(":"); - String hostname = hostparts[0]; - int hostport = 9200; - if (hostparts.length == 2) hostport = Integer.parseInt(hostparts[1]); - tc.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostname), hostport)); - } - - return tc; - } - - @Override - protected void configure() { - install(new ElasticSearchModule()); - bind(IndexDAO.class).to(ElasticSearchV2DAO.class); - bind(EmbeddedElasticSearchProvider.class).to(EmbeddedElasticSearchV2Provider.class); - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java deleted file mode 100644 index 71b6ee72c2..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2.java +++ /dev/null @@ -1,102 +0,0 @@ -/** - * Copyright 2017 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on - * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the - * specific language governing permissions and limitations under the License. 
- */ -package com.netflix.conductor.elasticsearch.es2; - -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; - -import org.elasticsearch.client.Client; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.node.Node; -import org.elasticsearch.node.NodeBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; - -public class EmbeddedElasticSearchV2 implements EmbeddedElasticSearch { - - private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV2.class); - - private Node instance; - private Client client; - private File dataDir; - - @Override - public void start() throws Exception { - start(DEFAULT_CLUSTER_NAME, DEFAULT_HOST, DEFAULT_PORT, true); - } - - public synchronized void start(String clusterName, String host, int port, boolean enableTransportClient) throws Exception { - - if (instance != null && !instance.isClosed()) { - logger.info("Elastic Search is already running on port {}", getPort()); - return; - } - - final Settings settings = getSettings(clusterName, host, port, enableTransportClient); - setupDataDir(settings.get(ES_PATH_DATA)); - - logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name")); - instance = NodeBuilder.nodeBuilder().data(true).local(enableTransportClient ? false : true).settings(settings).client(false).node(); - instance.start(); - Runtime.getRuntime().addShutdownHook(new Thread() { - @Override - public void run() { - instance.close(); - } - }); - logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort()); - client = instance.client(); - } - - private Settings getSettings(String clusterName, String host, int port, boolean enableTransportClient) throws IOException { - dataDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "data").toFile(); - File homeDir = Files.createTempDirectory(clusterName + "_" + System.currentTimeMillis() + "-home").toFile(); - return Settings.builder() - .put("cluster.name", clusterName) - .put("http.host", host) - .put("http.port", port) - .put(ES_PATH_DATA, dataDir.getAbsolutePath()) - .put(ES_PATH_HOME, homeDir.getAbsolutePath()) - .put("http.enabled", true) - .put("script.inline", "on") - .put("script.indexed", "on") - .build(); - } - - public Client getClient() { - if (instance == null || instance.isClosed()) { - logger.error("Embedded ElasticSearch is not Initialized and started, please call start() method first"); - return null; - } - return client; - } - - private String getPort() { - return instance.settings().get("http.port"); - } - - @Override - public synchronized void stop() { - - if (instance != null && !instance.isClosed()) { - String port = getPort(); - logger.info("Stopping Elastic Search"); - instance.close(); - logger.info("Elastic Search on port {} stopped", port); - } - - } -} diff --git a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java b/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java deleted file mode 100644 index 11f2cc2c68..0000000000 --- a/es2-persistence/src/main/java/com/netflix/conductor/elasticsearch/es2/EmbeddedElasticSearchV2Provider.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.netflix.conductor.elasticsearch.es2; - -import com.netflix.conductor.core.config.Configuration; -import 
com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; -import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; - -import java.util.Optional; - -import javax.inject.Inject; - -public class EmbeddedElasticSearchV2Provider implements EmbeddedElasticSearchProvider { - private final ElasticSearchConfiguration configuration; - - @Inject - public EmbeddedElasticSearchV2Provider(ElasticSearchConfiguration configuration) { - this.configuration = configuration; - } - - @Override - public Optional get() { - return isMemoryAndVersion() ? Optional.of(new EmbeddedElasticSearchV2()) : Optional.empty(); - } - - private boolean isMemoryAndVersion(){ - return configuration.getVersion() == 2 && configuration.getDB().equals(Configuration.DB.MEMORY); - } -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java deleted file mode 100644 index 5c8c1d568e..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestExpression.java +++ /dev/null @@ -1,159 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * - */ - -import com.netflix.conductor.elasticsearch.query.parser.AbstractParserTest; -import com.netflix.conductor.elasticsearch.query.parser.ConstValue; - -import org.junit.Test; - -import java.io.BufferedInputStream; -import java.io.ByteArrayInputStream; -import java.io.InputStream; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -/** - * @author Viren - * - */ -public class TestExpression extends AbstractParserTest { - - @Test - public void test() throws Exception{ - String test = "type='IMAGE' AND subType ='sdp' AND (metadata.width > 50 OR metadata.height > 50)"; - //test = "type='IMAGE' AND subType ='sdp'"; - //test = "(metadata.type = 'IMAGE')"; - InputStream is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expr = new Expression(is); - - System.out.println(expr); - - assertTrue(expr.isBinaryExpr()); - assertNull(expr.getGroupedExpression()); - assertNotNull(expr.getNameValue()); - - NameValue nv = expr.getNameValue(); - assertEquals("type", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"IMAGE\"", nv.getValue().getValue()); - - Expression rhs = expr.getRightHandSide(); - assertNotNull(rhs); - assertTrue(rhs.isBinaryExpr()); - - nv = rhs.getNameValue(); - assertNotNull(nv); //subType = sdp - assertNull(rhs.getGroupedExpression()); - assertEquals("subType", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"sdp\"", nv.getValue().getValue()); - - assertEquals("AND", rhs.getOperator().getOperator()); - rhs = rhs.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - GroupedExpression ge = rhs.getGroupedExpression(); - assertNotNull(ge); - expr = ge.getExpression(); - assertNotNull(expr); - - assertTrue(expr.isBinaryExpr()); - nv = expr.getNameValue(); - assertNotNull(nv); - assertEquals("metadata.width", nv.getName().getName()); - assertEquals(">", nv.getOp().getOperator()); - assertEquals("50", nv.getValue().getValue()); - - - - assertEquals("OR", expr.getOperator().getOperator()); - rhs = expr.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - nv = rhs.getNameValue(); - assertNotNull(nv); - - assertEquals("metadata.height", nv.getName().getName()); - assertEquals(">", nv.getOp().getOperator()); - assertEquals("50", nv.getValue().getValue()); - - } - - @Test - public void testWithSysConstants() throws Exception{ - String test = "type='IMAGE' AND subType ='sdp' AND description IS null"; - InputStream is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - Expression expr = new Expression(is); - - System.out.println(expr); - - assertTrue(expr.isBinaryExpr()); - assertNull(expr.getGroupedExpression()); - assertNotNull(expr.getNameValue()); - - NameValue nv = expr.getNameValue(); - assertEquals("type", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"IMAGE\"", nv.getValue().getValue()); - - Expression rhs = expr.getRightHandSide(); - assertNotNull(rhs); - assertTrue(rhs.isBinaryExpr()); - - nv = rhs.getNameValue(); - assertNotNull(nv); //subType = sdp - assertNull(rhs.getGroupedExpression()); - assertEquals("subType", nv.getName().getName()); - assertEquals("=", nv.getOp().getOperator()); - assertEquals("\"sdp\"", nv.getValue().getValue()); - - assertEquals("AND", 
rhs.getOperator().getOperator()); - rhs = rhs.getRightHandSide(); - assertNotNull(rhs); - assertFalse(rhs.isBinaryExpr()); - GroupedExpression ge = rhs.getGroupedExpression(); - assertNull(ge); - nv = rhs.getNameValue(); - assertNotNull(nv); - assertEquals("description", nv.getName().getName()); - assertEquals("IS", nv.getOp().getOperator()); - ConstValue cv = nv.getValue(); - assertNotNull(cv); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NULL); - - test = "description IS not null"; - is = new BufferedInputStream(new ByteArrayInputStream(test.getBytes())); - expr = new Expression(is); - - System.out.println(expr); - nv = expr.getNameValue(); - assertNotNull(nv); - assertEquals("description", nv.getName().getName()); - assertEquals("IS", nv.getOp().getOperator()); - cv = nv.getValue(); - assertNotNull(cv); - assertEquals(cv.getSysConstant(), ConstValue.SystemConsts.NOT_NULL); - - } - -} diff --git a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestGroupedExpression.java b/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestGroupedExpression.java deleted file mode 100644 index a7fc26b935..0000000000 --- a/es2-persistence/src/test/java/com/netflix/conductor/dao/es/index/query/parser/TestGroupedExpression.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.netflix.conductor.dao.es.index.query.parser; /** - * Copyright 2016 Netflix, Inc. - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * - */ - -import org.junit.Test; - -/** - * @author Viren - * - */ -public class TestGroupedExpression { - - @Test - public void test(){ - - } -} diff --git a/es5-persistence/README.md b/es5-persistence/README.md index dce62966b2..272dd6d516 100644 --- a/es5-persistence/README.md +++ b/es5-persistence/README.md @@ -1,3 +1,2 @@ ## Usage -Set `workflow.elasticsearch.version=5` in Server module's configuration options. \ No newline at end of file diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java index 486e4a7498..943d0b5b9f 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java @@ -19,10 +19,10 @@ public EmbeddedElasticSearchV5Provider(ElasticSearchConfiguration configuration) @Override public Optional get() { - return isMemoryAndVersion() ? Optional.of(new EmbeddedElasticSearchV5()) : Optional.empty(); + return isEmbedded() ? 
Optional.of(new EmbeddedElasticSearchV5()) : Optional.empty(); } - private boolean isMemoryAndVersion(){ - return configuration.getVersion() == 5 && configuration.getDB().equals(Configuration.DB.MEMORY); + private boolean isEmbedded(){ + return configuration.getDB().equals(Configuration.DB.MEMORY); } } diff --git a/server/README.md b/server/README.md index 699f39c9db..6b4610f085 100644 --- a/server/README.md +++ b/server/README.md @@ -9,7 +9,6 @@ At the minimum, provide these options through VM or Config file: `workflow.elasticsearch.url` `workflow.elasticsearch.index.name` -`workflow.elasticsearch.version` (Defaults to 2.X; Provided implementations for major versions 2.X and 5.X) ### Database persistence model Possible values are memory, redis, redis_cluster and dynomite. diff --git a/server/build.gradle b/server/build.gradle index 6121bac609..49d99e7a99 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -25,7 +25,6 @@ dependencies { compile project(':conductor-redis-persistence') compile project(':conductor-mysql-persistence') compile project(':conductor-contribs') - compile project(':conductor-es2-persistence') compile project(':conductor-es5-persistence') compile project(':conductor-grpc-server') @@ -77,7 +76,6 @@ build.dependsOn('shadowJar') task server(type: JavaExec) { systemProperty 'workflow.elasticsearch.url', 'localhost:9300' // Switch between Elasticsearch versions 2 & 5 with major version number. - systemProperty 'workflow.elasticsearch.version', '2' systemProperty 'loadSample', 'true' systemProperties System.properties main = 'com.netflix.conductor.server.Main' diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java index 8738ed7fb3..26daab0580 100644 --- a/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java +++ b/server/src/main/java/com/netflix/conductor/bootstrap/ModulesProvider.java @@ -5,8 +5,6 @@ import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.dao.RedisWorkflowModule; -import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; -import com.netflix.conductor.elasticsearch.es2.ElasticSearchV2Module; import com.netflix.conductor.elasticsearch.es5.ElasticSearchV5Module; import com.netflix.conductor.mysql.MySQLWorkflowModule; import com.netflix.conductor.server.DynomiteClusterModule; @@ -31,12 +29,12 @@ public class ModulesProvider implements Provider> { private static final Logger logger = LoggerFactory.getLogger(ModulesProvider.class); private final Configuration configuration; - + @Inject - public ModulesProvider(Configuration configuration){ + public ModulesProvider(Configuration configuration) { this.configuration = configuration; } - + @Override public List get() { List modulesToLoad = new ArrayList<>(); @@ -57,7 +55,7 @@ private List selectModulesToLoad() { final String message = "Invalid db name: " + configuration.getDBString() + ", supported values are: " + Arrays.toString(Configuration.DB.values()); logger.error(message); - throw new ProvisionException(message,ie); + throw new ProvisionException(message, ie); } switch (database) { @@ -84,14 +82,7 @@ private List selectModulesToLoad() { break; } - if (configuration.getIntProperty( - ElasticSearchConfiguration.ELASTIC_SEARCH_VERSION_PROPERTY_NAME, - 2 - ) == 5) { - modules.add(new ElasticSearchV5Module()); - } else { - modules.add(new ElasticSearchV2Module()); - } + modules.add(new ElasticSearchV5Module()); if 
(configuration.getJerseyEnabled()) { modules.add(new JerseyModule()); diff --git a/server/src/main/resources/server.properties b/server/src/main/resources/server.properties index edcbe4b3ae..c1ee3503b0 100644 --- a/server/src/main/resources/server.properties +++ b/server/src/main/resources/server.properties @@ -36,8 +36,5 @@ workflow.elasticsearch.url=localhost:9003 #Name of the elasticsearch cluster workflow.elasticsearch.index.name=conductor -#Elasticsearch major release version. -workflow.elasticsearch.version=2 - -# For a single node dynomite or redis server, make sure the value below is set to same as rack specified in the "workflow.dynomite.cluster.hosts" property. +# For a single node dynomite or redis server, make sure the value below is set to same as rack specified in the "workflow.dynomite.cluster.hosts" property. EC2_AVAILABILTY_ZONE=us-east-1c diff --git a/settings.gradle b/settings.gradle index dce6513d5b..102c295398 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,6 @@ rootProject.name='conductor' -include 'client','common','contribs','core', 'elasticsearch', 'es2-persistence','es5-persistence','jersey' +include 'client','common','contribs','core', 'elasticsearch', 'es5-persistence','jersey' include 'mysql-persistence', 'redis-persistence','server','test-harness','ui' include 'protogen' include 'grpc', 'grpc-server', 'grpc-client' diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java index cb0dbe94ac..488d67365c 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java @@ -10,7 +10,6 @@ private static void setupSystemProperties() { System.setProperty("workflow.elasticsearch.index.name", "conductor"); System.setProperty("workflow.namespace.prefix", "integration-test"); System.setProperty("db", "memory"); - System.setProperty("workflow.elasticsearch.version", "5"); } public static void setup() { diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index b3dbcca896..43162730fc 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -9,7 +9,6 @@ ext { revDynoCore = '1.5.9' revDynoJedis = '1.5.9' revDynoQueues = '1.0.8' - revElasticSearch2 = '2.4.6' revElasticSearch5 = '5.6.8' revElasticSearch5Client = '5.6.8' revEurekaClient = '1.8.7' From 43157f78f67646d4a4f43e9570265809aca14d65 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 22 Jun 2018 12:49:17 +0200 Subject: [PATCH 066/163] Re-combine the elasticsearch module into the es5-persistence module. Now that the es2-persistence module has been removed there isn't really a need to have a higher level elastic search module for shared code. I left the more abstract package structure in place for now because I think that it still makes sense. 
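To make the retained package layout concrete: only the owning Gradle module changes in this commit, not the Java packages, so existing call sites compile unchanged. A minimal, hypothetical consumer (class name invented for illustration):

    import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
    import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;

    // Hypothetical consumer class: both imports previously resolved against the
    // 'conductor-elasticsearch' module and now resolve, unchanged, against
    // 'conductor-es5-persistence'.
    public class ElasticSearchConsumerSketch {
        private ElasticSearchConfiguration configuration;
        private EmbeddedElasticSearch embeddedElasticSearch;
    }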
--- elasticsearch/build.gradle | 4 ---- es5-persistence/build.gradle | 8 ++++++-- .../conductor/dao/es5/index/ElasticSearchDAOV5.java | 3 ++- .../elasticsearch/ElasticSearchConfiguration.java | 0 .../conductor/elasticsearch/ElasticSearchModule.java | 0 .../conductor/elasticsearch/EmbeddedElasticSearch.java | 0 .../elasticsearch/EmbeddedElasticSearchProvider.java | 0 .../SystemPropertiesElasticSearchConfiguration.java | 0 .../elasticsearch/query/parser/AbstractNode.java | 0 .../conductor/elasticsearch/query/parser/BooleanOp.java | 0 .../elasticsearch/query/parser/ComparisonOp.java | 0 .../conductor/elasticsearch/query/parser/ConstValue.java | 0 .../query/parser/FunctionThrowingException.java | 0 .../conductor/elasticsearch/query/parser/ListConst.java | 0 .../conductor/elasticsearch/query/parser/Name.java | 0 .../elasticsearch/query/parser/ParserException.java | 0 .../conductor/elasticsearch/query/parser/Range.java | 0 .../elasticsearch/query/parser/AbstractParserTest.java | 0 .../elasticsearch/query/parser/TestBooleanOp.java | 3 --- .../elasticsearch/query/parser/TestComparisonOp.java | 2 -- .../elasticsearch/query/parser/TestConstValue.java | 5 ----- .../conductor/elasticsearch/query/parser/TestName.java | 2 -- settings.gradle | 2 +- 23 files changed, 9 insertions(+), 20 deletions(-) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java (100%) rename {elasticsearch => es5-persistence}/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java (100%) rename {elasticsearch => es5-persistence}/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java (100%) rename {elasticsearch => es5-persistence}/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java (90%) rename {elasticsearch => es5-persistence}/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java (91%) rename {elasticsearch => 
es5-persistence}/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java (91%) rename {elasticsearch => es5-persistence}/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java (94%) diff --git a/elasticsearch/build.gradle b/elasticsearch/build.gradle index 29a8e087ce..5e0b388fea 100644 --- a/elasticsearch/build.gradle +++ b/elasticsearch/build.gradle @@ -1,10 +1,6 @@ dependencies { compile project(':conductor-core') - compile "commons-io:commons-io:${revCommonsIo}" compile "com.google.inject:guice:${revGuice}" - //ES5 Dependency - compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" - compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" } diff --git a/es5-persistence/build.gradle b/es5-persistence/build.gradle index 93e6f03474..7c0b7275d8 100644 --- a/es5-persistence/build.gradle +++ b/es5-persistence/build.gradle @@ -1,8 +1,12 @@ dependencies { - compile project(':conductor-elasticsearch') + compile project(':conductor-core') + + compile "commons-io:commons-io:${revCommonsIo}" compile "org.elasticsearch:elasticsearch:${revElasticSearch5}" compile "org.elasticsearch.client:transport:${revElasticSearch5}" - testCompile project(':conductor-elasticsearch').sourceSets.test.output + //ES5 Dependency + compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" + compile "org.apache.logging.log4j:log4j-core:${revLog4jCore}" } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java index bb2306a95a..728019d614 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java @@ -31,7 +31,6 @@ import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.es5.index.query.parser.Expression; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; import com.netflix.conductor.metrics.Monitors; import org.apache.commons.io.IOUtils; @@ -87,6 +86,8 @@ import javax.inject.Inject; import javax.inject.Singleton; +import com.netflix.conductor.elasticsearch.query.parser.ParserException; + /** * @author Viren */ diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java 
rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearchProvider.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/SystemPropertiesElasticSearchConfiguration.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/AbstractNode.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/BooleanOp.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ComparisonOp.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ConstValue.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/FunctionThrowingException.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java similarity index 100% rename from 
elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ListConst.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Name.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/ParserException.java diff --git a/elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java similarity index 100% rename from elasticsearch/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java rename to es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/query/parser/Range.java diff --git a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java similarity index 100% rename from elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java rename to es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/AbstractParserTest.java diff --git a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java similarity index 90% rename from elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java rename to es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java index ba37f189d7..9c0ef2acb0 100644 --- a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java +++ b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestBooleanOp.java @@ -18,9 +18,6 @@ */ package com.netflix.conductor.elasticsearch.query.parser; -import com.netflix.conductor.elasticsearch.query.parser.BooleanOp; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; - import org.junit.Test; import static org.junit.Assert.assertEquals; diff --git a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java similarity index 91% rename from elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java rename to es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java index 88dce7583f..39d954a0f8 100644 --- a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java +++ 
b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestComparisonOp.java @@ -18,8 +18,6 @@ */ package com.netflix.conductor.elasticsearch.query.parser; -import com.netflix.conductor.elasticsearch.query.parser.ComparisonOp; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; import org.junit.Test; diff --git a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java similarity index 91% rename from elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java rename to es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java index bd6b6d5d3a..8cc81641a3 100644 --- a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java +++ b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestConstValue.java @@ -18,11 +18,6 @@ */ package com.netflix.conductor.elasticsearch.query.parser; -import com.netflix.conductor.elasticsearch.query.parser.ConstValue; -import com.netflix.conductor.elasticsearch.query.parser.ListConst; -import com.netflix.conductor.elasticsearch.query.parser.ParserException; -import com.netflix.conductor.elasticsearch.query.parser.Range; - import org.junit.Test; import java.util.List; diff --git a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java similarity index 94% rename from elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java rename to es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java index 10d570a585..d3ea73c145 100644 --- a/elasticsearch/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java +++ b/es5-persistence/src/test/java/com/netflix/conductor/elasticsearch/query/parser/TestName.java @@ -18,8 +18,6 @@ */ package com.netflix.conductor.elasticsearch.query.parser; -import com.netflix.conductor.elasticsearch.query.parser.Name; - import org.junit.Test; import static org.junit.Assert.assertEquals; diff --git a/settings.gradle b/settings.gradle index 102c295398..a0e4a24ea8 100644 --- a/settings.gradle +++ b/settings.gradle @@ -1,6 +1,6 @@ rootProject.name='conductor' -include 'client','common','contribs','core', 'elasticsearch', 'es5-persistence','jersey' +include 'client','common','contribs','core', 'es5-persistence','jersey' include 'mysql-persistence', 'redis-persistence','server','test-harness','ui' include 'protogen' include 'grpc', 'grpc-server', 'grpc-client' From 13396c29fcaeb0151fe6fa505f0d85bfa1449d40 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 22 Jun 2018 16:32:11 +0200 Subject: [PATCH 067/163] Make the Embedded Elastic Search more configurable. This allows us to run the e2e test in parallel rather than serial and is also just more flexible. One current downside is that I explicitly set the Transport TCP port to the http port + 100. Ideally this would be configurable, or we would just stop using the Transport client all together since it is deprecated. 
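As a usage sketch of the new knobs (port values illustrative, matching the pattern the End2End tests below adopt): a test suite can pin its embedded node to a dedicated HTTP port and point the client at that port + 100, since the transport TCP port is currently derived that way.

    import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;

    public class EmbeddedPortSetupSketch {
        // Hypothetical helper: give each suite its own HTTP port; the embedded
        // node binds its transport port to httpPort + 100, so the transport
        // client URL must target the derived port.
        public static void configure(int httpPort) {
            System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME,
                    String.valueOf(httpPort));
            System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME,
                    "localhost:" + (httpPort + 100));
        }
    }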
--- .../ElasticSearchConfiguration.java | 46 ++++++++++++++++++- .../elasticsearch/EmbeddedElasticSearch.java | 7 --- .../es5/EmbeddedElasticSearchV5.java | 26 ++++++++--- .../es5/EmbeddedElasticSearchV5Provider.java | 10 +++- test-harness/build.gradle | 2 +- .../tests/integration/End2EndGrpcTests.java | 6 ++- .../tests/integration/End2EndTests.java | 3 ++ .../tests/utils/TestEnvironment.java | 1 - 8 files changed, 80 insertions(+), 21 deletions(-) diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 8a187f7821..f5bd8df0a7 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -10,11 +10,53 @@ public interface ElasticSearchConfiguration extends Configuration { String ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.index.name"; String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; - default String getURL(){ + String EMBEDDED_DATA_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.path"; + String EMBEDDED_DATA_PATH_DEFAULT_VALUE = "path.data"; + + String EMBEDDED_HOME_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.home"; + String EMBEDDED_HOME_PATH_DEFAULT_VALUE = "path.home"; + + String EMBEDDED_PORT_PROPERTY_NAME = "workflow.elasticsearch.embedded.port"; + int EMBEDDED_PORT_DEFAULT_VALUE = 9200; + + String EMBEDDED_CLUSTER_NAME_PROPERTY_NAME = "workflow.elasticsearch.embedded.cluster.name"; + String EMBEDDED_CLUSTER_NAME_DEFAULT_VALUE = "elasticsearch_test"; + + String EMBEDDED_HOST_PROPERTY_NAME = "workflow.elasticsearch.embedded.host"; + String EMBEDDED_HOST_DEFAULT_VALUE = "127.0.0.1"; + + String EMBEDDED_SETTINGS_FILE_PROPERTY_NAME = "workflow.elasticsearch.embedded.settings.file"; + String EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE = "embedded-es.yml"; + + default String getURL() { return getProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, ELASTIC_SEARCH_URL_DEFAULT_VALUE); } - default String getIndexName(){ + default String getIndexName() { return getProperty(ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME, ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE); } + + default String getEmbeddedDataPath() { + return getProperty(EMBEDDED_DATA_PATH_PROPERTY_NAME, EMBEDDED_DATA_PATH_DEFAULT_VALUE); + } + + default String getEmbeddedHomePath() { + return getProperty(EMBEDDED_HOME_PATH_PROPERTY_NAME, EMBEDDED_HOME_PATH_DEFAULT_VALUE); + } + + default int getEmbeddedPort() { + return getIntProperty(EMBEDDED_PORT_PROPERTY_NAME, EMBEDDED_PORT_DEFAULT_VALUE); + } + + default String getEmbeddedClusterName() { + return getProperty(EMBEDDED_CLUSTER_NAME_PROPERTY_NAME, EMBEDDED_CLUSTER_NAME_DEFAULT_VALUE); + } + + default String getEmbeddedHost() { + return getProperty(EMBEDDED_HOST_PROPERTY_NAME, EMBEDDED_HOST_DEFAULT_VALUE); + } + + default String getEmbeddedSettingsFile() { + return getProperty(EMBEDDED_SETTINGS_FILE_PROPERTY_NAME, EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE); + } } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java index 2317076761..90fe3a09cc 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java +++ 
b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java @@ -14,13 +14,6 @@ public interface EmbeddedElasticSearch extends Lifecycle { Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearch.class); - String ES_PATH_DATA = "path.data"; - String ES_PATH_HOME = "path.home"; - - int DEFAULT_PORT = 9200; - String DEFAULT_CLUSTER_NAME = "elasticsearch_test"; - String DEFAULT_HOST = "127.0.0.1"; - String DEFAULT_SETTING_FILE = "embedded-es.yml"; default void cleanDataDir(String path) { try { diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java index 73a3b3db7c..3a1913fae7 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java @@ -12,6 +12,7 @@ */ package com.netflix.conductor.elasticsearch.es5; +import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; import org.elasticsearch.client.Client; @@ -35,10 +36,20 @@ public class EmbeddedElasticSearchV5 implements EmbeddedElasticSearch { private static final Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearchV5.class); + private final String clusterName; + private final String host; + private final int port; + private Node instance; private Client client; private File dataDir; + public EmbeddedElasticSearchV5(String clusterName, String host, int port){ + this.clusterName = clusterName; + this.host = host; + this.port = port; + } + private class PluginConfigurableNode extends Node { public PluginConfigurableNode(Settings preparedSettings, Collection> classpathPlugins) { super(InternalSettingsPreparer.prepareEnvironment(preparedSettings, null), classpathPlugins); @@ -47,18 +58,18 @@ public PluginConfigurableNode(Settings preparedSettings, Collection get() { - return isEmbedded() ? Optional.of(new EmbeddedElasticSearchV5()) : Optional.empty(); + return isEmbedded() ? Optional.of( + new EmbeddedElasticSearchV5( + configuration.getEmbeddedClusterName(), + configuration.getEmbeddedHost(), + configuration.getEmbeddedPort() + ) + ) : Optional.empty(); } - private boolean isEmbedded(){ + private boolean isEmbedded() { return configuration.getDB().equals(Configuration.DB.MEMORY); } } diff --git a/test-harness/build.gradle b/test-harness/build.gradle index 9f37bf8410..d2bb313705 100644 --- a/test-harness/build.gradle +++ b/test-harness/build.gradle @@ -24,7 +24,7 @@ dependencies { test { // Because tests in the module bind to ports they shouldn't be executed in parallel. 
- maxParallelForks = 1 +// maxParallelForks = 1 } task server(type: JavaExec) { diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 98f162586a..26ba401cb9 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -36,9 +36,11 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import com.netflix.conductor.grpc.server.GRPCServer; +import com.netflix.conductor.grpc.server.GRPCServerConfiguration; import com.netflix.conductor.grpc.server.GRPCServerProvider; import com.netflix.conductor.tests.utils.TestEnvironment; @@ -67,7 +69,9 @@ public class End2EndGrpcTests { @BeforeClass public static void setup() throws Exception { TestEnvironment.setup(); - System.setProperty("conductor.grpc.server.enabled", "true"); + System.setProperty(GRPCServerConfiguration.ENABLED_PROPERTY_NAME, "true"); + System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9202"); + System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9302"); Injector bootInjector = Guice.createInjector(new BootstrapModule()); Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 2d3c518b8d..58c510c273 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -34,6 +34,7 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.run.WorkflowSummary; +import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import com.netflix.conductor.jetty.server.JettyServer; @@ -63,6 +64,8 @@ public class End2EndTests { @BeforeClass public static void setup() throws Exception { TestEnvironment.setup(); + System.setProperty(ElasticSearchConfiguration.EMBEDDED_PORT_PROPERTY_NAME, "9201"); + System.setProperty(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME, "localhost:9301"); Injector bootInjector = Guice.createInjector(new BootstrapModule()); Injector serverInjector = Guice.createInjector(bootInjector.getInstance(ModulesProvider.class).get()); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java index 488d67365c..daf4d16130 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestEnvironment.java @@ -6,7 +6,6 @@ private 
TestEnvironment() {} private static void setupSystemProperties() { System.setProperty("EC2_REGION", "us-east-1"); System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - System.setProperty("workflow.elasticsearch.url", "localhost:9300"); System.setProperty("workflow.elasticsearch.index.name", "conductor"); System.setProperty("workflow.namespace.prefix", "integration-test"); System.setProperty("db", "memory"); From b5d6bfc911da8b4ee9cc73b561e529ba869605b2 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 25 Jun 2018 11:44:05 +0200 Subject: [PATCH 068/163] Add providers and centralize host parsing. Move the logic for parsing hosts from the configured to the configuration class and more strongly type the results to URI. I am then using this logic for the providers that can provide either a Transport or Http client, although the current code uses the transport client which will be deprecated. --- es5-persistence/build.gradle | 1 + .../ElasticSearchConfiguration.java | 16 +++++ .../elasticsearch/ElasticSearchModule.java | 4 ++ .../ElasticSearchRestClientProvider.java | 34 ++++++++++ .../ElasticSearchTransportClientProvider.java | 58 ++++++++++++++++ .../es5/ElasticSearchV5Module.java | 68 +++---------------- 6 files changed, 124 insertions(+), 57 deletions(-) create mode 100644 es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java create mode 100644 es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java diff --git a/es5-persistence/build.gradle b/es5-persistence/build.gradle index 7c0b7275d8..a36128e0b4 100644 --- a/es5-persistence/build.gradle +++ b/es5-persistence/build.gradle @@ -5,6 +5,7 @@ dependencies { compile "org.elasticsearch:elasticsearch:${revElasticSearch5}" compile "org.elasticsearch.client:transport:${revElasticSearch5}" + compile "org.elasticsearch.client:elasticsearch-rest-high-level-client:${revElasticSearch5}" //ES5 Dependency compile "org.apache.logging.log4j:log4j-api:${revLog4jApi}" diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index f5bd8df0a7..87a373793b 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -2,6 +2,11 @@ import com.netflix.conductor.core.config.Configuration; +import java.net.URI; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + public interface ElasticSearchConfiguration extends Configuration { String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url"; @@ -32,6 +37,17 @@ default String getURL() { return getProperty(ELASTIC_SEARCH_URL_PROPERTY_NAME, ELASTIC_SEARCH_URL_DEFAULT_VALUE); } + default List getURIs(){ + + String clusterAddress = getURL(); + + String[] hosts = clusterAddress.split(","); + + return Arrays.stream(hosts).map( host -> + (host.startsWith("http://") || host.startsWith("tcp://")) ? 
+        return Arrays.stream(hosts).map(host ->
+                (host.startsWith("http://") || host.startsWith("tcp://")) ?
+                        URI.create(host) : URI.create("tcp://" + host)
+        ).collect(Collectors.toList());
+    }
+
     default String getIndexName() {
         return getProperty(ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME, ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE);
     }
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java
index 91d5a5c304..b385442b66 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchModule.java
@@ -1,10 +1,14 @@
 package com.netflix.conductor.elasticsearch;

 import com.google.inject.AbstractModule;
+import com.google.inject.Singleton;
+
+import org.elasticsearch.client.Client;

 public class ElasticSearchModule extends AbstractModule {
     @Override
     protected void configure() {
         bind(ElasticSearchConfiguration.class).to(SystemPropertiesElasticSearchConfiguration.class);
+        bind(Client.class).toProvider(ElasticSearchTransportClientProvider.class).in(Singleton.class);
     }
 }
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java
new file mode 100644
index 0000000000..a21e0bc2a6
--- /dev/null
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchRestClientProvider.java
@@ -0,0 +1,34 @@
+package com.netflix.conductor.elasticsearch;
+
+import org.apache.http.HttpHost;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestHighLevelClient;
+
+import java.net.URI;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+public class ElasticSearchRestClientProvider implements Provider<RestHighLevelClient> {
+    private final ElasticSearchConfiguration configuration;
+
+    @Inject
+    public ElasticSearchRestClientProvider(ElasticSearchConfiguration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public RestHighLevelClient get() {
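+        // The high-level client wraps a low-level REST client that is built
+        // from the cluster URIs supplied by the configuration.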
+        RestClient lowLevelRestClient = RestClient.builder(convertToHttpHosts(configuration.getURIs())).build();
+        return new RestHighLevelClient(lowLevelRestClient);
+    }
+
+    private HttpHost[] convertToHttpHosts(List<URI> hosts) {
+        List<HttpHost> list = hosts.stream().map(host ->
+                new HttpHost(host.getHost(), host.getPort()))
+                .collect(Collectors.toList());
+        return list.toArray(new HttpHost[0]);
+    }
+}
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java
new file mode 100644
index 0000000000..4e60ac4460
--- /dev/null
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchTransportClientProvider.java
@@ -0,0 +1,58 @@
+package com.netflix.conductor.elasticsearch;
+
+import com.google.inject.ProvisionException;
+
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.transport.TransportClient;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.InetSocketTransportAddress;
+import org.elasticsearch.transport.client.PreBuiltTransportClient;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.UnknownHostException;
+import java.util.List;
+import java.util.Optional;
+
+import javax.inject.Inject;
+import javax.inject.Provider;
+
+public class ElasticSearchTransportClientProvider implements Provider<Client> {
+    private static final Logger logger = LoggerFactory.getLogger(ElasticSearchTransportClientProvider.class);
+
+    private final ElasticSearchConfiguration configuration;
+
+    @Inject
+    public ElasticSearchTransportClientProvider(ElasticSearchConfiguration configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public Client get() {
+
+        Settings settings = Settings.builder()
+                .put("client.transport.ignore_cluster_name", true)
+                .put("client.transport.sniff", true)
+                .build();
+
+        TransportClient tc = new PreBuiltTransportClient(settings);
+
+        List<URI> clusterAddresses = configuration.getURIs();
+
+        if (clusterAddresses.isEmpty()) {
+            logger.warn(ElasticSearchConfiguration.ELASTIC_SEARCH_URL_PROPERTY_NAME +
+                    " is not set. Indexing will remain DISABLED.");
+        }
+        for (URI hostAddress : clusterAddresses) {
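+            // URI.getPort() returns -1 when the address omits a port, so fall
+            // back to the default port in that case.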
+            int port = Optional.of(hostAddress.getPort()).filter(p -> p != -1).orElse(9200);
+            try {
+                tc.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostAddress.getHost()), port));
+            } catch (UnknownHostException uhe) {
+                throw new ProvisionException("Invalid host: " + hostAddress.getHost(), uhe);
+            }
+        }
+        return tc;
+    }
+}
diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java
index d543691767..3592fd1bca 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/ElasticSearchV5Module.java
@@ -1,44 +1,27 @@
 /**
  * Copyright 2016 Netflix, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
  */
 /**
- *
+ *
  */
 package com.netflix.conductor.elasticsearch.es5;

 import com.google.inject.AbstractModule;
-import com.google.inject.Provides;

-import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.dao.IndexDAO;
 import com.netflix.conductor.dao.es5.index.ElasticSearchDAOV5;
 import com.netflix.conductor.elasticsearch.ElasticSearchModule;
 import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider;

-import org.elasticsearch.client.Client;
-import org.elasticsearch.client.transport.TransportClient;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.InetSocketTransportAddress;
-import org.elasticsearch.transport.client.PreBuiltTransportClient;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.net.InetAddress;
-
-import javax.inject.Singleton;
-
 /**
  * @author Viren
@@ -46,39 +29,10 @@
  */
 public class ElasticSearchV5Module extends AbstractModule {

-    private static Logger log = LoggerFactory.getLogger(ElasticSearchV5Module.class);
-
-    @Provides
-    @Singleton
-    public Client getClient(Configuration config) throws Exception {
-
-        String clusterAddress = config.getProperty("workflow.elasticsearch.url", "");
-        if(clusterAddress.equals("")) {
-            log.warn("workflow.elasticsearch.url is not set. Indexing will remain DISABLED.");
-        }
-
-        Settings settings = Settings.builder()
-                .put("client.transport.ignore_cluster_name",true)
-                .put("client.transport.sniff", true)
-                .build();
-
-        TransportClient tc = new PreBuiltTransportClient(settings);
-        String[] hosts = clusterAddress.split(",");
-        for (String host : hosts) {
-            String[] hostparts = host.split(":");
-            String hostname = hostparts[0];
-            int hostport = 9200;
-            if (hostparts.length == 2) hostport = Integer.parseInt(hostparts[1]);
-            tc.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName(hostname), hostport));
-        }
-        return tc;
-
-    }
-
-    @Override
-    protected void configure() {
-        install(new ElasticSearchModule());
+    @Override
+    protected void configure() {
+        install(new ElasticSearchModule());
         bind(IndexDAO.class).to(ElasticSearchDAOV5.class);
         bind(EmbeddedElasticSearchProvider.class).to(EmbeddedElasticSearchV5Provider.class);
-    }
+    }
 }

From d3562591027f3132095e335ee9c4fc22bdd7244d Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Wed, 27 Jun 2018 10:45:38 +0200
Subject: [PATCH 069/163] Address review feedback about some of the code
 documentation and extra build file boilerplate.
---
 .../netflix/conductor/service/Lifecycle.java  | 17 ++++++++++++++++-
 grpc-client/build.gradle                      | 12 ------------
 2 files changed, 16 insertions(+), 13 deletions(-)

diff --git a/core/src/main/java/com/netflix/conductor/service/Lifecycle.java b/core/src/main/java/com/netflix/conductor/service/Lifecycle.java
index e84a0fca57..8680ee1599 100644
--- a/core/src/main/java/com/netflix/conductor/service/Lifecycle.java
+++ b/core/src/main/java/com/netflix/conductor/service/Lifecycle.java
@@ -1,7 +1,20 @@
 package com.netflix.conductor.service;

+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * This interface provides a means to help handle objects, especially those that are injected, that have a lifecycle
+ * component. Guice explicitly does not support this and recommends a pattern much like this one. This should be used
+ * by anything that needs to create resources or clean them up when the application is started or stopped, such as
+ * server listeners, clients, etc.
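+ *
+ * <p>A minimal usage sketch (the class and the {@code server} resource below
+ * are hypothetical, not part of this patch):
+ * <pre>
+ * public class MyServerLifecycle implements Lifecycle {
+ *     private Server server; // some resource to manage
+ *
+ *     public void start() throws Exception {
+ *         server.start();          // acquire the resource
+ *         registerShutdownHook();  // default hook calls stop() on JVM exit
+ *     }
+ *
+ *     public void stop() throws Exception {
+ *         server.shutdown();       // release the resource
+ *     }
+ * }
+ * </pre>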
+ *
+ * @see ModulesShouldBeFastAndSideEffectFree
+ */
 public interface Lifecycle {

+    Logger logger = LoggerFactory.getLogger(Lifecycle.class);
+
     default void start() throws Exception {
         registerShutdownHook();
     }
@@ -12,7 +25,9 @@ default void registerShutdownHook() {
         Runtime.getRuntime().addShutdownHook(new Thread(() -> {
             try {
                 stop();
-            } catch (Exception e) {}
+            } catch (Exception e) {
+                logger.error("Error when trying to shutdown a lifecycle component: " + this.getClass().getName(), e);
+            }
         }));
     }
 }
diff --git a/grpc-client/build.gradle b/grpc-client/build.gradle
index 4714e44cfa..033fdd2fd3 100644
--- a/grpc-client/build.gradle
+++ b/grpc-client/build.gradle
@@ -1,15 +1,3 @@
-plugins {
-    id 'java'
-}
-
-group 'com.netflix'
-
-sourceCompatibility = 1.8
-
-repositories {
-    mavenCentral()
-}
-
 dependencies {
     compile project(':conductor-common')
     compile project(':conductor-core')

From e96f8f3914e5fc8a61dcc8bfe881ae77448ab637 Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Wed, 27 Jun 2018 12:48:29 +0200
Subject: [PATCH 070/163] Clean up some logic and error handling.
---
 .../conductor/elasticsearch/EmbeddedElasticSearch.java | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java
index 90fe3a09cc..578309fb6d 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/EmbeddedElasticSearch.java
@@ -16,14 +16,15 @@ public interface EmbeddedElasticSearch extends Lifecycle {
     Logger logger = LoggerFactory.getLogger(EmbeddedElasticSearch.class);

     default void cleanDataDir(String path) {
+        File dataDir = new File(path);
+
         try {
             logger.info("Deleting contents of data dir {}", path);
-            File f = new File(path);
-            if (f.exists()) {
-                FileUtils.cleanDirectory(new File(path));
+            if (dataDir.exists()) {
+                FileUtils.cleanDirectory(dataDir);
             }
         } catch (IOException e) {
-            logger.error("Failed to delete ES data dir");
+            logger.error(String.format("Failed to delete ES data dir: %s", dataDir.getAbsolutePath()), e);
         }
     }

From 900b8d21d422bd038e5f41b6d4ce9e173a314330 Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Wed, 27 Jun 2018 12:48:51 +0200
Subject: [PATCH 071/163] Remove unused client field.
---
 .../elasticsearch/es5/EmbeddedElasticSearchV5.java | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
index 3a1913fae7..f78070771b 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
@@ -15,7 +15,6 @@
 import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
 import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;

-import org.elasticsearch.client.Client;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.node.InternalSettingsPreparer;
 import org.elasticsearch.node.Node;
@@ -41,7 +40,6 @@ public class EmbeddedElasticSearchV5 implements EmbeddedElasticSearch {
     private final int port;

     private Node instance;
-    private Client client;
     private File dataDir;

     public EmbeddedElasticSearchV5(String clusterName, String host, int port){
@@ -85,7 +83,6 @@ public void run() {
             }
         });
         logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort());
-        client = instance.client();
     }

     private Settings getSettings(String clusterName, String host, int port) throws IOException {
@@ -109,14 +106,6 @@ private Settings getSettings(String clusterName, String host, int port) throws I
         return settingsBuilder.build();
     }

-    public Client getClient() {
-        if (instance == null || instance.isClosed()) {
-            logger.error("Embedded ElasticSearch is not Initialized and started, please call start() method first");
-            return null;
-        }
-        return client;
-    }
-
     private String getPort() {
         return instance.settings().get("http.port");
     }

From 33c672685b5b629c2ad36fcd7037ff1fd5e51c22 Mon Sep 17 00:00:00 2001
From: Greg Orzell
Date: Wed, 27 Jun 2018 13:06:03 +0200
Subject: [PATCH 072/163] Update double start logic to throw rather than just
 logging an error, since this is a coding error.
---
 .../elasticsearch/es5/EmbeddedElasticSearchV5.java | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
index f78070771b..3bf20aa319 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
@@ -61,9 +61,14 @@ public void start() throws Exception {

     public synchronized void start(String clusterName, String host, int port) throws Exception {

-        if (instance != null && !instance.isClosed()) {
-            logger.info("Elastic Search is already running on port {}", getPort());
-            return;
+        if (instance != null) {
+            String msg = String.format(
+                    "An instance of this Embedded Elastic Search server is already running on port: %s. " +
+                            "It must be stopped before you can call start again.",
+                    getPort()
+            );
+            logger.error(msg);
+            throw new IllegalStateException(msg);
         }

         final Settings settings = getSettings(clusterName, host, port);
@@ -117,6 +122,7 @@ public synchronized void stop() throws Exception {
         String port = getPort();
         logger.info("Stopping Elastic Search");
         instance.close();
+        instance = null;
         logger.info("Elastic Search on port {} stopped", port);
     }

From 0871ef8d5d8f40db7a20fd6fbede3cc75fb9958a Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Wed, 27 Jun 2018 15:59:07 +0200
Subject: [PATCH 073/163] grpc: Abstract all request and response messages
---
 .../conductor/client/grpc/MetadataClient.java | 16 ++--
 .../conductor/client/grpc/TaskClient.java     | 21 +++--
 .../conductor/client/grpc/WorkflowClient.java | 16 ++--
 .../grpc/server/service/EventServiceImpl.java | 29 +++---
 .../grpc/server/service/GRPCHelper.java       |  5 --
 .../server/service/MetadataServiceImpl.java   | 57 ++++++------
 .../grpc/server/service/TaskServiceImpl.java  | 48 ++++++----
 .../server/service/WorkflowServiceImpl.java   | 74 ++++++++-------
 grpc/src/main/proto/grpc/event_service.proto  | 43 ++++++---
 .../main/proto/grpc/metadata_service.proto    | 54 ++++++++---
 grpc/src/main/proto/grpc/search.proto         | 10 +--
 grpc/src/main/proto/grpc/task_service.proto   | 57 ++++++++----
 .../main/proto/grpc/workflow_service.proto    | 89 +++++++++++++++----
 13 files changed, 343 insertions(+), 176 deletions(-)

diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java
index 858f633a9f..df854c2652 100644
--- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/MetadataClient.java
@@ -28,7 +28,9 @@ public MetadataClient(String address, int port) {
     public void registerWorkflowDef(WorkflowDef workflowDef) {
         Preconditions.checkNotNull(workflowDef, "Workflow definition cannot be null");
         stub.createWorkflow(
-                protoMapper.toProto(workflowDef)
+                MetadataServicePb.CreateWorkflowRequest.newBuilder()
+                        .setWorkflow(protoMapper.toProto(workflowDef))
+                        .build()
         );
     }

@@ -65,7 +67,7 @@ public WorkflowDef getWorkflowDef(String name, @Nullable Integer version) {
         if (version != null)
             request.setVersion(version);

-        return protoMapper.fromProto(stub.getWorkflow(request.build()));
+        return protoMapper.fromProto(stub.getWorkflow(request.build()).getWorkflow());
     }

     /**
@@ -90,7 +92,11 @@ public void registerTaskDefs(List<TaskDef> taskDefs) {
      */
     public void updateTaskDef(TaskDef taskDef) {
         Preconditions.checkNotNull(taskDef, "Task definition cannot be null");
-        stub.updateTask(protoMapper.toProto(taskDef));
+        stub.updateTask(
+                MetadataServicePb.UpdateTaskRequest.newBuilder()
+                        .setTask(protoMapper.toProto(taskDef))
+                        .build()
+        );
     }

     /**
@@ -105,7 +111,7 @@ public TaskDef getTaskDef(String taskType) {
                 stub.getTask(MetadataServicePb.GetTaskRequest.newBuilder()
                         .setTaskType(taskType)
                         .build()
-                )
+                ).getTask()
         );
     }

@@ -117,7 +123,7 @@ public TaskDef getTaskDef(String taskType) {
      */
     public void unregisterTaskDef(String taskType) {
         Preconditions.checkArgument(StringUtils.isNotBlank(taskType), "Task type cannot be blank");
-        stub.deleteTask(MetadataServicePb.GetTaskRequest.newBuilder()
+        stub.deleteTask(MetadataServicePb.DeleteTaskRequest.newBuilder()
                 .setTaskType(taskType)
                 .build()
         );
diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java index c604cfa50c..4eef15464c 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java @@ -6,6 +6,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.grpc.MetadataServicePb; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.TaskServicePb; import com.netflix.conductor.proto.TaskPb; @@ -36,14 +37,14 @@ public Task pollTask(String taskType, String workerId, String domain) { Preconditions.checkArgument(StringUtils.isNotBlank(domain), "Domain cannot be blank"); Preconditions.checkArgument(StringUtils.isNotBlank(workerId), "Worker id cannot be blank"); - TaskPb.Task task = stub.poll( + TaskServicePb.PollResponse response = stub.poll( TaskServicePb.PollRequest.newBuilder() .setTaskType(taskType) .setWorkerId(workerId) .setDomain(domain) .build() ); - return protoMapper.fromProto(task); + return protoMapper.fromProto(response.getTask()); } /** @@ -122,13 +123,13 @@ public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceNam Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "Workflow id cannot be blank"); Preconditions.checkArgument(StringUtils.isNotBlank(taskReferenceName), "Task reference name cannot be blank"); - TaskPb.Task task = stub.getPendingTaskForWorkflow( + TaskServicePb.PendingTaskResponse response = stub.getPendingTaskForWorkflow( TaskServicePb.PendingTaskRequest.newBuilder() .setWorkflowId(workflowId) .setTaskRefName(taskReferenceName) .build() ); - return protoMapper.fromProto(task); + return protoMapper.fromProto(response.getTask()); } /** @@ -138,7 +139,10 @@ public Task getPendingTaskForWorkflow(String workflowId, String taskReferenceNam */ public void updateTask(TaskResult taskResult) { Preconditions.checkNotNull(taskResult, "Task result cannot be null"); - stub.updateTask(protoMapper.toProto(taskResult)); + stub.updateTask(TaskServicePb.UpdateTaskRequest.newBuilder() + .setResult(protoMapper.toProto(taskResult)) + .build() + ); } /** @@ -183,7 +187,7 @@ public void logMessageForTask(String taskId, String logMessage) { public List getTaskLogs(String taskId) { Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank"); return stub.getTaskLogs( - TaskServicePb.TaskId.newBuilder().setTaskId(taskId).build() + TaskServicePb.GetTaskLogsRequest.newBuilder().setTaskId(taskId).build() ).getLogsList() .stream() .map(protoMapper::fromProto) @@ -199,7 +203,10 @@ public List getTaskLogs(String taskId) { public Task getTaskDetails(String taskId) { Preconditions.checkArgument(StringUtils.isNotBlank(taskId), "Task id cannot be blank"); return protoMapper.fromProto( - stub.getTask(TaskServicePb.TaskId.newBuilder().setTaskId(taskId).build()) + stub.getTask(TaskServicePb.GetTaskRequest.newBuilder() + .setTaskId(taskId) + .build() + ).getTask() ); } diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java index 4e92ac341c..55d7438345 100644 --- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java +++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/WorkflowClient.java @@ -147,7 +147,7 @@ public 
List getWorkflowsByTimePeriod(String workflowName, int version, L */ public void runDecider(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.decideWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + stub.decideWorkflow(WorkflowServicePb.DecideWorkflowRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -160,7 +160,7 @@ public void runDecider(String workflowId) { */ public void pauseWorkflow(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.pauseWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + stub.pauseWorkflow(WorkflowServicePb.PauseWorkflowRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -173,7 +173,7 @@ public void pauseWorkflow(String workflowId) { */ public void resumeWorkflow(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.resumeWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + stub.resumeWorkflow(WorkflowServicePb.ResumeWorkflowRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -215,7 +215,7 @@ public String rerunWorkflow(RerunWorkflowRequest rerunWorkflowRequest) { */ public void restart(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.restartWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + stub.restartWorkflow(WorkflowServicePb.RestartWorkflowRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -228,7 +228,7 @@ public void restart(String workflowId) { */ public void retryLastFailedTask(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.retryWorkflow(WorkflowServicePb.WorkflowId.newBuilder() + stub.retryWorkflow(WorkflowServicePb.RetryWorkflowRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -242,7 +242,7 @@ public void retryLastFailedTask(String workflowId) { */ public void resetCallbacksForInProgressTasks(String workflowId) { Preconditions.checkArgument(StringUtils.isNotBlank(workflowId), "workflow id cannot be blank"); - stub.resetWorkflowCallbacks(WorkflowServicePb.WorkflowId.newBuilder() + stub.resetWorkflowCallbacks(WorkflowServicePb.ResetWorkflowCallbacksRequest.newBuilder() .setWorkflowId(workflowId) .build() ); @@ -288,7 +288,7 @@ public SearchResult search( @Nullable String sort, @Nullable String freeText, @Nonnull String query) { Preconditions.checkNotNull(query, "query cannot be null"); - SearchPb.SearchRequest.Builder request = SearchPb.SearchRequest.newBuilder(); + SearchPb.Request.Builder request = SearchPb.Request.newBuilder(); request.setQuery(query); if (start != null) request.setStart(start); @@ -299,7 +299,7 @@ public SearchResult search( if (freeText != null) request.setFreeText(freeText); - SearchPb.WorkflowSummarySearchResult result = stub.search(request.build()); + WorkflowServicePb.WorkflowSummarySearchResult result = stub.search(request.build()); return new SearchResult( result.getTotalHits(), result.getResultsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java index 8b4af7b1ef..9211334ef0 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java +++ 
b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java @@ -33,38 +33,41 @@ public EventServiceImpl(MetadataService service, EventProcessor ep) { } @Override - public void addEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { - service.addEventHandler(protoMapper.fromProto(req)); - grpcHelper.emptyResponse(response); + public void addEventHandler(EventServicePb.AddEventHandlerRequest req, StreamObserver response) { + service.addEventHandler(protoMapper.fromProto(req.getHandler())); + response.onNext(EventServicePb.AddEventHandlerResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void updateEventHandler(EventHandlerPb.EventHandler req, StreamObserver response) { - service.updateEventHandler(protoMapper.fromProto(req)); - grpcHelper.emptyResponse(response); + public void updateEventHandler(EventServicePb.UpdateEventHandlerRequest req, StreamObserver response) { + service.updateEventHandler(protoMapper.fromProto(req.getHandler())); + response.onNext(EventServicePb.UpdateEventHandlerResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, StreamObserver response) { + public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, StreamObserver response) { service.removeEventHandlerStatus(req.getName()); - grpcHelper.emptyResponse(response); + response.onNext(EventServicePb.RemoveEventHandlerResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void getEventHandlers(Empty req, StreamObserver response) { + public void getEventHandlers(EventServicePb.GetEventHandlersRequest req, StreamObserver response) { service.getEventHandlers().stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } @Override - public void getEventHandlersForEvent(EventServicePb.GetEventHandlersRequest req, StreamObserver response) { + public void getEventHandlersForEvent(EventServicePb.GetEventHandlersForEventRequest req, StreamObserver response) { service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly()) .stream().map(protoMapper::toProto).forEach(response::onNext); response.onCompleted(); } @Override - public void getQueues(Empty req, StreamObserver response) { + public void getQueues(EventServicePb.GetQueuesRequest req, StreamObserver response) { response.onNext( EventServicePb.GetQueuesResponse.newBuilder() .putAllEventToQueueUri(ep.getQueues()) @@ -74,7 +77,7 @@ public void getQueues(Empty req, StreamObserver response) { + public void getQueueSizes(EventServicePb.GetQueueSizesRequest req, StreamObserver response) { EventServicePb.GetQueueSizesResponse.Builder builder = EventServicePb.GetQueueSizesResponse.newBuilder(); for (Map.Entry> pair : ep.getQueueSizes().entrySet()) { builder.putEventToQueueInfo(pair.getKey(), @@ -87,7 +90,7 @@ public void getQueueSizes(Empty req, StreamObserver response) { + public void getQueueProviders(EventServicePb.GetQueueProvidersRequest req, StreamObserver response) { response.onNext( EventServicePb.GetQueueProvidersResponse.newBuilder() .addAllProviders(EventQueues.providers()) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java index 6015783c04..5a01c4d1d1 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java +++ 
b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java @@ -93,11 +93,6 @@ void onError(StreamObserver response, Throwable t) { response.onError(throwableToStatusException(t)); } - void emptyResponse(StreamObserver response) { - response.onNext(Empty.getDefaultInstance()); - response.onCompleted(); - } - String optional(@Nonnull String str) { return str.isEmpty() ? null : str; } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index 7cfd60f80d..0d382b187b 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -31,33 +31,32 @@ public MetadataServiceImpl(MetadataService service) { } @Override - public void createWorkflow(WorkflowDefPb.WorkflowDef req, StreamObserver response) { - try { - service.registerWorkflowDef(protoMapper.fromProto(req)); - grpcHelper.emptyResponse(response); - } catch (Exception e) { - grpcHelper.onError(response, e); - } + public void createWorkflow(MetadataServicePb.CreateWorkflowRequest req, StreamObserver response) { + WorkflowDef workflow = protoMapper.fromProto(req.getWorkflow()); + service.registerWorkflowDef(workflow); + response.onNext(MetadataServicePb.CreateWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) { + public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) { List workflows = req.getDefsList().stream() .map(protoMapper::fromProto).collect(Collectors.toList()); - try { - service.updateWorkflowDef(workflows); - grpcHelper.emptyResponse(response); - } catch (Exception e) { - grpcHelper.onError(response, e); - } + service.updateWorkflowDef(workflows); + response.onNext(MetadataServicePb.UpdateWorkflowsResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver response) { + public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver response) { WorkflowDef def = service.getWorkflowDef(req.getName(), grpcHelper.optional(req.getVersion())); if (def != null) { - response.onNext(protoMapper.toProto(def)); + WorkflowDefPb.WorkflowDef workflow = protoMapper.toProto(def); + response.onNext(MetadataServicePb.GetWorkflowResponse.newBuilder() + .setWorkflow(workflow) + .build() + ); response.onCompleted(); } else { response.onError(Status.NOT_FOUND @@ -68,24 +67,31 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver } @Override - public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { + public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { service.registerTaskDef( req.getDefsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) ); - grpcHelper.emptyResponse(response); + response.onNext(MetadataServicePb.CreateTasksResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void updateTask(TaskDefPb.TaskDef req, StreamObserver response) { - service.updateTaskDef(protoMapper.fromProto(req)); - grpcHelper.emptyResponse(response); + public void updateTask(MetadataServicePb.UpdateTaskRequest req, StreamObserver 
response) { + TaskDef task = protoMapper.fromProto(req.getTask()); + service.updateTaskDef(task); + response.onNext(MetadataServicePb.UpdateTaskResponse.getDefaultInstance()); + response.onCompleted(); } @Override - public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { + public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { TaskDef def = service.getTaskDef(req.getTaskType()); if (def != null) { - response.onNext(protoMapper.toProto(def)); + TaskDefPb.TaskDef task = protoMapper.toProto(def); + response.onNext(MetadataServicePb.GetTaskResponse.newBuilder() + .setTask(task) + .build() + ); response.onCompleted(); } else { response.onError(Status.NOT_FOUND @@ -96,8 +102,9 @@ public void getTask(MetadataServicePb.GetTaskRequest req, StreamObserver response) { + public void deleteTask(MetadataServicePb.DeleteTaskRequest req, StreamObserver response) { service.unregisterTaskDef(req.getTaskType()); - grpcHelper.emptyResponse(response); + response.onNext(MetadataServicePb.DeleteTaskResponse.getDefaultInstance()); + response.onCompleted(); } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index f968af8105..618c341f56 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -47,13 +47,16 @@ public TaskServiceImpl(ExecutionService taskService, QueueDAO queues, Configurat } @Override - public void poll(TaskServicePb.PollRequest req, StreamObserver response) { + public void poll(TaskServicePb.PollRequest req, StreamObserver response) { try { List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), grpcHelper.optional(req.getDomain()), 1, POLL_TIMEOUT_MS); if (!tasks.isEmpty()) { TaskPb.Task t = protoMapper.toProto(tasks.get(0)); - response.onNext(t); + response.onNext(TaskServicePb.PollResponse.newBuilder() + .setTask(t) + .build() + ); } response.onCompleted(); } catch (Exception e) { @@ -105,10 +108,14 @@ public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamO } @Override - public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, StreamObserver response) { + public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, StreamObserver response) { try { Task t = taskService.getPendingTaskForWorkflow(req.getTaskRefName(), req.getWorkflowId()); - response.onNext(protoMapper.toProto(t)); + response.onNext( + TaskServicePb.PendingTaskResponse.newBuilder() + .setTask(protoMapper.toProto(t)) + .build() + ); response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); @@ -116,14 +123,15 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre } @Override - public void updateTask(TaskResultPb.TaskResult req, StreamObserver response) { + public void updateTask(TaskServicePb.UpdateTaskRequest req, StreamObserver response) { try { - TaskResult task = protoMapper.fromProto(req); + TaskResult task = protoMapper.fromProto(req.getResult()); taskService.updateTask(task); response.onNext( - TaskServicePb.TaskId.newBuilder() - .setTaskId(task.getTaskId()).build() + TaskServicePb.UpdateTaskResponse.newBuilder() + .setTaskId(task.getTaskId()) + .build() ); response.onCompleted(); } catch (Exception e) { @@ -143,15 +151,16 @@ public void 
ackTask(TaskServicePb.AckTaskRequest req, StreamObserver response) { + public void addLog(TaskServicePb.AddLogRequest req, StreamObserver response) { taskService.log(req.getTaskId(), req.getLog()); + response.onNext(TaskServicePb.AddLogResponse.getDefaultInstance()); response.onCompleted(); } @Override - public void getTaskLogs(TaskServicePb.TaskId req, StreamObserver response) { + public void getTaskLogs(TaskServicePb.GetTaskLogsRequest req, StreamObserver response) { List logs = taskService.getTaskLogs(req.getTaskId()); - response.onNext(TaskServicePb.GetLogsResponse.newBuilder() + response.onNext(TaskServicePb.GetTaskLogsResponse.newBuilder() .addAllLogs(logs.stream().map(protoMapper::toProto)::iterator) .build() ); @@ -159,7 +168,7 @@ public void getTaskLogs(TaskServicePb.TaskId req, StreamObserver response) { + public void getTask(TaskServicePb.GetTaskRequest req, StreamObserver response) { try { Task task = taskService.getTask(req.getTaskId()); if (task == null) { @@ -168,7 +177,11 @@ public void getTask(TaskServicePb.TaskId req, StreamObserver respon .asRuntimeException() ); } else { - response.onNext(protoMapper.toProto(task)); + response.onNext( + TaskServicePb.GetTaskResponse.newBuilder() + .setTask(protoMapper.toProto(task)) + .build() + ); response.onCompleted(); } } catch (Exception e) { @@ -178,9 +191,10 @@ public void getTask(TaskServicePb.TaskId req, StreamObserver respon } @Override - public void removeTaskFromQueue(TaskServicePb.RemoveTaskRequest req, StreamObserver response) { + public void removeTaskFromQueue(TaskServicePb.RemoveTaskRequest req, StreamObserver response) { taskService.removeTaskfromQueue(req.getTaskType(), req.getTaskId()); - grpcHelper.emptyResponse(response); + response.onNext(TaskServicePb.RemoveTaskResponse.getDefaultInstance()); + response.onCompleted(); } @Override @@ -195,7 +209,7 @@ public void getQueueSizesForTasks(TaskServicePb.QueueSizesRequest req, StreamObs } @Override - public void getQueueInfo(Empty req, StreamObserver response) { + public void getQueueInfo(TaskServicePb.QueueInfoRequest req, StreamObserver response) { Map queueInfo = queues.queuesDetail().entrySet().stream() .sorted(Comparator.comparing(Map.Entry::getKey)) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue, (v1, v2) -> v1, HashMap::new)); @@ -209,7 +223,7 @@ public void getQueueInfo(Empty req, StreamObserver response) { + public void getQueueAllInfo(TaskServicePb.QueueAllInfoRequest req, StreamObserver response) { Map>> info = queues.queuesDetailVerbose(); TaskServicePb.QueueAllInfoResponse.Builder queuesBuilder = TaskServicePb.QueueAllInfoResponse.newBuilder(); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 2b878578e9..c80c1bcd7e 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -46,15 +46,8 @@ public WorkflowServiceImpl(WorkflowExecutor executor, ExecutionService service, this.maxSearchSize = config.getIntProperty("workflow.max.search.size", 5_000); } - private WorkflowServicePb.WorkflowId newWorkflowId(String id) { - return WorkflowServicePb.WorkflowId - .newBuilder() - .setWorkflowId(id) - .build(); - } - @Override - public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, StreamObserver response) { + public void 
startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, StreamObserver response) { // TODO: better handling of optional 'version' final StartWorkflowRequest request = protoMapper.fromProto(pbRequest); WorkflowDef def = metadata.getWorkflowDef(request.getName(), grpcHelper.optional(request.getVersion())); @@ -70,7 +63,10 @@ public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, String id = executor.startWorkflow( def.getName(), def.getVersion(), request.getCorrelationId(), request.getInput(), null, request.getTaskToDomain()); - response.onNext(newWorkflowId(id)); + response.onNext(WorkflowServicePb.StartWorkflowResponse.newBuilder() + .setWorkflowId(id) + .build() + ); response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); @@ -110,10 +106,11 @@ public void getWorkflowStatus(WorkflowServicePb.GetWorkflowStatusRequest req, St } @Override - public void removeWorkflow(WorkflowServicePb.RemoveWorkflowRequest req, StreamObserver response) { + public void removeWorkflow(WorkflowServicePb.RemoveWorkflowRequest req, StreamObserver response) { try { service.removeWorkflow(req.getWorkflodId(), req.getArchiveWorkflow()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.RemoveWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } @@ -142,51 +139,58 @@ public void getRunningWorkflows(WorkflowServicePb.GetRunningWorkflowsRequest req } @Override - public void decideWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void decideWorkflow(WorkflowServicePb.DecideWorkflowRequest req, StreamObserver response) { try { executor.decide(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.DecideWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void pauseWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void pauseWorkflow(WorkflowServicePb.PauseWorkflowRequest req, StreamObserver response) { try { executor.pauseWorkflow(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.PauseWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void resumeWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void resumeWorkflow(WorkflowServicePb.ResumeWorkflowRequest req, StreamObserver response) { try { executor.resumeWorkflow(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.ResumeWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamObserver response) { + public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamObserver response) { try { SkipTaskRequest skipTask = protoMapper.fromProto(req.getRequest()); executor.skipTaskFromWorkflow(req.getWorkflowId(), req.getTaskReferenceName(), skipTask); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.SkipTaskResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, StreamObserver 
response) { + public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, StreamObserver response) { try { String id = executor.rerun(protoMapper.fromProto(req)); - response.onNext(newWorkflowId(id)); + response.onNext(WorkflowServicePb.RerunWorkflowResponse.newBuilder() + .setWorkflowId(id) + .build() + ); response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); @@ -194,46 +198,50 @@ public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, Strea } @Override - public void restartWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void restartWorkflow(WorkflowServicePb.RestartWorkflowRequest req, StreamObserver response) { try { executor.rewind(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.RestartWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void retryWorkflow(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void retryWorkflow(WorkflowServicePb.RetryWorkflowRequest req, StreamObserver response) { try { executor.retry(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.RetryWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void resetWorkflowCallbacks(WorkflowServicePb.WorkflowId req, StreamObserver response) { + public void resetWorkflowCallbacks(WorkflowServicePb.ResetWorkflowCallbacksRequest req, StreamObserver response) { try { executor.resetCallbacksForInProgressTasks(req.getWorkflowId()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.ResetWorkflowCallbacksResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } @Override - public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, StreamObserver response) { + public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, StreamObserver response) { try { executor.terminateWorkflow(req.getWorkflowId(), req.getReason()); - grpcHelper.emptyResponse(response); + response.onNext(WorkflowServicePb.TerminateWorkflowResponse.getDefaultInstance()); + response.onCompleted(); } catch (Exception e) { grpcHelper.onError(response, e); } } - private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamObserver response) { + private void doSearch(boolean searchByTask, SearchPb.Request req, StreamObserver response) { final int start = req.getStart(); final int size = grpcHelper.optionalOr(req.getSize(), maxSearchSize); final List sort = convertSort(req.getSort()); @@ -257,7 +265,7 @@ private void doSearch(boolean searchByTask, SearchPb.SearchRequest req, StreamOb } response.onNext( - SearchPb.WorkflowSummarySearchResult.newBuilder() + WorkflowServicePb.WorkflowSummarySearchResult.newBuilder() .setTotalHits(search.getTotalHits()) .addAllResults( search.getResults().stream().map(protoMapper::toProto)::iterator @@ -275,12 +283,12 @@ private List convertSort(String sortStr) { } @Override - public void search(SearchPb.SearchRequest request, StreamObserver responseObserver) { + public void search(SearchPb.Request request, StreamObserver responseObserver) { doSearch(false, request, responseObserver); } @Override - public void searchByTasks(SearchPb.SearchRequest request, StreamObserver responseObserver) { + public void 
searchByTasks(SearchPb.Request request, StreamObserver responseObserver) { doSearch(true, request, responseObserver); } } diff --git a/grpc/src/main/proto/grpc/event_service.proto b/grpc/src/main/proto/grpc/event_service.proto index 2c0a2afac2..5aa11dfea3 100644 --- a/grpc/src/main/proto/grpc/event_service.proto +++ b/grpc/src/main/proto/grpc/event_service.proto @@ -1,7 +1,6 @@ syntax = "proto3"; -package conductor.grpc; +package conductor.grpc.events; -import "google/protobuf/empty.proto"; import "model/eventhandler.proto"; option java_package = "com.netflix.conductor.grpc"; @@ -10,41 +9,61 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service EventService { // POST / - rpc AddEventHandler(conductor.proto.EventHandler) returns (google.protobuf.Empty); + rpc AddEventHandler(AddEventHandlerRequest) returns (AddEventHandlerResponse); // PUT / - rpc UpdateEventHandler(conductor.proto.EventHandler) returns (google.protobuf.Empty); + rpc UpdateEventHandler(UpdateEventHandlerRequest) returns (UpdateEventHandlerResponse); // DELETE /{name} - rpc RemoveEventHandler(RemoveEventHandlerRequest) returns (google.protobuf.Empty); + rpc RemoveEventHandler(RemoveEventHandlerRequest) returns (RemoveEventHandlerResponse); // GET / - rpc GetEventHandlers(google.protobuf.Empty) returns (stream conductor.proto.EventHandler); + rpc GetEventHandlers(GetEventHandlersRequest) returns (stream conductor.proto.EventHandler); // GET /{name} - rpc GetEventHandlersForEvent(GetEventHandlersRequest) returns (stream conductor.proto.EventHandler); + rpc GetEventHandlersForEvent(GetEventHandlersForEventRequest) returns (stream conductor.proto.EventHandler); // GET /queues - rpc GetQueues(google.protobuf.Empty) returns (GetQueuesResponse); - rpc GetQueueSizes(google.protobuf.Empty) returns (GetQueueSizesResponse); + rpc GetQueues(GetQueuesRequest) returns (GetQueuesResponse); + rpc GetQueueSizes(GetQueueSizesRequest) returns (GetQueueSizesResponse); // GET /queues/providers - rpc GetQueueProviders(google.protobuf.Empty) returns (GetQueueProvidersResponse); + rpc GetQueueProviders(GetQueueProvidersRequest) returns (GetQueueProvidersResponse); } +message AddEventHandlerRequest { + conductor.proto.EventHandler handler = 1; +} + +message AddEventHandlerResponse {} + +message UpdateEventHandlerRequest { + conductor.proto.EventHandler handler = 1; +} + +message UpdateEventHandlerResponse {} + message RemoveEventHandlerRequest { string name = 1; } -message GetEventHandlersRequest { +message RemoveEventHandlerResponse {} + +message GetEventHandlersRequest {} + +message GetEventHandlersForEventRequest { string event = 1; bool active_only = 2; } +message GetQueuesRequest {} + message GetQueuesResponse { map event_to_queue_uri = 1; } +message GetQueueSizesRequest {} + message GetQueueSizesResponse { message QueueInfo { map queue_sizes = 1; @@ -52,6 +71,8 @@ message GetQueueSizesResponse { map event_to_queue_info = 2; } +message GetQueueProvidersRequest {} + message GetQueueProvidersResponse { repeated string providers = 1; } diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto index 6a5029326a..3abe4fc952 100644 --- a/grpc/src/main/proto/grpc/metadata_service.proto +++ b/grpc/src/main/proto/grpc/metadata_service.proto @@ -1,7 +1,6 @@ syntax = "proto3"; -package conductor.grpc; +package conductor.grpc.metadata; -import "google/protobuf/empty.proto"; import "model/taskdef.proto"; import "model/workflowdef.proto"; @@ -11,40 +10,71 @@ option 
go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service MetadataService { // POST /workflow - rpc CreateWorkflow(conductor.proto.WorkflowDef) returns (google.protobuf.Empty); + rpc CreateWorkflow(CreateWorkflowRequest) returns (CreateWorkflowResponse); // PUT /workflow - rpc UpdateWorkflows(UpdateWorkflowsRequest) returns (google.protobuf.Empty); + rpc UpdateWorkflows(UpdateWorkflowsRequest) returns (UpdateWorkflowsResponse); // GET /workflow/{name} - rpc GetWorkflow(GetWorkflowRequest) returns (conductor.proto.WorkflowDef); + rpc GetWorkflow(GetWorkflowRequest) returns (GetWorkflowResponse); // POST /taskdefs - rpc CreateTasks(CreateTasksRequest) returns (google.protobuf.Empty); + rpc CreateTasks(CreateTasksRequest) returns (CreateTasksResponse); // PUT /taskdefs - rpc UpdateTask(conductor.proto.TaskDef) returns (google.protobuf.Empty); + rpc UpdateTask(UpdateTaskRequest) returns (UpdateTaskResponse); // GET /taskdefs/{tasktype} - rpc GetTask(GetTaskRequest) returns (conductor.proto.TaskDef); + rpc GetTask(GetTaskRequest) returns (GetTaskResponse); // DELETE /taskdefs/{tasktype} - rpc DeleteTask(GetTaskRequest) returns (google.protobuf.Empty); + rpc DeleteTask(DeleteTaskRequest) returns (DeleteTaskResponse); } +message CreateWorkflowRequest { + conductor.proto.WorkflowDef workflow = 1; +} + +message CreateWorkflowResponse {} + message UpdateWorkflowsRequest { repeated conductor.proto.WorkflowDef defs = 1; } -message CreateTasksRequest { - repeated conductor.proto.TaskDef defs = 1; -} +message UpdateWorkflowsResponse {} message GetWorkflowRequest { string name = 1; int32 version = 2; } +message GetWorkflowResponse { + conductor.proto.WorkflowDef workflow = 1; +} + +message CreateTasksRequest { + repeated conductor.proto.TaskDef defs = 1; +} + +message CreateTasksResponse {} + +message UpdateTaskRequest { + conductor.proto.TaskDef task = 1; +} + +message UpdateTaskResponse {} + + message GetTaskRequest { string task_type = 1; } + +message GetTaskResponse { + conductor.proto.TaskDef task = 1; +} + +message DeleteTaskRequest { + string task_type = 1; +} + +message DeleteTaskResponse {} diff --git a/grpc/src/main/proto/grpc/search.proto b/grpc/src/main/proto/grpc/search.proto index 41c7f4b58d..d7f87c9b65 100644 --- a/grpc/src/main/proto/grpc/search.proto +++ b/grpc/src/main/proto/grpc/search.proto @@ -1,13 +1,11 @@ syntax = "proto3"; -package conductor.grpc; - -import "model/workflowsummary.proto"; +package conductor.grpc.search; option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "SearchPb"; option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; -message SearchRequest { +message Request { int32 start = 1; int32 size = 2; string sort = 3; @@ -15,7 +13,3 @@ message SearchRequest { string query = 5; } -message WorkflowSummarySearchResult { - int64 total_hits = 1; - repeated conductor.proto.WorkflowSummary results = 2; -} \ No newline at end of file diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto index 1caef0e10c..7f540dfa4f 100644 --- a/grpc/src/main/proto/grpc/task_service.proto +++ b/grpc/src/main/proto/grpc/task_service.proto @@ -1,7 +1,6 @@ syntax = "proto3"; -package conductor.grpc; +package conductor.grpc.tasks; -import "google/protobuf/empty.proto"; import "model/taskexeclog.proto"; import "model/taskresult.proto"; import "model/task.proto"; @@ -12,7 +11,7 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service 
TaskService { // GET /poll/{tasktype} - rpc Poll(PollRequest) returns (conductor.proto.Task); + rpc Poll(PollRequest) returns (PollResponse); // /poll/batch/{tasktype} rpc BatchPoll(BatchPollRequest) returns (stream conductor.proto.Task); @@ -21,34 +20,34 @@ service TaskService { rpc GetTasksInProgress(TasksInProgressRequest) returns (TasksInProgressResponse); // GET /in_progress/{workflowId}/{taskRefName} - rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (conductor.proto.Task); + rpc GetPendingTaskForWorkflow(PendingTaskRequest) returns (PendingTaskResponse); // POST / - rpc UpdateTask(conductor.proto.TaskResult) returns (TaskId); + rpc UpdateTask(UpdateTaskRequest) returns (UpdateTaskResponse); // POST /{taskId}/ack rpc AckTask(AckTaskRequest) returns (AckTaskResponse); // POST /{taskId}/log - rpc AddLog(AddLogRequest) returns (google.protobuf.Empty); + rpc AddLog(AddLogRequest) returns (AddLogResponse); // GET {taskId}/log - rpc GetTaskLogs(TaskId) returns (GetLogsResponse); + rpc GetTaskLogs(GetTaskLogsRequest) returns (GetTaskLogsResponse); // GET /{taskId} - rpc GetTask(TaskId) returns (conductor.proto.Task); + rpc GetTask(GetTaskRequest) returns (GetTaskResponse); // DELETE /queue/{taskType}/{taskId} - rpc RemoveTaskFromQueue(RemoveTaskRequest) returns (google.protobuf.Empty); + rpc RemoveTaskFromQueue(RemoveTaskRequest) returns (RemoveTaskResponse); // GET /queue/sizes rpc GetQueueSizesForTasks(QueueSizesRequest) returns (QueueSizesResponse); // GET /queue/all - rpc GetQueueInfo(google.protobuf.Empty) returns (QueueInfoResponse); + rpc GetQueueInfo(QueueInfoRequest) returns (QueueInfoResponse); // GET /queue/all/verbose - rpc GetQueueAllInfo(google.protobuf.Empty) returns (QueueAllInfoResponse); + rpc GetQueueAllInfo(QueueAllInfoRequest) returns (QueueAllInfoResponse); } message PollRequest { @@ -57,6 +56,10 @@ message PollRequest { string domain = 3; } +message PollResponse { + conductor.proto.Task task = 1; +} + message BatchPollRequest { string task_type = 1; string worker_id = 2; @@ -80,6 +83,18 @@ message PendingTaskRequest { string task_ref_name = 2; } +message PendingTaskResponse { + conductor.proto.Task task = 1; +} + +message UpdateTaskRequest { + conductor.proto.TaskResult result = 1; +} + +message UpdateTaskResponse { + string task_id = 1; +} + message AckTaskRequest { string task_id = 1; string worker_id = 2; @@ -94,16 +109,22 @@ message AddLogRequest { string log = 2; } -message GetLogsResponse { +message AddLogResponse {} + +message GetTaskLogsRequest { + string task_id = 1; +} + +message GetTaskLogsResponse { repeated conductor.proto.TaskExecLog logs = 1; } -message TaskId { +message GetTaskRequest { string task_id = 1; } -message TaskType { - string task_type = 1; +message GetTaskResponse { + conductor.proto.Task task = 1; } message RemoveTaskRequest { @@ -111,6 +132,8 @@ message RemoveTaskRequest { string task_id = 2; } +message RemoveTaskResponse {} + message QueueSizesRequest { repeated string task_types = 1; } @@ -119,10 +142,14 @@ message QueueSizesResponse { map queue_for_task = 1; } +message QueueInfoRequest {} + message QueueInfoResponse { map queues = 1; } +message QueueAllInfoRequest {} + message QueueAllInfoResponse { message ShardInfo { int64 size = 1; diff --git a/grpc/src/main/proto/grpc/workflow_service.proto b/grpc/src/main/proto/grpc/workflow_service.proto index 79f4e48571..083b778df5 100644 --- a/grpc/src/main/proto/grpc/workflow_service.proto +++ b/grpc/src/main/proto/grpc/workflow_service.proto @@ -1,9 +1,9 @@ syntax = "proto3"; 
-package conductor.grpc; +package conductor.grpc.workflows; -import "google/protobuf/empty.proto"; import "grpc/search.proto"; import "model/workflow.proto"; +import "model/workflowsummary.proto"; import "model/skiptaskrequest.proto"; import "model/startworkflowrequest.proto"; import "model/rerunworkflowrequest.proto"; @@ -14,7 +14,7 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; service WorkflowService { // POST / - rpc StartWorkflow(conductor.proto.StartWorkflowRequest) returns (WorkflowId); + rpc StartWorkflow(conductor.proto.StartWorkflowRequest) returns (StartWorkflowResponse); // GET /{name}/correlated/{correlationId} rpc GetWorkflows(GetWorkflowsRequest) returns (GetWorkflowsResponse); @@ -23,41 +23,45 @@ service WorkflowService { rpc GetWorkflowStatus(GetWorkflowStatusRequest) returns (conductor.proto.Workflow); // DELETE /{workflodId}/remove - rpc RemoveWorkflow(RemoveWorkflowRequest) returns (google.protobuf.Empty); + rpc RemoveWorkflow(RemoveWorkflowRequest) returns (RemoveWorkflowResponse); // GET /running/{name} rpc GetRunningWorkflows(GetRunningWorkflowsRequest) returns (GetRunningWorkflowsResponse); // PUT /decide/{workflowId} - rpc DecideWorkflow(WorkflowId) returns (google.protobuf.Empty); + rpc DecideWorkflow(DecideWorkflowRequest) returns (DecideWorkflowResponse); // PUT /{workflowId}/pause - rpc PauseWorkflow(WorkflowId) returns (google.protobuf.Empty); + rpc PauseWorkflow(PauseWorkflowRequest) returns (PauseWorkflowResponse); // PUT /{workflowId}/pause - rpc ResumeWorkflow(WorkflowId) returns (google.protobuf.Empty); + rpc ResumeWorkflow(ResumeWorkflowRequest) returns (ResumeWorkflowResponse); // PUT /{workflowId}/skiptask/{taskReferenceName} - rpc SkipTaskFromWorkflow(SkipTaskRequest) returns (google.protobuf.Empty); + rpc SkipTaskFromWorkflow(SkipTaskRequest) returns (SkipTaskResponse); // POST /{workflowId}/rerun - rpc RerunWorkflow(conductor.proto.RerunWorkflowRequest) returns (WorkflowId); + rpc RerunWorkflow(conductor.proto.RerunWorkflowRequest) returns (RerunWorkflowResponse); // POST /{workflowId}/restart - rpc RestartWorkflow(WorkflowId) returns (google.protobuf.Empty); + rpc RestartWorkflow(RestartWorkflowRequest) returns (RestartWorkflowResponse); // POST /{workflowId}retry - rpc RetryWorkflow(WorkflowId) returns (google.protobuf.Empty); + rpc RetryWorkflow(RetryWorkflowRequest) returns (RetryWorkflowResponse); // POST /{workflowId}/resetcallbacks - rpc ResetWorkflowCallbacks(WorkflowId) returns (google.protobuf.Empty); + rpc ResetWorkflowCallbacks(ResetWorkflowCallbacksRequest) returns (ResetWorkflowCallbacksResponse); // DELETE /{workflowId} - rpc TerminateWorkflow(TerminateWorkflowRequest) returns (google.protobuf.Empty); + rpc TerminateWorkflow(TerminateWorkflowRequest) returns (TerminateWorkflowResponse); // GET /search - rpc Search(SearchRequest) returns (WorkflowSummarySearchResult); - rpc SearchByTasks(SearchRequest) returns (WorkflowSummarySearchResult); + rpc Search(conductor.grpc.search.Request) returns (WorkflowSummarySearchResult); + rpc SearchByTasks(conductor.grpc.search.Request) returns (WorkflowSummarySearchResult); +} + +message StartWorkflowResponse { + string workflow_id = 1; } message GetWorkflowsRequest { @@ -79,11 +83,17 @@ message GetWorkflowStatusRequest { bool include_tasks = 2; } +message GetWorkflowStatusResponse { + conductor.proto.Workflow workflow = 1; +} + message RemoveWorkflowRequest { string workflod_id = 1; bool archive_workflow = 2; } +message RemoveWorkflowResponse {} + message 
GetRunningWorkflowsRequest {
     string name = 1;
     int32 version = 2;
@@ -95,17 +105,62 @@ message GetRunningWorkflowsResponse {
     repeated string workflow_ids = 1;
 }

-message WorkflowId {
+message DecideWorkflowRequest {
+    string workflow_id = 1;
+}
+
+message DecideWorkflowResponse {}
+
+message PauseWorkflowRequest {
+    string workflow_id = 1;
+}
+
+message PauseWorkflowResponse {}
+
+message ResumeWorkflowRequest {
     string workflow_id = 1;
 }

+message ResumeWorkflowResponse {}
+
 message SkipTaskRequest {
     string workflow_id = 1;
     string task_reference_name = 2;
     conductor.proto.SkipTaskRequest request = 3;
 }

+message SkipTaskResponse {}
+
+message RerunWorkflowResponse {
+    string workflow_id = 1;
+}
+
+message RestartWorkflowRequest {
+    string workflow_id = 1;
+}
+
+message RestartWorkflowResponse {}
+
+message RetryWorkflowRequest {
+    string workflow_id = 1;
+}
+
+message RetryWorkflowResponse {}
+
+message ResetWorkflowCallbacksRequest {
+    string workflow_id = 1;
+}
+
+message ResetWorkflowCallbacksResponse {}
+
 message TerminateWorkflowRequest {
     string workflow_id = 1;
     string reason = 2;
-}
\ No newline at end of file
+}
+
+message TerminateWorkflowResponse {}
+
+message WorkflowSummarySearchResult {
+    int64 total_hits = 1;
+    repeated conductor.proto.WorkflowSummary results = 2;
+}

From 1358d45cf263a3c806d09f67acdfac51ea0d8b9a Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Thu, 28 Jun 2018 16:43:40 +0200
Subject: [PATCH 074/163] grpc-server: Comment the optional helpers

---
 .../grpc/server/service/GRPCHelper.java | 47 +++++++++++++++++++
 1 file changed, 47 insertions(+)

diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java
index 5a01c4d1d1..d57a73548c 100644
--- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java
+++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/GRPCHelper.java
@@ -93,18 +93,65 @@ void onError(StreamObserver response, Throwable t) {
         response.onError(throwableToStatusException(t));
     }

+    /**
+     * Convert a non-null String instance to a possibly null String instance
+     * based on ProtoBuf's rules for optional arguments.
+     *
+     * This helper converts a String instance from a ProtoBuf object into a
+     * possibly null String. In ProtoBuf objects, String fields are not
+     * nullable, but an empty String field is considered to be "missing".
+     *
+     * The internal Conductor APIs expect missing arguments to be passed
+     * as null values, so this helper performs such conversion.
+     *
+     * @param str a string from a ProtoBuf object
+     * @return the original string, or null
+     */
     String optional(@Nonnull String str) {
         return str.isEmpty() ? null : str;
     }

+    /**
+     * Check if a given non-null String instance is "missing" according to ProtoBuf's
+     * missing field rules. If the String is missing, the given default value will be
+     * returned. Otherwise, the string itself will be returned.
+     *
+     * @param str the input String
+     * @param defaults the default value for the string
+     * @return 'str' if it is not empty according to ProtoBuf rules; 'defaults' otherwise
+     */
     String optionalOr(@Nonnull String str, String defaults) {
         return str.isEmpty() ? defaults : str;
     }

+    /**
+     * Convert a non-null Integer instance to a possibly null Integer instance
+     * based on ProtoBuf's rules for optional arguments.
+     *
+     * This helper converts an Integer instance from a ProtoBuf object into a
+     * possibly null Integer.
In ProtoBuf objects, Integer fields are not + * nullable, but a zero-value Integer field is considered to be "missing". + * + * The internal Conductor APIs expect missing arguments to be passed + * as null values, so this helper performs such conversion. + * + * @param i an Integer from a ProtoBuf object + * @return the original Integer, or null + */ Integer optional(@Nonnull Integer i) { return i == 0 ? null : i; } + /** + * Check if a given non-null Integer instance is "missing" according to ProtoBuf's + * missing field rules. If the Integer is missing (i.e. if it has a zero-value), + * the given default value will be returned. Otherwise, the Integer itself will be + * returned. + * + * @param i the input Integer + * @param defaults the default value for the Integer + * @return 'i' if it is not a zero-value according to ProtoBuf rules; 'defaults' otherwise + */ Integer optionalOr(@Nonnull Integer i, int defaults) { return i == 0 ? defaults : i; } From 792eb6c473774cf4b3c7a85485b02b65644c0bb9 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 28 Jun 2018 16:46:29 +0200 Subject: [PATCH 075/163] server: Upcase all server class constants --- .../grpc/server/service/EventServiceImpl.java | 16 +++--- .../server/service/MetadataServiceImpl.java | 21 ++++---- .../grpc/server/service/TaskServiceImpl.java | 51 +++++++++---------- .../server/service/WorkflowServiceImpl.java | 51 +++++++++---------- .../server/resources/AdminResource.java | 4 +- .../resources/ApplicationExceptionMapper.java | 4 +- .../resources/GenericExceptionMapper.java | 4 +- .../server/resources/TaskResource.java | 18 +++---- 8 files changed, 80 insertions(+), 89 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java index 9211334ef0..e220c4a4a7 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/EventServiceImpl.java @@ -1,7 +1,5 @@ package com.netflix.conductor.grpc.server.service; -import com.google.protobuf.Empty; - import com.netflix.conductor.core.events.EventProcessor; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.grpc.EventServiceGrpc; @@ -19,9 +17,9 @@ import org.slf4j.LoggerFactory; public class EventServiceImpl extends EventServiceGrpc.EventServiceImplBase { - private static final Logger logger = LoggerFactory.getLogger(EventServiceImpl.class); - private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private static final GRPCHelper grpcHelper = new GRPCHelper(logger); + private static final Logger LOGGER = LoggerFactory.getLogger(EventServiceImpl.class); + private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; + private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); private final MetadataService service; private final EventProcessor ep; @@ -34,14 +32,14 @@ public EventServiceImpl(MetadataService service, EventProcessor ep) { @Override public void addEventHandler(EventServicePb.AddEventHandlerRequest req, StreamObserver response) { - service.addEventHandler(protoMapper.fromProto(req.getHandler())); + service.addEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); response.onNext(EventServicePb.AddEventHandlerResponse.getDefaultInstance()); response.onCompleted(); } @Override public void updateEventHandler(EventServicePb.UpdateEventHandlerRequest req, 
StreamObserver response) { - service.updateEventHandler(protoMapper.fromProto(req.getHandler())); + service.updateEventHandler(PROTO_MAPPER.fromProto(req.getHandler())); response.onNext(EventServicePb.UpdateEventHandlerResponse.getDefaultInstance()); response.onCompleted(); } @@ -55,14 +53,14 @@ public void removeEventHandler(EventServicePb.RemoveEventHandlerRequest req, Str @Override public void getEventHandlers(EventServicePb.GetEventHandlersRequest req, StreamObserver response) { - service.getEventHandlers().stream().map(protoMapper::toProto).forEach(response::onNext); + service.getEventHandlers().stream().map(PROTO_MAPPER::toProto).forEach(response::onNext); response.onCompleted(); } @Override public void getEventHandlersForEvent(EventServicePb.GetEventHandlersForEventRequest req, StreamObserver response) { service.getEventHandlersForEvent(req.getEvent(), req.getActiveOnly()) - .stream().map(protoMapper::toProto).forEach(response::onNext); + .stream().map(PROTO_MAPPER::toProto).forEach(response::onNext); response.onCompleted(); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index 0d382b187b..216439bfb5 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -1,6 +1,5 @@ package com.netflix.conductor.grpc.server.service; -import com.google.protobuf.Empty; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.grpc.MetadataServiceGrpc; @@ -19,9 +18,9 @@ import java.util.stream.Collectors; public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { - private static final Logger logger = LoggerFactory.getLogger(MetadataServiceImpl.class); - private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private static final GRPCHelper grpcHelper = new GRPCHelper(logger); + private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class); + private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; + private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); private final MetadataService service; @@ -32,7 +31,7 @@ public MetadataServiceImpl(MetadataService service) { @Override public void createWorkflow(MetadataServicePb.CreateWorkflowRequest req, StreamObserver response) { - WorkflowDef workflow = protoMapper.fromProto(req.getWorkflow()); + WorkflowDef workflow = PROTO_MAPPER.fromProto(req.getWorkflow()); service.registerWorkflowDef(workflow); response.onNext(MetadataServicePb.CreateWorkflowResponse.getDefaultInstance()); response.onCompleted(); @@ -41,7 +40,7 @@ public void createWorkflow(MetadataServicePb.CreateWorkflowRequest req, StreamOb @Override public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, StreamObserver response) { List workflows = req.getDefsList().stream() - .map(protoMapper::fromProto).collect(Collectors.toList()); + .map(PROTO_MAPPER::fromProto).collect(Collectors.toList()); service.updateWorkflowDef(workflows); response.onNext(MetadataServicePb.UpdateWorkflowsResponse.getDefaultInstance()); @@ -50,9 +49,9 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream @Override public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, 
StreamObserver response) { - WorkflowDef def = service.getWorkflowDef(req.getName(), grpcHelper.optional(req.getVersion())); + WorkflowDef def = service.getWorkflowDef(req.getName(), GRPC_HELPER.optional(req.getVersion())); if (def != null) { - WorkflowDefPb.WorkflowDef workflow = protoMapper.toProto(def); + WorkflowDefPb.WorkflowDef workflow = PROTO_MAPPER.toProto(def); response.onNext(MetadataServicePb.GetWorkflowResponse.newBuilder() .setWorkflow(workflow) .build() @@ -69,7 +68,7 @@ public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver @Override public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver response) { service.registerTaskDef( - req.getDefsList().stream().map(protoMapper::fromProto).collect(Collectors.toList()) + req.getDefsList().stream().map(PROTO_MAPPER::fromProto).collect(Collectors.toList()) ); response.onNext(MetadataServicePb.CreateTasksResponse.getDefaultInstance()); response.onCompleted(); @@ -77,7 +76,7 @@ public void createTasks(MetadataServicePb.CreateTasksRequest req, StreamObserver @Override public void updateTask(MetadataServicePb.UpdateTaskRequest req, StreamObserver response) { - TaskDef task = protoMapper.fromProto(req.getTask()); + TaskDef task = PROTO_MAPPER.fromProto(req.getTask()); service.updateTaskDef(task); response.onNext(MetadataServicePb.UpdateTaskResponse.getDefaultInstance()); response.onCompleted(); @@ -87,7 +86,7 @@ public void updateTask(MetadataServicePb.UpdateTaskRequest req, StreamObserver response) { TaskDef def = service.getTaskDef(req.getTaskType()); if (def != null) { - TaskDefPb.TaskDef task = protoMapper.toProto(def); + TaskDefPb.TaskDef task = PROTO_MAPPER.toProto(def); response.onNext(MetadataServicePb.GetTaskResponse.newBuilder() .setTask(task) .build() diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java index 618c341f56..4a0519945f 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/TaskServiceImpl.java @@ -6,16 +6,13 @@ import java.util.Map; import java.util.stream.Collectors; -import com.google.protobuf.Empty; import com.netflix.conductor.common.metadata.tasks.TaskExecLog; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.grpc.ProtoMapper; import com.netflix.conductor.proto.TaskPb; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.TaskServicePb; -import com.netflix.conductor.proto.TaskResultPb; import io.grpc.Status; -import io.grpc.stub.ServerCallStreamObserver; import io.grpc.stub.StreamObserver; import com.netflix.conductor.common.metadata.tasks.Task; @@ -29,9 +26,9 @@ import javax.inject.Inject; public class TaskServiceImpl extends TaskServiceGrpc.TaskServiceImplBase { - private static final Logger logger = LoggerFactory.getLogger(TaskServiceImpl.class); - private static final ProtoMapper protoMapper = ProtoMapper.INSTANCE; - private static final GRPCHelper grpcHelper = new GRPCHelper(logger); + private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); + private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; + private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); private static final int MAX_TASK_COUNT = 100; private static final int POLL_TIMEOUT_MS = 100; @@ -50,9 +47,9 @@ public 
TaskServiceImpl(ExecutionService taskService, QueueDAO queues, Configurat public void poll(TaskServicePb.PollRequest req, StreamObserver response) { try { List tasks = taskService.poll(req.getTaskType(), req.getWorkerId(), - grpcHelper.optional(req.getDomain()), 1, POLL_TIMEOUT_MS); + GRPC_HELPER.optional(req.getDomain()), 1, POLL_TIMEOUT_MS); if (!tasks.isEmpty()) { - TaskPb.Task t = protoMapper.toProto(tasks.get(0)); + TaskPb.Task t = PROTO_MAPPER.toProto(tasks.get(0)); response.onNext(TaskServicePb.PollResponse.newBuilder() .setTask(t) .build() @@ -60,14 +57,14 @@ public void poll(TaskServicePb.PollRequest req, StreamObserver response) { - final int count = grpcHelper.optionalOr(req.getCount(), 1); - final int timeout = grpcHelper.optionalOr(req.getTimeout(), POLL_TIMEOUT_MS); + final int count = GRPC_HELPER.optionalOr(req.getCount(), 1); + final int timeout = GRPC_HELPER.optionalOr(req.getTimeout(), POLL_TIMEOUT_MS); if (timeout > MAX_POLL_TIMEOUT_MS) { response.onError(Status.INVALID_ARGUMENT @@ -79,31 +76,31 @@ public void batchPoll(TaskServicePb.BatchPollRequest req, StreamObserver polledTasks = taskService.poll(req.getTaskType(), req.getWorkerId(), - grpcHelper.optional(req.getDomain()), count, timeout); - logger.info("polled tasks: "+polledTasks); - polledTasks.stream().map(protoMapper::toProto).forEach(response::onNext); + GRPC_HELPER.optional(req.getDomain()), count, timeout); + LOGGER.info("polled tasks: "+polledTasks); + polledTasks.stream().map(PROTO_MAPPER::toProto).forEach(response::onNext); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @Override public void getTasksInProgress(TaskServicePb.TasksInProgressRequest req, StreamObserver response) { - final String startKey = grpcHelper.optional(req.getStartKey()); - final int count = grpcHelper.optionalOr(req.getCount(), MAX_TASK_COUNT); + final String startKey = GRPC_HELPER.optional(req.getStartKey()); + final int count = GRPC_HELPER.optionalOr(req.getCount(), MAX_TASK_COUNT); try { response.onNext( TaskServicePb.TasksInProgressResponse.newBuilder().addAllTasks( taskService.getTasks(req.getTaskType(), startKey, count) .stream() - .map(protoMapper::toProto)::iterator + .map(PROTO_MAPPER::toProto)::iterator ).build() ); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -113,19 +110,19 @@ public void getPendingTaskForWorkflow(TaskServicePb.PendingTaskRequest req, Stre Task t = taskService.getPendingTaskForWorkflow(req.getTaskRefName(), req.getWorkflowId()); response.onNext( TaskServicePb.PendingTaskResponse.newBuilder() - .setTask(protoMapper.toProto(t)) + .setTask(PROTO_MAPPER.toProto(t)) .build() ); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @Override public void updateTask(TaskServicePb.UpdateTaskRequest req, StreamObserver response) { try { - TaskResult task = protoMapper.fromProto(req.getResult()); + TaskResult task = PROTO_MAPPER.fromProto(req.getResult()); taskService.updateTask(task); response.onNext( @@ -135,7 +132,7 @@ public void updateTask(TaskServicePb.UpdateTaskRequest req, StreamObserver response) { List logs = taskService.getTaskLogs(req.getTaskId()); response.onNext(TaskServicePb.GetTaskLogsResponse.newBuilder() - .addAllLogs(logs.stream().map(protoMapper::toProto)::iterator) + .addAllLogs(logs.stream().map(PROTO_MAPPER::toProto)::iterator) .build() ); response.onCompleted(); @@ 
-179,13 +176,13 @@ public void getTask(TaskServicePb.GetTaskRequest req, StreamObserver response) { // TODO: better handling of optional 'version' - final StartWorkflowRequest request = protoMapper.fromProto(pbRequest); - WorkflowDef def = metadata.getWorkflowDef(request.getName(), grpcHelper.optional(request.getVersion())); + final StartWorkflowRequest request = PROTO_MAPPER.fromProto(pbRequest); + WorkflowDef def = metadata.getWorkflowDef(request.getName(), GRPC_HELPER.optional(request.getVersion())); if(def == null){ response.onError(Status.NOT_FOUND .withDescription("No such workflow found by name="+request.getName()) @@ -69,7 +68,7 @@ public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, ); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -85,7 +84,7 @@ public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserv List workflows = service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks); builder.putWorkflowsById(correlationId, WorkflowServicePb.GetWorkflowsResponse.Workflows.newBuilder() - .addAllWorkflows(workflows.stream().map(protoMapper::toProto)::iterator) + .addAllWorkflows(workflows.stream().map(PROTO_MAPPER::toProto)::iterator) .build() ); } @@ -98,10 +97,10 @@ public void getWorkflows(WorkflowServicePb.GetWorkflowsRequest req, StreamObserv public void getWorkflowStatus(WorkflowServicePb.GetWorkflowStatusRequest req, StreamObserver response) { try { Workflow workflow = service.getExecutionStatus(req.getWorkflowId(), req.getIncludeTasks()); - response.onNext(protoMapper.toProto(workflow)); + response.onNext(PROTO_MAPPER.toProto(workflow)); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -112,7 +111,7 @@ public void removeWorkflow(WorkflowServicePb.RemoveWorkflowRequest req, StreamOb response.onNext(WorkflowServicePb.RemoveWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -134,7 +133,7 @@ public void getRunningWorkflows(WorkflowServicePb.GetRunningWorkflowsRequest req ); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -145,7 +144,7 @@ public void decideWorkflow(WorkflowServicePb.DecideWorkflowRequest req, StreamOb response.onNext(WorkflowServicePb.DecideWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -156,7 +155,7 @@ public void pauseWorkflow(WorkflowServicePb.PauseWorkflowRequest req, StreamObse response.onNext(WorkflowServicePb.PauseWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -167,33 +166,33 @@ public void resumeWorkflow(WorkflowServicePb.ResumeWorkflowRequest req, StreamOb response.onNext(WorkflowServicePb.ResumeWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @Override public void skipTaskFromWorkflow(WorkflowServicePb.SkipTaskRequest req, StreamObserver response) { try { - SkipTaskRequest skipTask = protoMapper.fromProto(req.getRequest()); + SkipTaskRequest skipTask = PROTO_MAPPER.fromProto(req.getRequest()); 
executor.skipTaskFromWorkflow(req.getWorkflowId(), req.getTaskReferenceName(), skipTask); response.onNext(WorkflowServicePb.SkipTaskResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @Override public void rerunWorkflow(RerunWorkflowRequestPb.RerunWorkflowRequest req, StreamObserver response) { try { - String id = executor.rerun(protoMapper.fromProto(req)); + String id = executor.rerun(PROTO_MAPPER.fromProto(req)); response.onNext(WorkflowServicePb.RerunWorkflowResponse.newBuilder() .setWorkflowId(id) .build() ); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -204,7 +203,7 @@ public void restartWorkflow(WorkflowServicePb.RestartWorkflowRequest req, Stream response.onNext(WorkflowServicePb.RestartWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -215,7 +214,7 @@ public void retryWorkflow(WorkflowServicePb.RetryWorkflowRequest req, StreamObse response.onNext(WorkflowServicePb.RetryWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -226,7 +225,7 @@ public void resetWorkflowCallbacks(WorkflowServicePb.ResetWorkflowCallbacksReque response.onNext(WorkflowServicePb.ResetWorkflowCallbacksResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } @@ -237,15 +236,15 @@ public void terminateWorkflow(WorkflowServicePb.TerminateWorkflowRequest req, St response.onNext(WorkflowServicePb.TerminateWorkflowResponse.getDefaultInstance()); response.onCompleted(); } catch (Exception e) { - grpcHelper.onError(response, e); + GRPC_HELPER.onError(response, e); } } private void doSearch(boolean searchByTask, SearchPb.Request req, StreamObserver response) { final int start = req.getStart(); - final int size = grpcHelper.optionalOr(req.getSize(), maxSearchSize); + final int size = GRPC_HELPER.optionalOr(req.getSize(), maxSearchSize); final List sort = convertSort(req.getSort()); - final String freeText = grpcHelper.optionalOr(req.getFreeText(), "*"); + final String freeText = GRPC_HELPER.optionalOr(req.getFreeText(), "*"); final String query = req.getQuery(); if (size > maxSearchSize) { @@ -268,7 +267,7 @@ private void doSearch(boolean searchByTask, SearchPb.Request req, StreamObserver WorkflowServicePb.WorkflowSummarySearchResult.newBuilder() .setTotalHits(search.getTotalHits()) .addAllResults( - search.getResults().stream().map(protoMapper::toProto)::iterator + search.getResults().stream().map(PROTO_MAPPER::toProto)::iterator ).build() ); response.onCompleted(); diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java index dcf0470e43..b1cdbb5765 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java @@ -58,7 +58,7 @@ @Singleton public class AdminResource { - private static Logger logger = LoggerFactory.getLogger(AdminResource.class); + private static final Logger LOGGER = LoggerFactory.getLogger(AdminResource.class); private Configuration config; @@ -86,7 +86,7 @@ public 
AdminResource(Configuration config, ExecutionService service, QueueDAO qu this.version = prop.getProperty("Implementation-Version"); this.buildDate = prop.getProperty("Build-Date"); }catch(Exception e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); } } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/ApplicationExceptionMapper.java b/jersey/src/main/java/com/netflix/conductor/server/resources/ApplicationExceptionMapper.java index c7afc1533a..6e7fa7a37b 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/ApplicationExceptionMapper.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/ApplicationExceptionMapper.java @@ -46,7 +46,7 @@ @Singleton public class ApplicationExceptionMapper implements ExceptionMapper { - private static Logger logger = LoggerFactory.getLogger(ApplicationExceptionMapper.class); + private static final Logger LOGGER = LoggerFactory.getLogger(ApplicationExceptionMapper.class); private static List supportedMediaTypes = Variant.mediaTypes(MediaType.APPLICATION_JSON_TYPE, MediaType.TEXT_HTML_TYPE, MediaType.TEXT_PLAIN_TYPE).add().build(); @@ -62,7 +62,7 @@ public ApplicationExceptionMapper(Configuration config) { @Override public Response toResponse(ApplicationException e) { - logger.error(e.getMessage(), e); + LOGGER.error(e.getMessage(), e); if(e.getHttpStatusCode() == 500) { Monitors.error("error", "error"); } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/GenericExceptionMapper.java b/jersey/src/main/java/com/netflix/conductor/server/resources/GenericExceptionMapper.java index 6ffdddbd21..79173eef81 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/GenericExceptionMapper.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/GenericExceptionMapper.java @@ -47,7 +47,7 @@ @Singleton public class GenericExceptionMapper implements ExceptionMapper { - private static Logger logger = LoggerFactory.getLogger(GenericExceptionMapper.class); + private static final Logger LOGGER = LoggerFactory.getLogger(GenericExceptionMapper.class); private static List supportedMediaTypes = Variant.mediaTypes(MediaType.APPLICATION_JSON_TYPE, MediaType.TEXT_HTML_TYPE, MediaType.TEXT_PLAIN_TYPE).add().build(); @@ -63,7 +63,7 @@ public GenericExceptionMapper(Configuration config) { @Override public Response toResponse(Throwable t) { - logger.error(t.getMessage(), t); + LOGGER.error(t.getMessage(), t); Monitors.error("error", "error"); ApplicationException e = new ApplicationException(Code.INTERNAL_ERROR, t.getMessage(), t); MediaType mediaType = context.getRequest().selectVariant(supportedMediaTypes).getMediaType(); diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/TaskResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/TaskResource.java index 143d4efbee..58731dc59b 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/TaskResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/TaskResource.java @@ -22,8 +22,6 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; -import java.util.TreeSet; import java.util.stream.Collectors; import javax.inject.Inject; @@ -68,7 +66,7 @@ @Singleton public class TaskResource { - private static final Logger logger = LoggerFactory.getLogger(TaskResource.class); + private static final Logger LOGGER = LoggerFactory.getLogger(TaskResource.class); private ExecutionService taskService; @@ 
-88,14 +86,14 @@ public TaskResource(ExecutionService taskService, QueueDAO queues, Configuration @ApiOperation("Poll for a task of a certain type") @Consumes({MediaType.WILDCARD}) public Task poll(@PathParam("tasktype") String taskType, @QueryParam("workerid") String workerId, @QueryParam("domain") String domain) throws Exception { - logger.debug("Task being polled: /tasks/poll/{}?{}&{}", taskType, workerId, domain); + LOGGER.debug("Task being polled: /tasks/poll/{}?{}&{}", taskType, workerId, domain); List tasks = taskService.poll(taskType, workerId, domain, 1, 100); if (tasks.isEmpty()) { - logger.debug("No Task available for the poll: /tasks/poll/{}?{}&{}", taskType, workerId, domain); + LOGGER.debug("No Task available for the poll: /tasks/poll/{}?{}&{}", taskType, workerId, domain); return null; } Task task = tasks.get(0); - logger.debug("The Task {} being returned for /tasks/poll/{}?{}&{}", task, taskType, workerId, domain); + LOGGER.debug("The Task {} being returned for /tasks/poll/{}?{}&{}", task, taskType, workerId, domain); return task; } @@ -113,7 +111,7 @@ public List batchPoll( throw new ApplicationException(Code.INVALID_INPUT, "Long Poll Timeout value cannot be more than 5 seconds"); } List polledTasks = taskService.poll(taskType, workerId, domain, count, timeout); - logger.debug("The Tasks {} being returned for /tasks/poll/{}?{}&{}", + LOGGER.debug("The Tasks {} being returned for /tasks/poll/{}?{}&{}", polledTasks.stream() .map(Task::getTaskId) .collect(Collectors.toList()), taskType, workerId, domain); @@ -141,9 +139,9 @@ public Task getPendingTaskForWorkflow(@PathParam("workflowId") String workflowId @POST @ApiOperation("Update a task") public String updateTask(TaskResult taskResult) throws Exception { - logger.debug("Update Task: {} with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); + LOGGER.debug("Update Task: {} with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); taskService.updateTask(taskResult); - logger.debug("Task: {} updated successfully with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); + LOGGER.debug("Task: {} updated successfully with callback time: {}", taskResult, taskResult.getCallbackAfterSeconds()); return "\"" + taskResult.getTaskId() + "\""; } @@ -152,7 +150,7 @@ public String updateTask(TaskResult taskResult) throws Exception { @ApiOperation("Ack Task is recieved") @Consumes({ MediaType.WILDCARD }) public String ack(@PathParam("taskId") String taskId, @QueryParam("workerid") String workerId) throws Exception { - logger.debug("Ack received for task: {} from worker: {}", taskId, workerId); + LOGGER.debug("Ack received for task: {} from worker: {}", taskId, workerId); return "" + taskService.ackTaskReceived(taskId); } From 855a302b525a3a9271c9f903ce8eb76bb7555f07 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Thu, 28 Jun 2018 17:00:58 +0200 Subject: [PATCH 076/163] grpc: Document the ProtoMapper methods --- .../netflix/conductor/grpc/ProtoMapper.java | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java index 092b1f9465..2f120a49a7 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java @@ -13,11 +13,36 @@ import java.util.Map; import java.util.stream.Collectors; +/** + * ProtoMapper implements conversion code between the internal models + * used by 
Conductor (POJOs) and their corresponding equivalents in
+ * the exposed Protocol Buffers interface.
+ *
+ * The vast majority of the mapping logic is implemented in the autogenerated
+ * {@link AbstractProtoMapper} class. This class only implements the custom
+ * logic for objects that need to be special cased in the API.
+ */
 public final class ProtoMapper extends AbstractProtoMapper {
     public static final ProtoMapper INSTANCE = new ProtoMapper();

     private ProtoMapper() {}

+    /**
+     * Convert an {@link Object} instance into its equivalent {@link Value}
+     * ProtoBuf object.
+     *
+     * The {@link Value} ProtoBuf message is a variant type that can define any
+     * value representable as a native JSON type. Consequently, this method expects
+     * the given {@link Object} instance to be a Java object instance of JSON-native
+     * value, namely: null, {@link Boolean}, {@link Double}, {@link String},
+     * {@link Map}, {@link List<Object>}.
+     *
+     * Any other values will cause an exception to be thrown.
+     * See {@link ProtoMapper#fromProto(Value)} for the reverse mapping.
+     *
+     * @param val a Java object that can be represented natively in JSON
+     * @return an instance of a {@link Value} ProtoBuf message
+     */
     @Override
     public Value toProto(Object val) {
         Value.Builder builder = Value.newBuilder();
@@ -49,6 +74,16 @@ private ProtoMapper() {}
         return builder.build();
     }

+    /**
+     * Convert a ProtoBuf {@link Value} message into its native Java object
+     * equivalent.
+     *
+     * See {@link ProtoMapper#toProto(Object)} for the reverse mapping and the
+     * possible values that can be returned from this method.
+     *
+     * @param any an instance of a ProtoBuf {@link Value} message
+     * @return a native Java object representing the value
+     */
     @Override
     public Object fromProto(Value any) {
         switch (any.getKindCase()) {
@@ -78,11 +113,24 @@ public Object fromProto(Value any) {
         }
     }

+    /**
+     * Convert a WorkflowTaskList message wrapper into a {@link List} instance
+     * with its contents.
+     *
+     * @param list an instance of a ProtoBuf message
+     * @return a list with the contents of the message
+     */
     @Override
     public List fromProto(WorkflowTaskPb.WorkflowTask.WorkflowTaskList list) {
         return list.getTasksList().stream().map(this::fromProto).collect(Collectors.toList());
     }

+    /**
+     * Convert a list of {@link WorkflowTask} instances into a ProtoBuf wrapper object.
+     *
+     * @param list a list of {@link WorkflowTask} instances
+     * @return a ProtoBuf message wrapping the contents of the list
+     */
     @Override
     public WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List list) {
         return WorkflowTaskPb.WorkflowTask.WorkflowTaskList.newBuilder()

From ee8dff3c85e4352cc2af3cac6e7d9caa4fc9ea93 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Thu, 28 Jun 2018 18:25:11 +0200
Subject: [PATCH 077/163] grpc-client: Add missing curly braces

---
 .../com/netflix/conductor/client/grpc/TaskClient.java | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
index 4eef15464c..7e2b62786d 100644
--- a/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
+++ b/grpc-client/src/main/java/com/netflix/conductor/client/grpc/TaskClient.java
@@ -100,10 +100,12 @@ public List getPendingTasksByType(String taskType, @Nullable String startK
         TaskServicePb.TasksInProgressRequest.Builder request = TaskServicePb.TasksInProgressRequest.newBuilder();
         request.setTaskType(taskType);

-        if (startKey != null)
+        if (startKey != null) {
             request.setStartKey(startKey);
-        if (count != null)
+        }
+        if (count != null) {
             request.setCount(count);
+        }

         return stub.getTasksInProgress(request.build())
                 .getTasksList()
@@ -157,8 +159,9 @@ public boolean ack(String taskId, @Nullable String workerId) {
         TaskServicePb.AckTaskRequest.Builder request = TaskServicePb.AckTaskRequest.newBuilder();
         request.setTaskId(taskId);

-        if (workerId != null)
+        if (workerId != null) {
             request.setWorkerId(workerId);
+        }

         return stub.ackTask(request.build()).getAck();
     }

From e078761525ba8200d3c56f682168b3d12ade0e23 Mon Sep 17 00:00:00 2001
From: Charles Zhao
Date: Wed, 4 Jul 2018 11:47:19 -0700
Subject: [PATCH 078/163] fixed javadoc build issue

---
 grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
index 2f120a49a7..d43bf96fc5 100644
--- a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
+++ b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java
@@ -35,7 +35,7 @@ private ProtoMapper() {}
      * value representable as a native JSON type. Consequently, this method expects
      * the given {@link Object} instance to be a Java object instance of JSON-native
      * value, namely: null, {@link Boolean}, {@link Double}, {@link String},
-     * {@link Map}, {@link List<Object>}.
+     * {@link Map}, {@link List}.
      *
      * Any other values will cause an exception to be thrown.
      * See {@link ProtoMapper#fromProto(Value)} for the reverse mapping.
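The two patches above document ProtoMapper's Value conversions; a minimal round-trip sketch makes the contract concrete. Only ProtoMapper.INSTANCE, toProto(Object) and fromProto(Value) appear in the diffs above; the wrapper class and the sample map values below are hypothetical, for illustration only.

import com.google.protobuf.Value;
import com.netflix.conductor.grpc.ProtoMapper;

import java.util.HashMap;
import java.util.Map;

public class ProtoMapperRoundTrip {
    public static void main(String[] args) {
        ProtoMapper mapper = ProtoMapper.INSTANCE;

        // Per the javadoc above, toProto(Object) accepts only JSON-native
        // values: null, Boolean, Double, String, Map and List.
        Map<String, Object> input = new HashMap<>();
        input.put("owner", "conductor");
        input.put("retries", 3.0); // JSON numbers travel as doubles
        input.put("failFast", Boolean.TRUE);

        // Encode the map into ProtoBuf's Value variant type; the cast picks
        // the toProto(Object) overload documented in the patch above.
        Value encoded = mapper.toProto((Object) input);

        // Decode back into plain Java objects; the javadoc above states that
        // any other input type throws an exception instead.
        Object decoded = mapper.fromProto(encoded);
        System.out.println(decoded); // expected to print the original map
    }
}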
From 2ebe7d80a2d67ea4fd19d0dde78f42b74b8ed995 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 25 Jun 2018 11:52:06 +0200 Subject: [PATCH 079/163] server: Export ProtoBuf Any through the REST API --- .../conductor/client/http/ClientBase.java | 10 +- common/build.gradle | 3 + .../conductor/common/utils/JsonUtils.java | 139 ++++++++++++++++++ core/build.gradle | 2 - .../core/execution/TestDeciderService.java | 14 +- .../conductor/core/utils/JsonUtilsTest.java | 42 ++++++ grpc/build.gradle | 2 +- .../conductor/dao/mysql/MySQLBaseDAOTest.java | 15 +- .../dao/dynomite/DynoQueueDAOTest.java | 13 +- .../dao/dynomite/RedisExecutionDAOTest.java | 13 +- .../dao/dynomite/RedisMetadataDAOTest.java | 13 +- .../conductor/server/JerseyModule.java | 11 +- .../conductor/tests/utils/TestModule.java | 3 + versionsOfDependencies.gradle | 1 + 14 files changed, 203 insertions(+), 78 deletions(-) create mode 100644 common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java create mode 100644 core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java diff --git a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java b/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java index 2b932696f9..c30d62423b 100644 --- a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java +++ b/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java @@ -18,10 +18,9 @@ */ package com.netflix.conductor.client.http; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; +import com.netflix.conductor.common.utils.JsonUtils; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientHandler; import com.sun.jersey.api.client.ClientHandlerException; @@ -62,12 +61,7 @@ protected ClientBase(ClientConfig clientConfig) { } protected ClientBase(ClientConfig clientConfig, ClientHandler handler) { - objectMapper = new ObjectMapper(); - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.setSerializationInclusion(Include.NON_NULL); - objectMapper.setSerializationInclusion(Include.NON_EMPTY); + objectMapper = JsonUtils.getMapper(); JacksonJsonProvider provider = new JacksonJsonProvider(objectMapper); clientConfig.getSingletons().add(provider); diff --git a/common/build.gradle b/common/build.gradle index 281bedba4d..fe31db64ca 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -1,4 +1,7 @@ dependencies { compile "com.github.rholder:guava-retrying:${revGuavaRetrying}" compile "org.slf4j:slf4j-api:${revSlf4j}" + compile "com.google.protobuf:protobuf-java:${revProtoBuf}" + compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" + compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" } \ No newline at end of file diff --git a/common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java b/common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java new file mode 100644 index 0000000000..faf9b223aa --- /dev/null +++ b/common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java @@ -0,0 +1,139 @@ +package com.netflix.conductor.common.utils; + +import 
com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.core.JsonGenerator;
+import com.fasterxml.jackson.core.JsonParser;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.*;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.google.protobuf.Any;
+import com.google.protobuf.ByteString;
+import com.google.protobuf.Message;
+
+import java.io.IOException;
+
+public class JsonUtils {
+    private JsonUtils() {}
+
+    /**
+     * JsonProtoModule can be registered into an {@link ObjectMapper}
+     * to enable the serialization and deserialization of ProtoBuf objects
+     * from/to JSON.
+     *
+     * Right now this module only provides (de)serialization for the {@link Any}
+     * ProtoBuf type, as this is the only ProtoBuf object which we're currently
+     * exposing through the REST API.
+     *
+     * {@see AnySerializer}, {@see AnyDeserializer}
+     */
+    private static class JsonProtoModule extends SimpleModule {
+        /**
+         * AnySerializer converts a ProtoBuf {@link Any} object into its JSON
+         * representation.
+         *
+         * This is not a canonical ProtoBuf JSON representation. Let us
+         * explain what we're trying to accomplish here:
+         *
+         * The {@link Any} ProtoBuf message is a type in the PB standard library that
+         * can store any other arbitrary ProtoBuf message in a type-safe way, even
+         * when the server has no knowledge of the schema of the stored message.
+         *
+         * It accomplishes this by storing a tuple of information: a URL-like type
+         * declaration for the stored message, and the serialized binary encoding
+         * of the stored message itself. Language-specific implementations of ProtoBuf
+         * provide helper methods to encode and decode arbitrary messages into an
+         * {@link Any} object ({@link Any#pack(Message)} in Java).
+         *
+         * We want to expose these {@link Any} objects in the REST API because they've
+         * been introduced as part of the new GRPC interface to Conductor, but unfortunately
+         * we cannot encode them using their canonical ProtoBuf JSON encoding. According to
+         * the docs:
+         *
+         *      The JSON representation of an `Any` value uses the regular
+         *      representation of the deserialized, embedded message, with an
+         *      additional field `@type` which contains the type URL. Example:
+         *
+         *      package google.profile;
+         *      message Person {
+         *        string first_name = 1;
+         *        string last_name = 2;
+         *      }
+         *      {
+         *        "@type": "type.googleapis.com/google.profile.Person",
+         *        "firstName": <string>,
+         *        "lastName": <string>
+         *      }
+         *
+         * In order to accomplish this representation, the PB-JSON encoder needs to have
+         * knowledge of all the ProtoBuf messages that could be serialized inside the
+         * {@link Any} message. This is not possible to accomplish inside the Conductor server,
+         * which is simply passing through arbitrary payloads from/to clients.
+         *
+         * Consequently, to actually expose the Message through the REST API, we must create
+         * a custom encoding that contains the raw data of the serialized message, as we are
+         * not able to deserialize it on the server. We simply return a dictionary with
+         * '@type' and '@value' keys, where '@type' is identical to the canonical representation,
+         * but '@value' contains a base64 encoded string with the binary data of the serialized
+         * message.
+         *
+         * Since all the provided Conductor clients are aware of this encoding, it's always possible
+         * to re-build the original {@link Any} message regardless of the client's language.
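+         *
+         * For illustration (these are made-up sample values, not the output of
+         * a real serializer), an {@link Any} wrapping the Person message from
+         * the example above would be rendered by this custom encoding as:
+         *
+         *      {
+         *        "@type": "type.googleapis.com/google.profile.Person",
+         *        "@value": "CgRqb2huEgNkb2U="
+         *      }
+         *
+         * where '@value' is a base64 placeholder for the serialized message bytes.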
+ * + * {@see AnyDeserializer} + */ + protected class AnySerializer extends JsonSerializer { + @Override + public void serialize(Any value, JsonGenerator jgen, SerializerProvider provider) + throws IOException, JsonProcessingException { + jgen.writeStartObject(); + jgen.writeStringField("@type", value.getTypeUrl()); + jgen.writeBinaryField("@value", value.getValue().toByteArray()); + jgen.writeEndObject(); + } + } + + /** + * AnyDeserializer converts the custom JSON representation of an {@link Any} value + * into its original form. + * + * {@see AnySerializer} for details on this representation. + */ + protected class AnyDeserializer extends JsonDeserializer { + @Override + public Any deserialize(JsonParser p, DeserializationContext ctxt) + throws IOException, JsonProcessingException { + JsonNode root = p.getCodec().readTree(p); + JsonNode type = root.get("@type"); + JsonNode value = root.get("@value"); + + if (type == null || !type.isTextual()) + throw ctxt.reportMappingException("invalid '@type' field when deserializing ProtoBuf Any object"); + + if (value == null || !value.isTextual()) + throw ctxt.reportMappingException("invalid '@value' field when deserializing ProtoBuf Any object"); + + return Any.newBuilder() + .setTypeUrl(type.textValue()) + .setValue(ByteString.copyFrom(value.binaryValue())) + .build(); + } + } + + public JsonProtoModule() { + super("ConductorJsonProtoModule"); + addSerializer(Any.class, new AnySerializer()); + addDeserializer(Any.class, new AnyDeserializer()); + } + } + + public static ObjectMapper getMapper() { + final ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); + objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); + objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL); + objectMapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY); + objectMapper.registerModule(new JsonProtoModule()); + return objectMapper; + } +} diff --git a/core/build.gradle b/core/build.gradle index d8dd5c6392..f9abcc2442 100644 --- a/core/build.gradle +++ b/core/build.gradle @@ -8,8 +8,6 @@ dependencies { compile "com.netflix.servo:servo-core:${revServo}" compile "com.netflix.spectator:spectator-api:${revSpectator}" - compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" - compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" compile "com.jayway.jsonpath:json-path:${revJsonPath}" compile "org.apache.commons:commons-lang3:${revCommonsLang3}" diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index d564fa953f..ba4665d9b6 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -18,8 +18,6 @@ */ package com.netflix.conductor.core.execution; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import com.netflix.conductor.common.metadata.tasks.Task; @@ -44,7 +42,7 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import 
com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.dao.ExecutionDAO; +import com.netflix.conductor.common.utils.JsonUtils; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.spectator.api.Counter; import com.netflix.spectator.api.DefaultRegistry; @@ -94,15 +92,7 @@ public class TestDeciderService { private static Registry registry; - private static ObjectMapper objectMapper = new ObjectMapper(); - - static { - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.setSerializationInclusion(Include.NON_NULL); - objectMapper.setSerializationInclusion(Include.NON_EMPTY); - } + private static ObjectMapper objectMapper = JsonUtils.getMapper(); @BeforeClass public static void init() { diff --git a/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java new file mode 100644 index 0000000000..ccc8f94056 --- /dev/null +++ b/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java @@ -0,0 +1,42 @@ +package com.netflix.conductor.core.utils; + +import com.fasterxml.jackson.core.JsonGenerationException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.protobuf.Any; +import com.google.protobuf.Struct; +import com.google.protobuf.Value; +import com.netflix.conductor.common.utils.JsonUtils; +import org.junit.Test; + +import java.io.IOException; +import java.io.StringWriter; + +import static org.junit.Assert.*; + +public class JsonUtilsTest { + @Test + public void testSimpleMapping() throws JsonGenerationException, JsonMappingException, IOException { + ObjectMapper m = JsonUtils.getMapper(); + assertTrue(m.canSerialize(Any.class)); + + Struct struct1 = Struct.newBuilder().putFields( + "some-key", Value.newBuilder().setStringValue("some-value").build() + ).build(); + + Any source = Any.pack(struct1); + + StringWriter buf = new StringWriter(); + m.writer().writeValue(buf, source); + + Any dest = m.reader().forType(Any.class).readValue(buf.toString()); + assertEquals(source.getTypeUrl(), dest.getTypeUrl()); + + Struct struct2 = dest.unpack(Struct.class); + assertTrue(struct2.containsFields("some-key")); + assertEquals( + struct1.getFieldsOrThrow("some-key").getStringValue(), + struct2.getFieldsOrThrow("some-key").getStringValue() + ); + } +} \ No newline at end of file diff --git a/grpc/build.gradle b/grpc/build.gradle index c59abe8448..910260bb00 100644 --- a/grpc/build.gradle +++ b/grpc/build.gradle @@ -21,7 +21,7 @@ dependencies { protobuf { protoc { - artifact = 'com.google.protobuf:protoc:3.5.1-1' + artifact = "com.google.protobuf:protoc:${revProtoBuf}" } plugins { grpc { diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java index e98d11770f..f8cde6e247 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java @@ -1,10 +1,9 @@ package com.netflix.conductor.dao.mysql; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; import 
com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.common.utils.JsonUtils; import com.zaxxer.hikari.HikariDataSource; import org.flywaydb.core.Flyway; @@ -27,7 +26,7 @@ public class MySQLBaseDAOTest { protected final Logger logger = LoggerFactory.getLogger(getClass()); protected final DataSource dataSource; protected final TestConfiguration testConfiguration = new TestConfiguration(); - protected final ObjectMapper objectMapper = createObjectMapper(); + protected final ObjectMapper objectMapper = JsonUtils.getMapper(); protected final DB db = EmbeddedDatabase.INSTANCE.getDB(); static AtomicBoolean migrated = new AtomicBoolean(false); @@ -71,16 +70,6 @@ private synchronized static void flywayMigrate(DataSource dataSource) { } } - private static ObjectMapper createObjectMapper() { - ObjectMapper om = new ObjectMapper(); - om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - om.setSerializationInclusion(JsonInclude.Include.NON_NULL); - om.setSerializationInclusion(JsonInclude.Include.NON_EMPTY); - return om; - } - protected void resetAllData() { logger.info("Resetting data for test"); try (Connection connection = dataSource.getConnection()) { diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java index 008f8f6bec..724a51d11e 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java @@ -15,10 +15,9 @@ */ package com.netflix.conductor.dao.dynomite; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.config.TestConfiguration; +import com.netflix.conductor.common.utils.JsonUtils; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; import com.netflix.conductor.dao.redis.JedisMock; @@ -49,15 +48,7 @@ public class DynoQueueDAOTest { private QueueDAO dao; - private static ObjectMapper om = new ObjectMapper(); - - static { - om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - om.setSerializationInclusion(Include.NON_NULL); - om.setSerializationInclusion(Include.NON_EMPTY); - } + private static ObjectMapper om = JsonUtils.getMapper(); @Before public void init() throws Exception { diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java index 13df1c6bc7..be031d1e43 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java @@ -18,8 +18,6 @@ */ package com.netflix.conductor.dao.dynomite; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import 
com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; @@ -29,6 +27,7 @@ import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; +import com.netflix.conductor.common.utils.JsonUtils; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.conductor.dyno.DynoProxy; @@ -75,15 +74,7 @@ public class RedisExecutionDAOTest { @Mock private IndexDAO indexDAO; - private static ObjectMapper objectMapper = new ObjectMapper(); - - static { - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.setSerializationInclusion(Include.NON_NULL); - objectMapper.setSerializationInclusion(Include.NON_EMPTY); - } + private static ObjectMapper objectMapper = JsonUtils.getMapper(); @SuppressWarnings("unchecked") @Before diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java index 90df9f7ff0..25b8011db1 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java @@ -28,12 +28,11 @@ import java.util.UUID; import java.util.stream.Collectors; +import com.netflix.conductor.common.utils.JsonUtils; import org.apache.commons.lang.builder.EqualsBuilder; import org.junit.Before; import org.junit.Test; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.events.EventHandler.Action; @@ -59,16 +58,8 @@ public class RedisMetadataDAOTest { private RedisMetadataDAO dao; - private static ObjectMapper om = new ObjectMapper(); + private static ObjectMapper om = JsonUtils.getMapper(); - static { - om.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - om.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - om.setSerializationInclusion(Include.NON_NULL); - om.setSerializationInclusion(Include.NON_EMPTY); - } - @Before public void init() { Configuration config = new TestConfiguration(); diff --git a/server/src/main/java/com/netflix/conductor/server/JerseyModule.java b/server/src/main/java/com/netflix/conductor/server/JerseyModule.java index 3b711c82fc..b0d344a2e1 100644 --- a/server/src/main/java/com/netflix/conductor/server/JerseyModule.java +++ b/server/src/main/java/com/netflix/conductor/server/JerseyModule.java @@ -28,11 +28,10 @@ import javax.servlet.ServletResponse; import javax.servlet.http.HttpServletResponse; -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; import 
com.google.inject.Provides; +import com.netflix.conductor.common.utils.JsonUtils; import com.sun.jersey.api.core.PackagesResourceConfig; import com.sun.jersey.api.core.ResourceConfig; import com.sun.jersey.guice.JerseyServletModule; @@ -62,13 +61,7 @@ protected void configureServlets() { @Provides @Singleton public ObjectMapper objectMapper() { - final ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); - objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false); - objectMapper.setSerializationInclusion(Include.NON_NULL); - objectMapper.setSerializationInclusion(Include.NON_EMPTY); - return objectMapper; + return JsonUtils.getMapper(); } @Provides diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java index 9112ee9a9a..a54735f251 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java @@ -15,11 +15,13 @@ */ package com.netflix.conductor.tests.utils; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; +import com.netflix.conductor.common.utils.JsonUtils; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.MetadataDAO; @@ -81,6 +83,7 @@ public String getCurrentShard() { bind(JedisCommands.class).toProvider(InMemoryJedisProvider.class); install(new CoreModule()); bind(UserTask.class).asEagerSingleton(); + bind(ObjectMapper.class).toInstance(JsonUtils.getMapper()); } @Provides diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index 43162730fc..ade3a588d8 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -41,6 +41,7 @@ ext { revJettyServlet = '9.3.9.v20160517' revOauthClient = '1.19.4' revOauthSignature = '1.19.4' + revProtoBuf = '3.5.1' revRarefiedRedis = '0.0.17' revServo = '0.12.17' revServletApi = '3.1.0' From b0b374a246fcfcbeec76d17db01a7e9085ab92c0 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 25 Jun 2018 18:26:33 +0200 Subject: [PATCH 080/163] common: Add `inputMessage` and `outputMessage` to Task-related POJOs --- .../conductor/client/http/TaskClient.java | 1 + .../common/metadata/events/EventHandler.java | 28 ++++++++++--- .../conductor/common/metadata/tasks/Task.java | 28 ++++++++++++- .../common/metadata/tasks/TaskResult.java | 13 ++++++ .../metadata/workflow/SkipTaskRequest.java | 25 ++++++++++- .../core/events/ActionProcessor.java | 3 +- .../core/execution/WorkflowExecutor.java | 4 ++ .../conductor/grpc/AbstractProtoMapper.java | 41 +++++++++++++++++++ .../netflix/conductor/grpc/ProtoMapper.java | 15 +++++-- grpc/src/main/proto/model/eventhandler.proto | 3 ++ .../main/proto/model/skiptaskrequest.proto | 3 ++ grpc/src/main/proto/model/task.proto | 3 ++ grpc/src/main/proto/model/taskresult.proto | 2 + .../protogen/types/AbstractType.java | 9 ++++ .../dao/dynomite/RedisExecutionDAO.java | 1 + 15 files changed, 166 insertions(+), 13 deletions(-) diff --git 
a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java b/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java index d096a5a023..36eca9540c 100644 --- a/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java +++ b/client/src/main/java/com/netflix/conductor/client/http/TaskClient.java @@ -225,6 +225,7 @@ public void updateTask(TaskResult taskResult, String taskType) { taskResult.setReasonForIncompletion(String.format("The TaskResult payload: %d is greater than the permissible 3MB", taskResultSize)); taskResult.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR); taskResult.setOutputData(null); + taskResult.setOutputMessage(null); } } catch (Exception e) { logger.error("Unable to parse the TaskResult: {}", taskResult); diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java index cb60e0a4a4..14820fe978 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java @@ -18,6 +18,7 @@ */ package com.netflix.conductor.common.metadata.events; +import com.google.protobuf.Any; import com.netflix.conductor.common.annotations.ProtoEnum; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; @@ -239,6 +240,9 @@ public static class TaskDetails { @ProtoField(id = 3) private Map output = new HashMap<>(); + @ProtoField(id = 4) + private Any outputMessage; + /** * @return the workflowId */ @@ -283,9 +287,14 @@ public Map getOutput() { public void setOutput(Map output) { this.output = output; } - - - + + public Any getOutputMessage() { + return outputMessage; + } + + public void setOutputMessage(Any outputMessage) { + this.outputMessage = outputMessage; + } } @ProtoMessage @@ -303,6 +312,9 @@ public static class StartWorkflow { @ProtoField(id = 4) private Map input = new HashMap<>(); + @ProtoField(id = 5) + private Any inputMessage; + /** * @return the name */ @@ -363,8 +375,14 @@ public Map getInput() { public void setInput(Map input) { this.input = input; } - - + + public Any getInputMessage() { + return inputMessage; + } + + public void setInputMessage(Any inputMessage) { + this.inputMessage = inputMessage; + } } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index ab1b24acbd..bf0108f784 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -15,6 +15,7 @@ */ package com.netflix.conductor.common.metadata.tasks; +import com.google.protobuf.Any; import com.netflix.conductor.common.annotations.ProtoEnum; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; @@ -161,6 +162,12 @@ public boolean isRetriable() { @ProtoField(id = 28) private String domain; + @ProtoField(id = 29) + private Any inputMessage; + + @ProtoField(id = 30) + private Any outputMessage; + public Task() { } @@ -572,8 +579,23 @@ public void setDomain(String domain) { this.domain = domain; } - public Task copy() { + public Any getInputMessage() { + return inputMessage; + } + + public void setInputMessage(Any inputMessage) { + this.inputMessage = inputMessage; + } + public Any 
getOutputMessage() { + return outputMessage; + } + + public void setOutputMessage(Any outputMessage) { + this.outputMessage = outputMessage; + } + + public Task copy() { Task copy = new Task(); copy.setCallbackAfterSeconds(callbackAfterSeconds); copy.setCallbackFromWorker(callbackFromWorker); @@ -594,6 +616,8 @@ public Task copy() { copy.setWorkerId(workerId); copy.setWorkflowTask(workflowTask); copy.setDomain(domain); + copy.setInputMessage(inputMessage); + copy.setOutputMessage(outputMessage); return copy; } @@ -627,6 +651,8 @@ public String toString() { ", outputData=" + outputData + ", workflowTask=" + workflowTask + ", domain='" + domain + '\'' + + ", inputMessage='" + inputMessage + '\'' + + ", outputMessage='" + outputMessage + '\'' + '}'; } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java index 65060de8a4..75bcf1c178 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java @@ -18,6 +18,7 @@ */ package com.netflix.conductor.common.metadata.tasks; +import com.google.protobuf.Any; import com.netflix.conductor.common.annotations.ProtoEnum; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; @@ -61,6 +62,9 @@ public enum Status { @ProtoField(id = 7) private Map outputData = new HashMap<>(); + @ProtoField(id = 8) + private Any outputMessage; + private List logs = new CopyOnWriteArrayList<>(); public TaskResult(Task task) { @@ -177,6 +181,14 @@ public TaskResult addOutputData(String key, Object value) { return this; } + public Any getOutputMessage() { + return outputMessage; + } + + public void setOutputMessage(Any outputMessage) { + this.outputMessage = outputMessage; + } + /** * * @return Task execution logs @@ -214,6 +226,7 @@ public String toString() { ", workerId='" + workerId + '\'' + ", status=" + status + ", outputData=" + outputData + + ", outputMessage=" + outputMessage + ", logs=" + logs + '}'; } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java index cfd76ef34c..649f0f654e 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java @@ -15,6 +15,7 @@ */ package com.netflix.conductor.common.metadata.workflow; +import com.google.protobuf.Any; import com.netflix.conductor.common.annotations.ProtoField; import com.netflix.conductor.common.annotations.ProtoMessage; @@ -28,6 +29,12 @@ public class SkipTaskRequest { @ProtoField(id = 2) private Map taskOutput; + @ProtoField(id = 3) + private Any taskInputMessage; + + @ProtoField(id = 4) + private Any taskOutputMessage; + public Map getTaskInput() { return taskInput; } @@ -43,6 +50,20 @@ public Map getTaskOutput() { public void setTaskOutput(Map taskOutput) { this.taskOutput = taskOutput; } - - + + public Any getTaskInputMessage() { + return taskInputMessage; + } + + public void setTaskInputMessage(Any taskInputMessage) { + this.taskInputMessage = taskInputMessage; + } + + public Any getTaskOutputMessage() { + return taskOutputMessage; + } + + public void setTaskOutputMessage(Any taskOutputMessage) { + this.taskOutputMessage = 
taskOutputMessage; + } } diff --git a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java index a70c5c550b..6478c682b8 100644 --- a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java +++ b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java @@ -109,9 +109,10 @@ private Map completeTask(Action action, Object payload, TaskDeta replaced.put("error", "No task found with reference name: " + taskRefName + ", workflowId: " + workflowId); return replaced; } - + task.setStatus(status); task.setOutputData(replaced); + task.setOutputMessage(taskDetails.getOutputMessage()); task.getOutputData().put("conductor.event.messageId", messageId); task.getOutputData().put("conductor.event.name", event); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 3ae2824ce2..592b834a67 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -442,6 +442,7 @@ public void updateTask(TaskResult taskResult) throws Exception { task.setStatus(COMPLETED); } task.setOutputData(taskResult.getOutputData()); + task.setOutputMessage(taskResult.getOutputMessage()); task.setReasonForIncompletion(taskResult.getReasonForIncompletion()); task.setWorkerId(taskResult.getWorkerId()); executionDAO.updateTask(task); @@ -465,6 +466,7 @@ public void updateTask(TaskResult taskResult) throws Exception { task.setStatus(valueOf(taskResult.getStatus().name())); task.setOutputData(taskResult.getOutputData()); + task.setOutputMessage(taskResult.getOutputMessage()); task.setReasonForIncompletion(taskResult.getReasonForIncompletion()); task.setWorkerId(taskResult.getWorkerId()); task.setCallbackAfterSeconds(taskResult.getCallbackAfterSeconds()); @@ -678,6 +680,8 @@ public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, Sk if (skipTaskRequest != null) { theTask.setInputData(skipTaskRequest.getTaskInput()); theTask.setOutputData(skipTaskRequest.getTaskOutput()); + theTask.setInputMessage(skipTaskRequest.getTaskInputMessage()); + theTask.setOutputMessage(skipTaskRequest.getTaskOutputMessage()); } executionDAO.createTasks(Arrays.asList(theTask)); decide(workflowId); diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index d8a44cec0b..594b0c058d 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -1,5 +1,6 @@ package com.netflix.conductor.grpc; +import com.google.protobuf.Any; import com.google.protobuf.Value; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.events.EventHandler; @@ -161,6 +162,9 @@ public EventHandlerPb.EventHandler.StartWorkflow toProto(EventHandler.StartWorkf for (Map.Entry pair : from.getInput().entrySet()) { to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } + if (from.getInputMessage() != null) { + to.setInputMessage( toProto( from.getInputMessage() ) ); + } return to.build(); } @@ -174,6 +178,9 @@ public EventHandler.StartWorkflow fromProto(EventHandlerPb.EventHandler.StartWor inputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } 
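        // setInput() below restores the map-typed input entry-by-entry; the new
        // Any-typed field is then copied only when present, since proto3 generates
        // a hasInputMessage() presence check for singular message fields and an
        // unset Any should stay null on the POJO rather than become a default instance.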
to.setInput(inputMap); + if (from.hasInputMessage()) { + to.setInputMessage( fromProto( from.getInputMessage() ) ); + } return to; } @@ -188,6 +195,9 @@ public EventHandlerPb.EventHandler.TaskDetails toProto(EventHandler.TaskDetails for (Map.Entry pair : from.getOutput().entrySet()) { to.putOutput( pair.getKey(), toProto( pair.getValue() ) ); } + if (from.getOutputMessage() != null) { + to.setOutputMessage( toProto( from.getOutputMessage() ) ); + } return to.build(); } @@ -200,6 +210,9 @@ public EventHandler.TaskDetails fromProto(EventHandlerPb.EventHandler.TaskDetail outputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutput(outputMap); + if (from.hasOutputMessage()) { + to.setOutputMessage( fromProto( from.getOutputMessage() ) ); + } return to; } @@ -343,6 +356,12 @@ public TaskPb.Task toProto(Task from) { if (from.getDomain() != null) { to.setDomain( from.getDomain() ); } + if (from.getInputMessage() != null) { + to.setInputMessage( toProto( from.getInputMessage() ) ); + } + if (from.getOutputMessage() != null) { + to.setOutputMessage( toProto( from.getOutputMessage() ) ); + } return to.build(); } @@ -386,6 +405,12 @@ public Task fromProto(TaskPb.Task from) { to.setWorkflowTask( fromProto( from.getWorkflowTask() ) ); } to.setDomain( from.getDomain() ); + if (from.hasInputMessage()) { + to.setInputMessage( fromProto( from.getInputMessage() ) ); + } + if (from.hasOutputMessage()) { + to.setOutputMessage( fromProto( from.getOutputMessage() ) ); + } return to; } @@ -558,6 +583,9 @@ public TaskResultPb.TaskResult toProto(TaskResult from) { for (Map.Entry pair : from.getOutputData().entrySet()) { to.putOutputData( pair.getKey(), toProto( pair.getValue() ) ); } + if (from.getOutputMessage() != null) { + to.setOutputMessage( toProto( from.getOutputMessage() ) ); + } return to.build(); } @@ -574,6 +602,9 @@ public TaskResult fromProto(TaskResultPb.TaskResult from) { outputDataMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setOutputData(outputDataMap); + if (from.hasOutputMessage()) { + to.setOutputMessage( fromProto( from.getOutputMessage() ) ); + } return to; } @@ -702,6 +733,12 @@ public SkipTaskRequest fromProto(SkipTaskRequestPb.SkipTaskRequest from) { taskOutputMap.put( pair.getKey(), fromProto( pair.getValue() ) ); } to.setTaskOutput(taskOutputMap); + if (from.hasTaskInputMessage()) { + to.setTaskInputMessage( fromProto( from.getTaskInputMessage() ) ); + } + if (from.hasTaskOutputMessage()) { + to.setTaskOutputMessage( fromProto( from.getTaskOutputMessage() ) ); + } return to; } @@ -1141,4 +1178,8 @@ public WorkflowSummary fromProto(WorkflowSummaryPb.WorkflowSummary from) { public abstract Value toProto(Object in); public abstract Object fromProto(Value in); + + public abstract Any toProto(Any in); + + public abstract Any fromProto(Any in); } diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java index d43bf96fc5..f39beb1355 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/ProtoMapper.java @@ -1,9 +1,6 @@ package com.netflix.conductor.grpc; -import com.google.protobuf.ListValue; -import com.google.protobuf.NullValue; -import com.google.protobuf.Struct; -import com.google.protobuf.Value; +import com.google.protobuf.*; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.proto.WorkflowTaskPb; @@ -137,4 +134,14 @@ public 
WorkflowTaskPb.WorkflowTask.WorkflowTaskList toProto(List l .addAllTasks(list.stream().map(this::toProto)::iterator) .build(); } + + @Override + public Any toProto(Any in) { + return in; + } + + @Override + public Any fromProto(Any in) { + return in; + } } diff --git a/grpc/src/main/proto/model/eventhandler.proto b/grpc/src/main/proto/model/eventhandler.proto index d4ff3611f8..5dd866230e 100644 --- a/grpc/src/main/proto/model/eventhandler.proto +++ b/grpc/src/main/proto/model/eventhandler.proto @@ -2,6 +2,7 @@ syntax = "proto3"; package conductor.proto; import "google/protobuf/struct.proto"; +import "google/protobuf/any.proto"; option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "EventHandlerPb"; @@ -13,11 +14,13 @@ message EventHandler { int32 version = 2; string correlation_id = 3; map input = 4; + google.protobuf.Any input_message = 5; } message TaskDetails { string workflow_id = 1; string task_ref_name = 2; map output = 3; + google.protobuf.Any output_message = 4; } message Action { enum Type { diff --git a/grpc/src/main/proto/model/skiptaskrequest.proto b/grpc/src/main/proto/model/skiptaskrequest.proto index aef7ac2b93..323e5162fe 100644 --- a/grpc/src/main/proto/model/skiptaskrequest.proto +++ b/grpc/src/main/proto/model/skiptaskrequest.proto @@ -2,6 +2,7 @@ syntax = "proto3"; package conductor.proto; import "google/protobuf/struct.proto"; +import "google/protobuf/any.proto"; option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "SkipTaskRequestPb"; @@ -10,4 +11,6 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model" message SkipTaskRequest { map task_input = 1; map task_output = 2; + google.protobuf.Any task_input_message = 3; + google.protobuf.Any task_output_message = 4; } diff --git a/grpc/src/main/proto/model/task.proto b/grpc/src/main/proto/model/task.proto index 82ecf136af..a27b24c3b1 100644 --- a/grpc/src/main/proto/model/task.proto +++ b/grpc/src/main/proto/model/task.proto @@ -3,6 +3,7 @@ package conductor.proto; import "model/workflowtask.proto"; import "google/protobuf/struct.proto"; +import "google/protobuf/any.proto"; option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskPb"; @@ -49,4 +50,6 @@ message Task { map output_data = 26; WorkflowTask workflow_task = 27; string domain = 28; + google.protobuf.Any input_message = 29; + google.protobuf.Any output_message = 30; } diff --git a/grpc/src/main/proto/model/taskresult.proto b/grpc/src/main/proto/model/taskresult.proto index ddd1933cac..d94fd25716 100644 --- a/grpc/src/main/proto/model/taskresult.proto +++ b/grpc/src/main/proto/model/taskresult.proto @@ -2,6 +2,7 @@ syntax = "proto3"; package conductor.proto; import "google/protobuf/struct.proto"; +import "google/protobuf/any.proto"; option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "TaskResultPb"; @@ -22,4 +23,5 @@ message TaskResult { string worker_id = 5; TaskResult.Status status = 6; map output_data = 7; + google.protobuf.Any output_message = 8; } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java index cfbc02d557..0a73be77e0 100644 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java +++ b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java @@ -1,6 +1,7 @@ package com.netflix.conductor.protogen.types; import 
com.google.common.base.CaseFormat; +import com.google.protobuf.Any; import com.netflix.conductor.protogen.*; import com.squareup.javapoet.ClassName; import com.squareup.javapoet.MethodSpec; @@ -32,6 +33,14 @@ private static void addScalar(Type t, String protoType) { "google.protobuf.Value", "google/protobuf/struct.proto") ); + + TYPES.put(Any.class, + new ExternMessageType( + Any.class, + ClassName.get(Any.class), + "google.protobuf.Any", + "google/protobuf/any.proto") + ); } static Map PROTO_LIST_TYPES = new HashMap<>(); diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index 444ff7d5f4..f7f381834a 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -223,6 +223,7 @@ public void updateTask(Task task) { task.setReasonForIncompletion(String.format("Payload of the task: %s larger than the permissible %s bytes", FileUtils.byteCountToDisplaySize(payload.length()), FileUtils.byteCountToDisplaySize(taskPayloadThreshold))); task.setOutputData(null); + task.setOutputMessage(null); task.setStatus(Status.FAILED_WITH_TERMINAL_ERROR); payload = toJson(task); } From f86f3160becc62bf57f8cdbe736ed24d09523cd7 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 26 Jun 2018 16:37:04 +0200 Subject: [PATCH 081/163] common: Make JsonUtils into an injection provider --- .../netflix/conductor/client/http/ClientBase.java | 4 ++-- common/build.gradle | 1 + .../{JsonUtils.java => JsonMapperProvider.java} | 12 +++++++----- .../conductor/core/execution/TestDeciderService.java | 4 ++-- ...sonUtilsTest.java => JsonMapperProviderTest.java} | 6 +++--- .../conductor/dao/mysql/MySQLBaseDAOTest.java | 4 ++-- .../conductor/dao/dynomite/DynoQueueDAOTest.java | 4 ++-- .../dao/dynomite/RedisExecutionDAOTest.java | 4 ++-- .../conductor/dao/dynomite/RedisMetadataDAOTest.java | 4 ++-- .../netflix/conductor/bootstrap/BootstrapModule.java | 1 + .../com/netflix/conductor/server/JerseyModule.java | 12 ++---------- .../com/netflix/conductor/server/ServerModule.java | 3 +++ .../netflix/conductor/tests/utils/TestModule.java | 4 ++-- 13 files changed, 31 insertions(+), 32 deletions(-) rename common/src/main/java/com/netflix/conductor/common/utils/{JsonUtils.java => JsonMapperProvider.java} (94%) rename core/src/test/java/com/netflix/conductor/core/utils/{JsonUtilsTest.java => JsonMapperProviderTest.java} (89%) diff --git a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java b/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java index c30d62423b..e7abda50ec 100644 --- a/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java +++ b/client/src/main/java/com/netflix/conductor/client/http/ClientBase.java @@ -20,7 +20,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientHandler; import com.sun.jersey.api.client.ClientHandlerException; @@ -61,7 +61,7 @@ protected ClientBase(ClientConfig clientConfig) { } protected ClientBase(ClientConfig clientConfig, ClientHandler handler) { - objectMapper = JsonUtils.getMapper(); + objectMapper = new 
JsonMapperProvider().get(); JacksonJsonProvider provider = new JacksonJsonProvider(objectMapper); clientConfig.getSingletons().add(provider); diff --git a/common/build.gradle b/common/build.gradle index fe31db64ca..4fabafa19c 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -4,4 +4,5 @@ dependencies { compile "com.google.protobuf:protobuf-java:${revProtoBuf}" compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" + compile "javax.inject:javax.inject:1" } \ No newline at end of file diff --git a/common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java b/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java similarity index 94% rename from common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java rename to common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java index faf9b223aa..94c38042d1 100644 --- a/common/src/main/java/com/netflix/conductor/common/utils/JsonUtils.java +++ b/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java @@ -10,10 +10,11 @@ import com.google.protobuf.ByteString; import com.google.protobuf.Message; +import javax.inject.Provider; import java.io.IOException; -public class JsonUtils { - private JsonUtils() {} +public class JsonMapperProvider implements Provider { + public JsonMapperProvider() {} /** * JsonProtoModule can be registered into an {@link ObjectMapper} @@ -76,8 +77,8 @@ private static class JsonProtoModule extends SimpleModule { * but '@value' contains a base64 encoded string with the binary data of the serialized * message. * - * Since all the provided Conductor clients are aware of this encoding, it's always possible - * to re-build the original {@link Any} message regardless of the client's language. + * Since all the provided Conductor clients are required to know this encoding, it's always + * possible to re-build the original {@link Any} message regardless of the client's language. 
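+     * For example, an Any that packs a google.protobuf.Struct round-trips through
+     * JSON roughly as {"@type": "type.googleapis.com/google.protobuf.Struct",
+     * "@value": "<base64-encoded bytes>"} (an illustrative shape; the type URL
+     * reflects whichever message was packed).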
* * {@see AnyDeserializer} */ @@ -126,7 +127,8 @@ public JsonProtoModule() { } } - public static ObjectMapper getMapper() { + @Override + public ObjectMapper get() { final ObjectMapper objectMapper = new ObjectMapper(); objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index ba4665d9b6..cf40fabb6f 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -42,7 +42,7 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.spectator.api.Counter; import com.netflix.spectator.api.DefaultRegistry; @@ -92,7 +92,7 @@ public class TestDeciderService { private static Registry registry; - private static ObjectMapper objectMapper = JsonUtils.getMapper(); + private static ObjectMapper objectMapper = new JsonMapperProvider().get(); @BeforeClass public static void init() { diff --git a/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java b/core/src/test/java/com/netflix/conductor/core/utils/JsonMapperProviderTest.java similarity index 89% rename from core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java rename to core/src/test/java/com/netflix/conductor/core/utils/JsonMapperProviderTest.java index ccc8f94056..ced225cf1a 100644 --- a/core/src/test/java/com/netflix/conductor/core/utils/JsonUtilsTest.java +++ b/core/src/test/java/com/netflix/conductor/core/utils/JsonMapperProviderTest.java @@ -6,7 +6,7 @@ import com.google.protobuf.Any; import com.google.protobuf.Struct; import com.google.protobuf.Value; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import org.junit.Test; import java.io.IOException; @@ -14,10 +14,10 @@ import static org.junit.Assert.*; -public class JsonUtilsTest { +public class JsonMapperProviderTest { @Test public void testSimpleMapping() throws JsonGenerationException, JsonMappingException, IOException { - ObjectMapper m = JsonUtils.getMapper(); + ObjectMapper m = new JsonMapperProvider().get(); assertTrue(m.canSerialize(Any.class)); Struct struct1 = Struct.newBuilder().putFields( diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java index f8cde6e247..62a4823901 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java @@ -3,7 +3,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.zaxxer.hikari.HikariDataSource; import org.flywaydb.core.Flyway; @@ 
-26,7 +26,7 @@ public class MySQLBaseDAOTest { protected final Logger logger = LoggerFactory.getLogger(getClass()); protected final DataSource dataSource; protected final TestConfiguration testConfiguration = new TestConfiguration(); - protected final ObjectMapper objectMapper = JsonUtils.getMapper(); + protected final ObjectMapper objectMapper = new JsonMapperProvider().get(); protected final DB db = EmbeddedDatabase.INSTANCE.getDB(); static AtomicBoolean migrated = new AtomicBoolean(false); diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java index 724a51d11e..2484c89597 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/DynoQueueDAOTest.java @@ -17,7 +17,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.config.TestConfiguration; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.dao.dynomite.queue.DynoQueueDAO; import com.netflix.conductor.dao.redis.JedisMock; @@ -48,7 +48,7 @@ public class DynoQueueDAOTest { private QueueDAO dao; - private static ObjectMapper om = JsonUtils.getMapper(); + private static ObjectMapper om = new JsonMapperProvider().get(); @Before public void init() throws Exception { diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java index be031d1e43..37ffa60fd9 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java @@ -27,7 +27,7 @@ import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.conductor.dyno.DynoProxy; @@ -74,7 +74,7 @@ public class RedisExecutionDAOTest { @Mock private IndexDAO indexDAO; - private static ObjectMapper objectMapper = JsonUtils.getMapper(); + private static ObjectMapper objectMapper = new JsonMapperProvider().get(); @SuppressWarnings("unchecked") @Before diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java index 25b8011db1..7a71cb6dc7 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java @@ -28,7 +28,7 @@ import java.util.UUID; import java.util.stream.Collectors; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import org.apache.commons.lang.builder.EqualsBuilder; import org.junit.Before; import org.junit.Test; @@ -58,7 +58,7 @@ public class RedisMetadataDAOTest { private RedisMetadataDAO dao; - private static 
ObjectMapper om = JsonUtils.getMapper(); + private static ObjectMapper om = new JsonMapperProvider().get(); @Before public void init() { diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java b/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java index 0a1f9066b4..d68d77c8e3 100644 --- a/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java +++ b/server/src/main/java/com/netflix/conductor/bootstrap/BootstrapModule.java @@ -2,6 +2,7 @@ import com.google.inject.AbstractModule; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; diff --git a/server/src/main/java/com/netflix/conductor/server/JerseyModule.java b/server/src/main/java/com/netflix/conductor/server/JerseyModule.java index b0d344a2e1..caff6e85db 100644 --- a/server/src/main/java/com/netflix/conductor/server/JerseyModule.java +++ b/server/src/main/java/com/netflix/conductor/server/JerseyModule.java @@ -31,7 +31,6 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.jaxrs.json.JacksonJsonProvider; import com.google.inject.Provides; -import com.netflix.conductor.common.utils.JsonUtils; import com.sun.jersey.api.core.PackagesResourceConfig; import com.sun.jersey.api.core.ResourceConfig; import com.sun.jersey.guice.JerseyServletModule; @@ -43,11 +42,10 @@ * */ public final class JerseyModule extends JerseyServletModule { + @Override protected void configureServlets() { - - filter("/*").through(apiOriginFilter()); Map jerseyParams = new HashMap<>(); @@ -58,13 +56,7 @@ protected void configureServlets() { serve("/api/*").with(GuiceContainer.class, jerseyParams); } - @Provides - @Singleton - public ObjectMapper objectMapper() { - return JsonUtils.getMapper(); - } - - @Provides + @Provides @Singleton JacksonJsonProvider jacksonJsonProvider(ObjectMapper mapper) { return new JacksonJsonProvider(mapper); diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java index f51ce72985..175e4c9a0c 100644 --- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java +++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java @@ -15,9 +15,11 @@ */ package com.netflix.conductor.server; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.AbstractModule; import com.google.inject.Scopes; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration; @@ -38,6 +40,7 @@ protected void configure() { install(new JettyModule()); install(new GRPCModule()); + bind(ObjectMapper.class).toProvider(JsonMapperProvider.class); bind(Configuration.class).to(SystemPropertiesDynomiteConfiguration.class); bind(ExecutorService.class).toProvider(ExecutorServiceProvider.class).in(Scopes.SINGLETON); } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java index a54735f251..73cbb40cae 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java @@ -21,7 +21,7 @@ import 
com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.config.CoreModule; import com.netflix.conductor.core.config.SystemPropertiesConfiguration; -import com.netflix.conductor.common.utils.JsonUtils; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.MetadataDAO; @@ -83,7 +83,7 @@ public String getCurrentShard() { bind(JedisCommands.class).toProvider(InMemoryJedisProvider.class); install(new CoreModule()); bind(UserTask.class).asEagerSingleton(); - bind(ObjectMapper.class).toInstance(JsonUtils.getMapper()); + bind(ObjectMapper.class).toProvider(JsonMapperProvider.class); } @Provides From 034dfa904e72b8e02431eb732ae783a1714ce1d7 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 2 Jul 2018 15:57:41 +0200 Subject: [PATCH 082/163] protogen: Rewrite as an external dependency --- build.gradle | 1 + common/build.gradle | 1 + .../common/annotations/ProtoEnum.java | 15 -- .../common/annotations/ProtoField.java | 25 ---- .../common/annotations/ProtoMessage.java | 37 ----- .../metadata/events/EventExecution.java | 2 +- .../common/metadata/events/EventHandler.java | 4 +- .../common/metadata/tasks/PollData.java | 3 +- .../conductor/common/metadata/tasks/Task.java | 4 +- .../common/metadata/tasks/TaskDef.java | 4 +- .../common/metadata/tasks/TaskExecLog.java | 3 +- .../common/metadata/tasks/TaskResult.java | 4 +- .../workflow/DynamicForkJoinTask.java | 3 +- .../workflow/DynamicForkJoinTaskList.java | 3 +- .../workflow/RerunWorkflowRequest.java | 3 +- .../metadata/workflow/SkipTaskRequest.java | 3 +- .../workflow/StartWorkflowRequest.java | 3 +- .../metadata/workflow/SubWorkflowParams.java | 3 +- .../common/metadata/workflow/WorkflowDef.java | 3 +- .../metadata/workflow/WorkflowTask.java | 4 +- .../conductor/common/run/TaskSummary.java | 3 +- .../conductor/common/run/Workflow.java | 4 +- .../conductor/common/run/WorkflowSummary.java | 3 +- .../conductor/grpc/AbstractProtoMapper.java | 2 +- protogen/build.gradle | 3 +- .../conductor/protogen/ConductorProtoGen.java | 41 ++++++ .../netflix/conductor/protogen/Element.java | 111 --------------- .../com/netflix/conductor/protogen/Enum.java | 67 --------- .../com/netflix/conductor/protogen/File.java | 52 ------- .../netflix/conductor/protogen/Message.java | 129 ------------------ .../netflix/conductor/protogen/ProtoGen.java | 104 -------------- .../protogen/types/AbstractType.java | 127 ----------------- .../protogen/types/ExternMessageType.java | 40 ------ .../conductor/protogen/types/GenericType.java | 58 -------- .../conductor/protogen/types/ListType.java | 78 ----------- .../conductor/protogen/types/MapType.java | 99 -------------- .../conductor/protogen/types/MessageType.java | 68 --------- .../conductor/protogen/types/ScalarType.java | 76 ----------- .../conductor/protogen/types/WrappedType.java | 75 ---------- protogen/templates/file.proto | 14 -- protogen/templates/message.proto | 8 -- versionsOfDependencies.gradle | 1 + 42 files changed, 64 insertions(+), 1227 deletions(-) delete mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java delete mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java delete mode 100644 common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java create mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java delete mode 100644 
protogen/src/main/java/com/netflix/conductor/protogen/Element.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/Enum.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/File.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/Message.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java delete mode 100644 protogen/templates/file.proto delete mode 100644 protogen/templates/message.proto diff --git a/build.gradle b/build.gradle index 01ea4774b8..2ec3e7b1de 100644 --- a/build.gradle +++ b/build.gradle @@ -34,6 +34,7 @@ subprojects { repositories { jcenter() + maven { url "https://dl.bintray.com/vmg/protogen" } } dependencies { diff --git a/common/build.gradle b/common/build.gradle index 4fabafa19c..43a037129d 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -5,4 +5,5 @@ dependencies { compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" compile "javax.inject:javax.inject:1" + compile "com.github.vmg.protogen:protogen-annotations:${revProtoGen}" } \ No newline at end of file diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java deleted file mode 100644 index 6217190c3a..0000000000 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoEnum.java +++ /dev/null @@ -1,15 +0,0 @@ -package com.netflix.conductor.common.annotations; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoEnum annotates an enum type that will be exposed via the GRPC - * API as a native Protocol Buffers enum. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ProtoEnum { -} diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java deleted file mode 100644 index c3ea743821..0000000000 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoField.java +++ /dev/null @@ -1,25 +0,0 @@ -package com.netflix.conductor.common.annotations; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoField annotates a field inside an struct with metadata on how to - * expose it on its corresponding Protocol Buffers struct. 
- * For a field to be exposed in a ProtoBuf struct, the containing struct - * must also be annotated with a {@link ProtoMessage} or {@link ProtoEnum} - * tag. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface ProtoField { - /** - * Mandatory. Sets the Protocol Buffer ID for this specific field. Once a field - * has been annotated with a given ID, the ID can never change to a different value - * or the resulting Protocol Buffer struct will not be backwards compatible. - * @return the numeric ID for the field - */ - int id(); -} diff --git a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java b/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java deleted file mode 100644 index fcae3262e5..0000000000 --- a/common/src/main/java/com/netflix/conductor/common/annotations/ProtoMessage.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.netflix.conductor.common.annotations; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * ProtoMessage annotates a given Java class so it becomes exposed via the GRPC - * API as a native Protocol Buffers struct. - * The annotated class must be a POJO. - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface ProtoMessage { - /** - * Sets whether the generated mapping code will contain a helper to translate - * the POJO for this class into the equivalent ProtoBuf object. - * @return whether this class will generate a mapper to ProtoBuf objects - */ - boolean toProto() default true; - - /** - * Sets whether the generated mapping code will contain a helper to translate - * the ProtoBuf object for this class into the equivalent POJO. - * @return whether this class will generate a mapper from ProtoBuf objects - */ - boolean fromProto() default true; - - /** - * Sets whether this is a wrapper class that will be used to encapsulate complex - * nested type interfaces. Wrapper classes are not directly exposed by the ProtoBuf - * API and must be mapped manually. 
- * @return whether this is a wrapper class - */ - boolean wrapper() default false; -} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java index 70f1a1c10d..43f96d241b 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventExecution.java @@ -21,7 +21,7 @@ import java.util.HashMap; import java.util.Map; -import com.netflix.conductor.common.annotations.*; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.events.EventHandler.Action; /** diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java index 14820fe978..0f1216011c 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/events/EventHandler.java @@ -19,9 +19,7 @@ package com.netflix.conductor.common.metadata.events; import com.google.protobuf.Any; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.HashMap; import java.util.LinkedList; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java index 74ad6e9d68..cbd4c30d40 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java @@ -1,7 +1,6 @@ package com.netflix.conductor.common.metadata.tasks; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; /** * Copyright 2016 Netflix, Inc. 
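The hunks that follow repeat this same import swap across the rest of the common POJOs;
the annotation contract itself does not change. As a minimal sketch (a hypothetical
class, shown only to illustrate the externalized API), a model annotated against
com.github.vmg.protogen.annotations looks like this:

    import com.github.vmg.protogen.annotations.ProtoField;
    import com.github.vmg.protogen.annotations.ProtoMessage;

    // Registered with the external protogen generator (via generator.process(),
    // as in the ConductorProtoGen entry point added at the end of this patch)
    // and exposed over the gRPC API as a native Protocol Buffers message.
    @ProtoMessage
    public class ExampleRequest {
        // The id is mandatory and permanent: renumbering or reusing it would
        // break backwards compatibility of the generated proto definition.
        @ProtoField(id = 1)
        private String name;

        public String getName() { return name; }

        public void setName(String name) { this.name = name; }
    }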
diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index bf0108f784..1fddb0a807 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -16,9 +16,7 @@ package com.netflix.conductor.common.metadata.tasks; import com.google.protobuf.Any; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import java.util.HashMap; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java index 30901515f0..fbf2b0f6b1 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskDef.java @@ -23,9 +23,7 @@ import java.util.List; import java.util.Map; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.Auditable; /** diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java index 65a1ff9825..ba98ce2a30 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskExecLog.java @@ -18,8 +18,7 @@ */ package com.netflix.conductor.common.metadata.tasks; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; /** * @author Viren diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java index 75bcf1c178..458cdd2bef 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/TaskResult.java @@ -19,9 +19,7 @@ package com.netflix.conductor.common.metadata.tasks; import com.google.protobuf.Any; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.HashMap; import java.util.List; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java index 7728a91ab0..94d8aaaec4 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java @@ -18,8 +18,7 @@ import java.util.HashMap; import java.util.Map; -import com.netflix.conductor.common.annotations.ProtoField; -import 
com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; @ProtoMessage diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java index 6b40a6ea10..a4e0b98553 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTaskList.java @@ -15,8 +15,7 @@ */ package com.netflix.conductor.common.metadata.workflow; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.ArrayList; import java.util.List; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java index bcab01a620..3da1a7c89b 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/RerunWorkflowRequest.java @@ -15,8 +15,7 @@ */ package com.netflix.conductor.common.metadata.workflow; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.Map; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java index 649f0f654e..63725d393f 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SkipTaskRequest.java @@ -16,8 +16,7 @@ package com.netflix.conductor.common.metadata.workflow; import com.google.protobuf.Any; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.Map; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java index 39d86c8981..9d3d93f9e5 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java @@ -1,7 +1,6 @@ package com.netflix.conductor.common.metadata.workflow; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.HashMap; import java.util.Map; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java index d274aca8be..4380a8d268 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java @@ -18,8 +18,7 @@ */ package 
com.netflix.conductor.common.metadata.workflow; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; /** * @author Viren diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 5321d06385..01f72be73c 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -25,8 +25,7 @@ import java.util.Map; import java.util.Optional; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.Auditable; /** diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index dfe269bf0e..e0d524d2ab 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -18,9 +18,7 @@ */ package com.netflix.conductor.common.metadata.workflow; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import java.util.Collection; import java.util.HashMap; diff --git a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java b/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java index 3c6c40e7c6..f0fc91361c 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/TaskSummary.java @@ -22,8 +22,7 @@ import java.util.Date; import java.util.TimeZone; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 1c3fcaed03..21459d01b8 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -22,9 +22,7 @@ import java.util.Map; import java.util.Set; -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.Auditable; import com.netflix.conductor.common.metadata.tasks.Task; diff --git a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java index 289666bb9c..53d95cf4d9 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java @@ -23,8 +23,7 @@ import 
java.util.TimeZone; import java.util.stream.Collectors; -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; +import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; /** diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 594b0c058d..7b9a0b0d81 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -50,7 +50,7 @@ import java.util.stream.Collectors; import javax.annotation.Generated; -@Generated("com.netflix.conductor.protogen.ProtoGen") +@Generated("ProtoGen") public abstract class AbstractProtoMapper { public EventExecutionPb.EventExecution toProto(EventExecution from) { EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); diff --git a/protogen/build.gradle b/protogen/build.gradle index aa6df2376f..2355074ed7 100644 --- a/protogen/build.gradle +++ b/protogen/build.gradle @@ -1,5 +1,4 @@ dependencies { compile project(':conductor-common') - compile 'com.squareup:javapoet:1.11.1' - compile 'com.github.jknack:handlebars:4.0.6' + compile "com.github.vmg.protogen:protogen-codegen:${revProtoGen}" } diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java new file mode 100644 index 0000000000..ac93f58c38 --- /dev/null +++ b/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java @@ -0,0 +1,41 @@ +package com.netflix.conductor.protogen; + +import com.github.vmg.protogen.ProtoGen; + +public class ConductorProtoGen { + private final static String PROTO_PACKAGE_NAME = "conductor.proto"; + private final static String JAVA_PACKAGE_NAME = "com.netflix.conductor.proto"; + private final static String GO_PACKAGE_NAME = "github.com/netflix/conductor/client/gogrpc/conductor/model"; + private final static String MAPPER_PACKAGE_NAME = "com.netflix.conductor.grpc"; + + public static void main(String[] args) throws Exception { + ProtoGen generator = new ProtoGen( + PROTO_PACKAGE_NAME, JAVA_PACKAGE_NAME, GO_PACKAGE_NAME + ); + + generator.process(com.netflix.conductor.common.metadata.events.EventExecution.class); + generator.process(com.netflix.conductor.common.metadata.events.EventHandler.class); + + generator.process(com.netflix.conductor.common.metadata.tasks.PollData.class); + generator.process(com.netflix.conductor.common.metadata.tasks.Task.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskDef.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskExecLog.class); + generator.process(com.netflix.conductor.common.metadata.tasks.TaskResult.class); + + generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask.class); + generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList.class); + generator.process(com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.SkipTaskRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest.class); + generator.process(com.netflix.conductor.common.metadata.workflow.SubWorkflowParams.class); + 
generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowDef.class); + generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowTask.class); + + generator.process(com.netflix.conductor.common.run.TaskSummary.class); + generator.process(com.netflix.conductor.common.run.Workflow.class); + generator.process(com.netflix.conductor.common.run.WorkflowSummary.class); + + generator.writeProtos("grpc/src/main/proto"); + generator.writeMapper(MAPPER_PACKAGE_NAME,"grpc/src/main/java/com/netflix/conductor/grpc/"); + } +} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Element.java b/protogen/src/main/java/com/netflix/conductor/protogen/Element.java deleted file mode 100644 index 0e662b03db..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Element.java +++ /dev/null @@ -1,111 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.netflix.conductor.common.annotations.ProtoEnum; -import com.netflix.conductor.common.annotations.ProtoMessage; -import com.netflix.conductor.protogen.types.AbstractType; -import com.netflix.conductor.protogen.types.MessageType; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeSpec; - -import java.util.*; - -public abstract class Element { - protected Class clazz; - protected MessageType type; - protected List fields = new ArrayList(); - protected List nested = new ArrayList<>(); - - public Element(Class cls, MessageType parentType) { - this.clazz = cls; - this.type = AbstractType.declare(cls, parentType); - - for (Class nested : clazz.getDeclaredClasses()) { - if (nested.isEnum()) - addNestedEnum(nested); - else - addNestedClass(nested); - } - } - - private void addNestedEnum(Class cls) { - ProtoEnum ann = (ProtoEnum)cls.getAnnotation(ProtoEnum.class); - if (ann != null) { - nested.add(new Enum(cls, this.type)); - } - } - - private void addNestedClass(Class cls) { - ProtoMessage ann = (ProtoMessage)cls.getAnnotation(ProtoMessage.class); - if (ann != null) { - nested.add(new Message(cls, this.type)); - } - } - - public abstract String getProtoClass(); - protected abstract void javaMapToProto(TypeSpec.Builder builder); - protected abstract void javaMapFromProto(TypeSpec.Builder builder); - - public void generateJavaMapper(TypeSpec.Builder builder) { - javaMapToProto(builder); - javaMapFromProto(builder); - - for (Element element : this.nested) { - element.generateJavaMapper(builder); - } - } - - public void generateAbstractMethods(Set specs) { - for (Field field : fields) { - field.generateAbstractMethods(specs); - } - - for (Element elem : nested) { - elem.generateAbstractMethods(specs); - } - } - - public void findDependencies(Set dependencies) { - for (Field field : fields) { - field.getDependencies(dependencies); - } - - for (Element elem : nested) { - elem.findDependencies(dependencies); - } - } - - public List getNested() { - return nested; - } - - public List getFields() { - return fields; - } - - public String getName() { - return clazz.getSimpleName(); - } - - public static abstract class Field { - protected int protoIndex; - protected java.lang.reflect.Field field; - - protected Field(int index, java.lang.reflect.Field field) { - this.protoIndex = index; - this.field = field; - } - - public abstract String getProtoTypeDeclaration(); - - public int getProtoIndex() { - return protoIndex; - } - - public String getName() { - return field.getName(); - } - - public void getDependencies(Set deps) {} - public void generateAbstractMethods(Set specs) {} - } -} diff --git 
a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java b/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java deleted file mode 100644 index 2ce3138890..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Enum.java +++ /dev/null @@ -1,67 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.netflix.conductor.protogen.types.MessageType; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; -import com.squareup.javapoet.TypeSpec; - -import javax.lang.model.element.Modifier; - -public class Enum extends Element { - public Enum(Class cls, MessageType parent) { - super(cls, parent); - - int protoIndex = 0; - for (java.lang.reflect.Field field : cls.getDeclaredFields()) { - if (field.isEnumConstant()) - fields.add(new EnumField(protoIndex++, field)); - } - } - - @Override - public String getProtoClass() { - return "enum"; - } - - private MethodSpec javaMap(String methodName, TypeName from, TypeName to) { - MethodSpec.Builder method = MethodSpec.methodBuilder(methodName); - method.addModifiers(Modifier.PUBLIC); - method.returns(to); - method.addParameter(from, "from"); - - method.addStatement("$T to", to); - method.beginControlFlow("switch (from)"); - - for (Field field : fields) { - String name = field.getName(); - method.addStatement("case $L: to = $T.$L; break", name, to, name); - } - - method.addStatement("default: throw new $T(\"Unexpected enum constant: \" + from)", - IllegalArgumentException.class); - method.endControlFlow(); - method.addStatement("return to"); - return method.build(); - } - - @Override - protected void javaMapFromProto(TypeSpec.Builder type) { - type.addMethod(javaMap("fromProto", this.type.getJavaProtoType(), TypeName.get(this.clazz))); - } - - @Override - protected void javaMapToProto(TypeSpec.Builder type) { - type.addMethod(javaMap("toProto", TypeName.get(this.clazz), this.type.getJavaProtoType())); - } - - public class EnumField extends Field { - protected EnumField(int index, java.lang.reflect.Field field) { - super(index, field); - } - - @Override - public String getProtoTypeDeclaration() { - return String.format("%s = %d", getName().toUpperCase(), getProtoIndex()); - } - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/File.java b/protogen/src/main/java/com/netflix/conductor/protogen/File.java deleted file mode 100644 index e4c134bed9..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/File.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.netflix.conductor.protogen.types.AbstractType; -import com.squareup.javapoet.ClassName; - -import java.util.*; - -public class File { - public static String PROTO_SUFFIX = "Pb"; - - private ClassName baseClass; - private Element message; - private String filePath; - - public File(Class object) { - String className = object.getSimpleName() + PROTO_SUFFIX; - this.filePath = "model/" + object.getSimpleName().toLowerCase() + ".proto"; - this.baseClass = ClassName.get(ProtoGen.PROTO_JAVA_PACKAGE_NAME, className); - this.message = new Message(object, AbstractType.baseClass(baseClass, filePath)); - } - - public String getJavaClassName() { - return baseClass.simpleName(); - } - - public String getFilePath() { - return filePath; - } - - public String getPackageName() { - return ProtoGen.PROTO_PACKAGE_NAME; - } - - public String getJavaPackageName() { - return ProtoGen.PROTO_JAVA_PACKAGE_NAME; - } - - public String getGoPackage() { - return ProtoGen.PROTO_GO_PACKAGE_NAME; - } - - 
public Element getMessage() { - return message; - } - - public Set getIncludes() { - Set includes = new HashSet<>(); - message.findDependencies(includes); - includes.remove(this.getFilePath()); - return includes; - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java b/protogen/src/main/java/com/netflix/conductor/protogen/Message.java deleted file mode 100644 index 958cb0814a..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/Message.java +++ /dev/null @@ -1,129 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.netflix.conductor.common.annotations.ProtoField; -import com.netflix.conductor.common.annotations.ProtoMessage; -import com.netflix.conductor.protogen.types.AbstractType; -import com.netflix.conductor.protogen.types.MessageType; -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeSpec; - -import javax.lang.model.element.Modifier; -import java.util.Set; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class Message extends Element { - public Message(Class cls, MessageType parent) { - super(cls, parent); - - for (java.lang.reflect.Field field: clazz.getDeclaredFields()) { - ProtoField ann = field.getAnnotation(ProtoField.class); - if (ann == null) - continue; - - fields.add(new MessageField(ann.id(), field)); - } - } - - protected ProtoMessage getAnnotation() { - return (ProtoMessage)this.clazz.getAnnotation(ProtoMessage.class); - } - - @Override - public String getProtoClass() { - return "message"; - } - - @Override - protected void javaMapToProto(TypeSpec.Builder type) { - if (!getAnnotation().toProto() || getAnnotation().wrapper()) - return; - - ClassName javaProtoType = (ClassName)this.type.getJavaProtoType(); - MethodSpec.Builder method = MethodSpec.methodBuilder("toProto"); - method.addModifiers(Modifier.PUBLIC); - method.returns(javaProtoType); - method.addParameter(this.clazz, "from"); - - method.addStatement("$T to = $T.newBuilder()", - javaProtoType.nestedClass("Builder"), javaProtoType); - - for (Field field : this.fields) { - if (field instanceof MessageField) { - AbstractType fieldType = ((MessageField) field).getAbstractType(); - fieldType.mapToProto(field.getName(), method); - } - } - - method.addStatement("return to.build()"); - type.addMethod(method.build()); - } - - @Override - protected void javaMapFromProto(TypeSpec.Builder type) { - if (!getAnnotation().fromProto() || getAnnotation().wrapper()) - return; - - MethodSpec.Builder method = MethodSpec.methodBuilder("fromProto"); - method.addModifiers(Modifier.PUBLIC); - method.returns(this.clazz); - method.addParameter(this.type.getJavaProtoType(), "from"); - - method.addStatement("$T to = new $T()", this.clazz, this.clazz); - - for (Field field : this.fields) { - if (field instanceof MessageField) { - AbstractType fieldType = ((MessageField) field).getAbstractType(); - fieldType.mapFromProto(field.getName(), method); - } - } - - method.addStatement("return to"); - type.addMethod(method.build()); - } - - public static class MessageField extends Field { - protected AbstractType type; - - protected MessageField(int index, java.lang.reflect.Field field) { - super(index, field); - } - - public AbstractType getAbstractType() { - if (type == null) { - type = AbstractType.get(field.getGenericType()); - } - return type; - } - - private static Pattern CAMEL_CASE_RE = Pattern.compile("(?<=[a-z])[A-Z]"); - private static String toUnderscoreCase(String input) { - Matcher m = 
CAMEL_CASE_RE.matcher(input); - StringBuffer sb = new StringBuffer(); - while (m.find()) { - m.appendReplacement(sb, "_" + m.group()); - } - m.appendTail(sb); - return sb.toString().toLowerCase(); - } - - @Override - public String getProtoTypeDeclaration() { - return String.format("%s %s = %d", - getAbstractType().getProtoType(), - toUnderscoreCase(getName()), - getProtoIndex()); - } - - @Override - public void getDependencies(Set deps) { - getAbstractType().getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - getAbstractType().generateAbstractMethods(specs); - } - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java deleted file mode 100644 index 1165b03342..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/ProtoGen.java +++ /dev/null @@ -1,104 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.github.jknack.handlebars.EscapingStrategy; -import com.github.jknack.handlebars.Handlebars; -import com.github.jknack.handlebars.Template; -import com.github.jknack.handlebars.io.FileTemplateLoader; -import com.github.jknack.handlebars.io.TemplateLoader; -import com.squareup.javapoet.*; - -import javax.annotation.Generated; -import javax.lang.model.element.Modifier; -import java.io.FileWriter; -import java.io.Writer; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.*; - -public class ProtoGen { - public static String PROTO_PACKAGE_NAME = "conductor.proto"; - public static String PROTO_JAVA_PACKAGE_NAME = "com.netflix.conductor.proto"; - public static String PROTO_GO_PACKAGE_NAME = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - - public static String GENERATED_MAPPER_PACKAGE = "com.netflix.conductor.grpc"; - public static String GENERATOR_NAME = "com.netflix.conductor.protogen.ProtoGen"; - - private List files = new ArrayList<>(); - - public static void main(String[] args) throws Exception { - ProtoGen generator = new ProtoGen(); - - generator.process(com.netflix.conductor.common.metadata.events.EventExecution.class); - generator.process(com.netflix.conductor.common.metadata.events.EventHandler.class); - - generator.process(com.netflix.conductor.common.metadata.tasks.PollData.class); - generator.process(com.netflix.conductor.common.metadata.tasks.Task.class); - generator.process(com.netflix.conductor.common.metadata.tasks.TaskDef.class); - generator.process(com.netflix.conductor.common.metadata.tasks.TaskExecLog.class); - generator.process(com.netflix.conductor.common.metadata.tasks.TaskResult.class); - - generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTask.class); - generator.process(com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList.class); - generator.process(com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest.class); - generator.process(com.netflix.conductor.common.metadata.workflow.SkipTaskRequest.class); - generator.process(com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest.class); - generator.process(com.netflix.conductor.common.metadata.workflow.SubWorkflowParams.class); - generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowDef.class); - generator.process(com.netflix.conductor.common.metadata.workflow.WorkflowTask.class); - - generator.process(com.netflix.conductor.common.run.TaskSummary.class); - generator.process(com.netflix.conductor.common.run.Workflow.class); - 
generator.process(com.netflix.conductor.common.run.WorkflowSummary.class); - - generator.writeProtos("grpc/src/main/proto"); - generator.writeMapper("grpc/src/main/java/com/netflix/conductor/grpc/"); - } - - public ProtoGen() { - } - - public void writeMapper(String root) throws Exception { - TypeSpec.Builder protoMapper = TypeSpec.classBuilder("AbstractProtoMapper") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .addAnnotation(AnnotationSpec.builder(Generated.class) - .addMember("value", "$S", GENERATOR_NAME).build()); - - Set abstractMethods = new HashSet<>(); - - for (File file : files) { - Element elem = file.getMessage(); - elem.generateJavaMapper(protoMapper); - elem.generateAbstractMethods(abstractMethods); - } - - protoMapper.addMethods(abstractMethods); - - JavaFile javaFile = JavaFile.builder(GENERATED_MAPPER_PACKAGE, protoMapper.build()) - .indent(" ").build(); - Path filename = Paths.get(root, "AbstractProtoMapper.java"); - try (Writer writer = new FileWriter(filename.toString())) { - javaFile.writeTo(writer); - } - } - - public void writeProtos(String root) throws Exception { - TemplateLoader loader = new FileTemplateLoader("protogen/templates", ".proto"); - Handlebars handlebars = new Handlebars(loader) - .infiniteLoops(true) - .prettyPrint(true) - .with(EscapingStrategy.NOOP); - - Template protoFile = handlebars.compile("file"); - - for (File file : files) { - Path filename = Paths.get(root, file.getFilePath()); - try (Writer writer = new FileWriter(filename.toString())) { - protoFile.apply(file, writer); - } - } - } - - public void process(Class obj) throws Exception { - files.add(new File(obj)); - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java deleted file mode 100644 index 0a73be77e0..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/AbstractType.java +++ /dev/null @@ -1,127 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.google.common.base.CaseFormat; -import com.google.protobuf.Any; -import com.netflix.conductor.protogen.*; -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.*; - -public abstract class AbstractType { - private static Map TYPES = new HashMap<>(); - private static void addScalar(Type t, String protoType) { - TYPES.put(t, new ScalarType(t, TypeName.get(t), protoType)); - } - static { - addScalar(int.class, "int32"); - addScalar(Integer.class, "int32"); - addScalar(long.class, "int64"); - addScalar(Long.class, "int64"); - addScalar(String.class, "string"); - addScalar(boolean.class, "bool"); - addScalar(Boolean.class, "bool"); - - - TYPES.put(Object.class, - new ExternMessageType( - Object.class, - ClassName.get("com.google.protobuf", "Value"), - "google.protobuf.Value", - "google/protobuf/struct.proto") - ); - - TYPES.put(Any.class, - new ExternMessageType( - Any.class, - ClassName.get(Any.class), - "google.protobuf.Any", - "google/protobuf/any.proto") - ); - } - - static Map PROTO_LIST_TYPES = new HashMap<>(); - static { - PROTO_LIST_TYPES.put(List.class, ArrayList.class); - PROTO_LIST_TYPES.put(Set.class, HashSet.class); - PROTO_LIST_TYPES.put(LinkedList.class, LinkedList.class); - } - - public static AbstractType get(Type t) { - if (!TYPES.containsKey(t)) { - if (t instanceof ParameterizedType) { - Type raw 
= ((ParameterizedType) t).getRawType(); - if (PROTO_LIST_TYPES.containsKey(raw)) { - TYPES.put(t, new ListType(t)); - } else if (raw.equals(Map.class)) { - TYPES.put(t, new MapType(t)); - } - } - } - if (!TYPES.containsKey(t)) { - throw new IllegalArgumentException("Cannot map type: " + t); - } - return TYPES.get(t); - } - - public static MessageType get(String className) { - for (Map.Entry pair : TYPES.entrySet()) { - AbstractType t = pair.getValue(); - if (t instanceof MessageType) { - if (((Class) t.getJavaType()).getSimpleName().equals(className)) - return (MessageType)t; - } - } - return null; - } - - public static MessageType declare(Class type, MessageType parent) { - return declare(type, (ClassName)parent.getJavaProtoType(), parent.getProtoFilePath()); - } - - public static MessageType declare(Class type, ClassName parentType, String protoFilePath) { - String simpleName = type.getSimpleName(); - MessageType t = new MessageType(type, parentType.nestedClass(simpleName), protoFilePath); - if (TYPES.containsKey(type)) { - throw new IllegalArgumentException("duplicate type declaration: "+type); - } - TYPES.put(type, t); - return t; - } - - public static MessageType baseClass(ClassName className, String protoFilePath) { - return new MessageType(Object.class, className, protoFilePath); - } - - Type javaType; - TypeName javaProtoType; - - AbstractType(Type javaType, TypeName javaProtoType) { - this.javaType = javaType; - this.javaProtoType = javaProtoType; - } - - public Type getJavaType() { - return javaType; - } - - public TypeName getJavaProtoType() { - return javaProtoType; - } - - public abstract String getProtoType(); - public abstract TypeName getRawJavaType(); - public abstract void mapToProto(String field, MethodSpec.Builder method); - public abstract void mapFromProto(String field, MethodSpec.Builder method); - - public abstract void getDependencies(Set deps); - public abstract void generateAbstractMethods(Set specs); - - protected String fieldMethod(String m, String field) { - return m + CaseFormat.LOWER_CAMEL.to(CaseFormat.UPPER_CAMEL, field); - } - -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java deleted file mode 100644 index 81390b573a..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ExternMessageType.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; - -import javax.lang.model.element.Modifier; -import java.lang.reflect.Type; -import java.util.Set; - -public class ExternMessageType extends MessageType { - private String externProtoType; - - public ExternMessageType(Type javaType, ClassName javaProtoType, String externProtoType, String protoFilePath) { - super(javaType, javaProtoType, protoFilePath); - this.externProtoType = externProtoType; - } - - @Override - public String getProtoType() { - return externProtoType; - } - - @Override - public void generateAbstractMethods(Set specs) { - MethodSpec fromProto = MethodSpec.methodBuilder("fromProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.getJavaType()) - .addParameter(this.getJavaProtoType(), "in") - .build(); - - MethodSpec toProto = MethodSpec.methodBuilder("toProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.getJavaProtoType()) - .addParameter(this.getJavaType(), "in") - .build(); - - 
specs.add(fromProto); - specs.add(toProto); - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java deleted file mode 100644 index e1e61175eb..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/GenericType.java +++ /dev/null @@ -1,58 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.Set; - -abstract class GenericType extends AbstractType { - public GenericType(Type type) { - super(type, null); - } - - protected Class getRawType() { - ParameterizedType tt = (ParameterizedType)this.getJavaType(); - return (Class)tt.getRawType(); - } - - protected AbstractType resolveGenericParam(int idx) { - ParameterizedType tt = (ParameterizedType)this.getJavaType(); - Type[] types = tt.getActualTypeArguments(); - - AbstractType abstractType = AbstractType.get(types[idx]); - if (abstractType instanceof GenericType) { - return WrappedType.wrap((GenericType) abstractType); - } - return abstractType; - } - - public abstract String getWrapperSuffix(); - public abstract AbstractType getValueType(); - public abstract TypeName resolveJavaProtoType(); - - @Override - public TypeName getRawJavaType() { - return ClassName.get(getRawType()); - } - - @Override - public void getDependencies(Set deps) { - getValueType().getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - getValueType().generateAbstractMethods(specs); - } - - @Override - public TypeName getJavaProtoType() { - if (javaProtoType == null) { - javaProtoType = resolveJavaProtoType(); - } - return javaProtoType; - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java deleted file mode 100644 index 77854e70b5..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ListType.java +++ /dev/null @@ -1,78 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.ParameterizedTypeName; -import com.squareup.javapoet.TypeName; - -import java.lang.reflect.Type; -import java.util.stream.Collectors; - -public class ListType extends GenericType { - private AbstractType valueType; - - public ListType(Type type) { - super(type); - } - - @Override - public String getWrapperSuffix() { - return "List"; - } - - @Override - public AbstractType getValueType() { - if (valueType == null) { - valueType = resolveGenericParam(0); - } - return valueType; - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - AbstractType subtype = getValueType(); - if (subtype instanceof ScalarType) { - method.addStatement("to.$L( from.$L() )", - fieldMethod("addAll", field), fieldMethod("get", field)); - } else { - method.beginControlFlow("for ($T elem : from.$L())", - subtype.getJavaType(), fieldMethod("get", field)); - method.addStatement("to.$L( toProto(elem) )", - fieldMethod("add", field)); - method.endControlFlow(); - } - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - AbstractType subtype = getValueType(); - Type entryType = subtype.getJavaType(); - Class collector = 
PROTO_LIST_TYPES.get(getRawType()); - - if (subtype instanceof ScalarType) { - if (entryType.equals(String.class)) { - method.addStatement("to.$L( from.$L().stream().collect($T.toCollection($T::new)) )", - fieldMethod("set", field), fieldMethod("get", field)+"List", - Collectors.class, collector); - } else { - method.addStatement("to.$L( from.$L() )", - fieldMethod("set", field), fieldMethod("get", field) + "List"); - } - } else { - method.addStatement("to.$L( from.$L().stream().map(this::fromProto).collect($T.toCollection($T::new)) )", - fieldMethod("set", field), fieldMethod("get", field)+"List", - Collectors.class, collector); - } - } - - @Override - public TypeName resolveJavaProtoType() { - return ParameterizedTypeName.get((ClassName)getRawJavaType(), - getValueType().getJavaProtoType()); - } - - @Override - public String getProtoType() { - return "repeated " + getValueType().getProtoType(); - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java deleted file mode 100644 index 133f13f10c..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/MapType.java +++ /dev/null @@ -1,99 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.ParameterizedTypeName; -import com.squareup.javapoet.TypeName; - -import java.lang.reflect.Type; -import java.util.HashMap; -import java.util.Map; - -public class MapType extends GenericType { - private AbstractType keyType; - private AbstractType valueType; - - public MapType(Type type) { - super(type); - } - - @Override - public String getWrapperSuffix() { - return "Map"; - } - - @Override - public AbstractType getValueType() { - if (valueType == null) { - valueType = resolveGenericParam(1); - } - return valueType; - } - - public AbstractType getKeyType() { - if (keyType == null) { - keyType = resolveGenericParam(0); - } - return keyType; - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - AbstractType valueType = getValueType(); - if (valueType instanceof ScalarType) { - method.addStatement("to.$L( from.$L() )", - fieldMethod("putAll", field), fieldMethod("get", field)); - } else { - TypeName typeName = ParameterizedTypeName.get(Map.Entry.class, - getKeyType().getJavaType(), - getValueType().getJavaType()); - method.beginControlFlow("for ($T pair : from.$L().entrySet())", - typeName, fieldMethod("get", field)); - method.addStatement("to.$L( pair.getKey(), toProto( pair.getValue() ) )", - fieldMethod("put", field)); - method.endControlFlow(); - } - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - AbstractType valueType = getValueType(); - if (valueType instanceof ScalarType) { - method.addStatement("to.$L( from.$L() )", - fieldMethod("set", field), fieldMethod("get", field)+"Map"); - } else { - Type keyType = getKeyType().getJavaType(); - Type valueTypeJava = getValueType().getJavaType(); - TypeName valueTypePb = getValueType().getJavaProtoType(); - - ParameterizedTypeName entryType = ParameterizedTypeName.get(ClassName.get(Map.Entry.class), TypeName.get(keyType), valueTypePb); - ParameterizedTypeName mapType = ParameterizedTypeName.get(Map.class, keyType, valueTypeJava); - ParameterizedTypeName hashMapType = ParameterizedTypeName.get(HashMap.class, keyType, valueTypeJava); - String mapName = field+"Map"; - - method.addStatement("$T $L = 
new $T()", mapType, mapName, hashMapType); - method.beginControlFlow("for ($T pair : from.$L().entrySet())", - entryType, fieldMethod("get", field)+"Map"); - method.addStatement("$L.put( pair.getKey(), fromProto( pair.getValue() ) )", mapName); - method.endControlFlow(); - method.addStatement("to.$L($L)", fieldMethod("set", field), mapName); - } - } - - @Override - public TypeName resolveJavaProtoType() { - return ParameterizedTypeName.get((ClassName)getRawJavaType(), - getKeyType().getJavaProtoType(), - getValueType().getJavaProtoType()); - } - - @Override - public String getProtoType() { - AbstractType keyType = getKeyType(); - AbstractType valueType = getValueType(); - if (!(keyType instanceof ScalarType)) { - throw new IllegalArgumentException("cannot map non-scalar map key: "+this.getJavaType()); - } - return String.format("map<%s, %s>", keyType.getProtoType(), valueType.getProtoType()); - } -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java deleted file mode 100644 index f87326fe5b..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/MessageType.java +++ /dev/null @@ -1,68 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.netflix.conductor.protogen.File; -import com.netflix.conductor.protogen.types.AbstractType; -import com.squareup.javapoet.ClassName; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -import java.lang.reflect.Type; -import java.util.List; -import java.util.Set; - -public class MessageType extends AbstractType { - private String protoFilePath; - - public MessageType(Type javaType, ClassName javaProtoType, String protoFilePath) { - super(javaType, javaProtoType); - this.protoFilePath = protoFilePath; - } - - @Override - public String getProtoType() { - List classes = ((ClassName)getJavaProtoType()).simpleNames(); - return String.join(".", classes.subList(1, classes.size())); - } - - public String getProtoFilePath() { - return protoFilePath; - } - - @Override - public TypeName getRawJavaType() { - return getJavaProtoType(); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - final String getter = fieldMethod("get", field); - method.beginControlFlow("if (from.$L() != null)", getter); - method.addStatement("to.$L( toProto( from.$L() ) )", fieldMethod("set", field), getter); - method.endControlFlow(); - } - - private boolean isEnum() { - Type clazz = getJavaType(); - return (clazz instanceof Class) && ((Class) clazz).isEnum(); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - if (!isEnum()) - method.beginControlFlow("if (from.$L())", fieldMethod("has", field)); - - method.addStatement("to.$L( fromProto( from.$L() ) )", - fieldMethod("set", field), fieldMethod("get", field)); - - if (!isEnum()) - method.endControlFlow(); - } - - @Override - public void getDependencies(Set deps) { - deps.add(protoFilePath); - } - - @Override - public void generateAbstractMethods(Set specs) {} -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java deleted file mode 100644 index 62d68fe01b..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/ScalarType.java +++ /dev/null @@ -1,76 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import 
com.netflix.conductor.protogen.types.AbstractType; -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; -import org.apache.commons.lang3.ClassUtils; - -import java.lang.reflect.Type; -import java.util.Set; - -public class ScalarType extends AbstractType { - private String protoType; - - public ScalarType(Type javaType, TypeName javaProtoType, String protoType) { - super(javaType, javaProtoType); - this.protoType = protoType; - } - - @Override - public String getProtoType() { - return protoType; - } - - @Override - public TypeName getRawJavaType() { - return getJavaProtoType(); - } - - private void mapCode(String field, MethodSpec.Builder method, String getter) { - method.addStatement("to.$L( from.$L() )", - fieldMethod("set", field), fieldMethod(getter, field)); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - method.addStatement("to.$L( from.$L() )", - fieldMethod("set", field), fieldMethod("get", field)); - } - - private boolean isNullableType() { - final Type jt = getJavaType(); - return jt.equals(Boolean.class) || - jt.equals(Byte.class) || - jt.equals(Character.class) || - jt.equals(Short.class) || - jt.equals(Integer.class) || - jt.equals(Long.class) || - jt.equals(Double.class) || - jt.equals(Float.class) || - jt.equals(String.class); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - final boolean nullable = isNullableType(); - String getter = ( - getJavaType().equals(boolean.class) || - getJavaType().equals(Boolean.class)) ? - fieldMethod("is", field) : - fieldMethod("get", field); - - if (nullable) - method.beginControlFlow("if (from.$L() != null)", getter); - - method.addStatement("to.$L( from.$L() )", fieldMethod("set", field), getter); - - if (nullable) - method.endControlFlow(); - } - - @Override - public void getDependencies(Set deps) {} - - @Override - public void generateAbstractMethods(Set specs) {} -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java b/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java deleted file mode 100644 index 1c95bdefcf..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/types/WrappedType.java +++ /dev/null @@ -1,75 +0,0 @@ -package com.netflix.conductor.protogen.types; - -import com.squareup.javapoet.MethodSpec; -import com.squareup.javapoet.TypeName; - -import javax.lang.model.element.Modifier; -import java.lang.reflect.Type; -import java.util.Set; - -public class WrappedType extends AbstractType { - private AbstractType realType; - private MessageType wrappedType; - - public static com.netflix.conductor.protogen.types.WrappedType wrap(GenericType realType) { - Type valueType = realType.getValueType().getJavaType(); - if (!(valueType instanceof Class)) - throw new IllegalArgumentException("cannot wrap primitive type: "+ valueType); - - String className = ((Class) valueType).getSimpleName() + realType.getWrapperSuffix(); - MessageType wrappedType = AbstractType.get(className); - if (wrappedType == null) - throw new IllegalArgumentException("missing wrapper class: "+className); - return new com.netflix.conductor.protogen.types.WrappedType(realType, wrappedType); - } - - public WrappedType(AbstractType realType, MessageType wrappedType) { - super(realType.getJavaType(), wrappedType.getJavaProtoType()); - this.realType = realType; - this.wrappedType = wrappedType; - } - - @Override - public String getProtoType() { - return wrappedType.getProtoType(); - } - - 
@Override - public TypeName getRawJavaType() { - return realType.getRawJavaType(); - } - - @Override - public void mapToProto(String field, MethodSpec.Builder method) { - wrappedType.mapToProto(field, method); - } - - @Override - public void mapFromProto(String field, MethodSpec.Builder method) { - wrappedType.mapFromProto(field, method); - } - - @Override - public void getDependencies(Set deps) { - this.realType.getDependencies(deps); - this.wrappedType.getDependencies(deps); - } - - @Override - public void generateAbstractMethods(Set specs) { - MethodSpec fromProto = MethodSpec.methodBuilder("fromProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.realType.getJavaType()) - .addParameter(this.wrappedType.getJavaProtoType(), "in") - .build(); - - MethodSpec toProto = MethodSpec.methodBuilder("toProto") - .addModifiers(Modifier.PUBLIC, Modifier.ABSTRACT) - .returns(this.wrappedType.getJavaProtoType()) - .addParameter(this.realType.getJavaType(), "in") - .build(); - - specs.add(fromProto); - specs.add(toProto); - } -} diff --git a/protogen/templates/file.proto b/protogen/templates/file.proto deleted file mode 100644 index a8a940fca3..0000000000 --- a/protogen/templates/file.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; -package {{packageName}}; - -{{#includes}} -import "{{this}}"; -{{/includes}} - -option java_package = "{{javaPackageName}}"; -option java_outer_classname = "{{javaClassName}}"; -option go_package = "{{goPackage}}"; - -{{#message}} -{{>message}} -{{/message}} diff --git a/protogen/templates/message.proto b/protogen/templates/message.proto deleted file mode 100644 index 7de110162b..0000000000 --- a/protogen/templates/message.proto +++ /dev/null @@ -1,8 +0,0 @@ -{{protoClass}} {{name}} { -{{#nested}} - {{>message}} -{{/nested}} -{{#fields}} - {{protoTypeDeclaration}}; -{{/fields}} -} diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index ade3a588d8..ac28ce972b 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -42,6 +42,7 @@ ext { revOauthClient = '1.19.4' revOauthSignature = '1.19.4' revProtoBuf = '3.5.1' + revProtoGen = '0.4.0' revRarefiedRedis = '0.0.17' revServo = '0.12.17' revServletApi = '3.1.0' From 065c8262e764dbaecd0ac552465c4731f8d670a5 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 2 Jul 2018 19:16:33 +0200 Subject: [PATCH 083/163] gradle: Remove BinTray dependency The ProtoGen packages are now mirrored in JCenter. 
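Note: with the protogen-codegen artifact mirrored in JCenter, the jcenter() repository that the root build.gradle already applies to every subproject can resolve it, which is why the vendor-specific Bintray URL removed below becomes redundant. A minimal sketch of the resulting Gradle wiring under that assumption (this is not part of the patch itself; revProtoGen is the version property added to versionsOfDependencies.gradle above):

    // root build.gradle: one shared repository list for every subproject;
    // after this patch, jcenter() alone serves the com.github.vmg.protogen artifacts
    subprojects {
        repositories {
            jcenter()
        }
    }

    // protogen/build.gradle: resolves the code generator from JCenter
    dependencies {
        compile project(':conductor-common')
        compile "com.github.vmg.protogen:protogen-codegen:${revProtoGen}"
    }

With the artifact reachable through the default repository, the :conductor-protogen:generate task wired up in the following patch needs no extra repository configuration to fetch the generator.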
--- build.gradle | 1 - 1 file changed, 1 deletion(-) diff --git a/build.gradle b/build.gradle index 2ec3e7b1de..01ea4774b8 100644 --- a/build.gradle +++ b/build.gradle @@ -34,7 +34,6 @@ subprojects { repositories { jcenter() - maven { url "https://dl.bintray.com/vmg/protogen" } } dependencies { From 1116ba4352f33eaf7d08680ec0abc78c164e2e00 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 3 Jul 2018 16:46:02 +0200 Subject: [PATCH 084/163] protogen: Wire up as part of the build process --- grpc/build.gradle | 2 ++ protogen/build.gradle | 6 ++++++ 2 files changed, 8 insertions(+) diff --git a/grpc/build.gradle b/grpc/build.gradle index 910260bb00..572cfdbd5d 100644 --- a/grpc/build.gradle +++ b/grpc/build.gradle @@ -41,3 +41,5 @@ idea { sourceDirs += file("${projectDir}/build/generated/source/proto/main/grpc"); } } + +compileJava.dependsOn(tasks.getByPath(":conductor-protogen:generate")) diff --git a/protogen/build.gradle b/protogen/build.gradle index 2355074ed7..813c481255 100644 --- a/protogen/build.gradle +++ b/protogen/build.gradle @@ -2,3 +2,9 @@ dependencies { compile project(':conductor-common') compile "com.github.vmg.protogen:protogen-codegen:${revProtoGen}" } + +task generate(type: JavaExec) { + classpath = sourceSets.main.runtimeClasspath + main = "com.netflix.conductor.protogen.ConductorProtoGen" + workingDir = rootDir +} From fc93516b7b980bb1c6a5fd2228de2aaada3cc730 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Tue, 3 Jul 2018 16:46:14 +0200 Subject: [PATCH 085/163] grpc: Export new field in WorkflowDef --- .../java/com/netflix/conductor/grpc/AbstractProtoMapper.java | 2 ++ grpc/src/main/proto/model/workflowdef.proto | 1 + 2 files changed, 3 insertions(+) diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 7b9a0b0d81..78eb08b95f 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -814,6 +814,7 @@ public WorkflowDefPb.WorkflowDef toProto(WorkflowDef from) { to.setFailureWorkflow( from.getFailureWorkflow() ); } to.setSchemaVersion( from.getSchemaVersion() ); + to.setRestartable( from.isRestartable() ); return to.build(); } @@ -831,6 +832,7 @@ public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { to.setOutputParameters(outputParametersMap); to.setFailureWorkflow( from.getFailureWorkflow() ); to.setSchemaVersion( from.getSchemaVersion() ); + to.setRestartable( from.getRestartable() ); return to; } diff --git a/grpc/src/main/proto/model/workflowdef.proto b/grpc/src/main/proto/model/workflowdef.proto index 7ca7747295..9e5be4f627 100644 --- a/grpc/src/main/proto/model/workflowdef.proto +++ b/grpc/src/main/proto/model/workflowdef.proto @@ -17,4 +17,5 @@ message WorkflowDef { map output_parameters = 6; string failure_workflow = 7; int32 schema_version = 8; + bool restartable = 9; } From 3695d044413291e39b7c12faf6ae1ef6317691a9 Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Wed, 4 Jul 2018 12:23:10 +0200 Subject: [PATCH 086/163] client/gogrpc: Update generated code --- client/gogrpc/Gopkg.lock | 2 +- client/gogrpc/Makefile | 15 +- client/gogrpc/conductor/client.go | 28 +- .../conductor/grpc/events/event_service.pb.go | 991 +++++++++ .../grpc/metadata/metadata_service.pb.go | 867 ++++++++ .../conductor/grpc/metadata_service.pb.go | 640 ------ client/gogrpc/conductor/grpc/search.pb.go | 166 -- .../gogrpc/conductor/grpc/search/search.pb.go | 113 + 
.../gogrpc/conductor/grpc/task_service.pb.go | 934 --------- .../conductor/grpc/tasks/task_service.pb.go | 1757 ++++++++++++++++ .../conductor/grpc/workflow_service.pb.go | 1171 ----------- .../grpc/workflows/workflow_service.pb.go | 1822 +++++++++++++++++ .../conductor/model/dynamicforkjointask.pb.go | 54 +- .../model/dynamicforkjointasklist.pb.go | 30 +- .../conductor/model/eventexecution.pb.go | 83 +- .../gogrpc/conductor/model/eventhandler.pb.go | 379 ++++ client/gogrpc/conductor/model/polldata.pb.go | 39 +- .../model/rerunworkflowrequest.pb.go | 58 +- .../conductor/model/skiptaskrequest.pb.go | 72 +- .../model/startworkflowrequest.pb.go | 60 +- .../conductor/model/subworkflowparams.pb.go | 38 +- client/gogrpc/conductor/model/task.pb.go | 235 ++- client/gogrpc/conductor/model/taskdef.pb.go | 102 +- .../gogrpc/conductor/model/taskexeclog.pb.go | 36 +- .../gogrpc/conductor/model/taskresult.pb.go | 118 +- .../gogrpc/conductor/model/tasksummary.pb.go | 66 +- client/gogrpc/conductor/model/workflow.pb.go | 106 +- .../gogrpc/conductor/model/workflowdef.pb.go | 75 +- .../conductor/model/workflowsummary.pb.go | 64 +- .../gogrpc/conductor/model/workflowtask.pb.go | 124 +- client/gogrpc/conductor/worker.go | 11 +- grpc/src/main/proto/grpc/event_service.proto | 2 +- .../main/proto/grpc/metadata_service.proto | 2 +- grpc/src/main/proto/grpc/search.proto | 2 +- grpc/src/main/proto/grpc/task_service.proto | 2 +- .../main/proto/grpc/workflow_service.proto | 2 +- 36 files changed, 6694 insertions(+), 3572 deletions(-) create mode 100644 client/gogrpc/conductor/grpc/events/event_service.pb.go create mode 100644 client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go delete mode 100644 client/gogrpc/conductor/grpc/metadata_service.pb.go delete mode 100644 client/gogrpc/conductor/grpc/search.pb.go create mode 100644 client/gogrpc/conductor/grpc/search/search.pb.go delete mode 100644 client/gogrpc/conductor/grpc/task_service.pb.go create mode 100644 client/gogrpc/conductor/grpc/tasks/task_service.pb.go delete mode 100644 client/gogrpc/conductor/grpc/workflow_service.pb.go create mode 100644 client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go create mode 100644 client/gogrpc/conductor/model/eventhandler.pb.go diff --git a/client/gogrpc/Gopkg.lock b/client/gogrpc/Gopkg.lock index fad0e8c642..4a1314672e 100644 --- a/client/gogrpc/Gopkg.lock +++ b/client/gogrpc/Gopkg.lock @@ -108,6 +108,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "688b3887547a40128cd0161bf9a82edbfb79e87c4a5966cc12dd06cb01f4fa52" + inputs-digest = "89f331d8a132d464f65394ad8511cdef46cdda0597be7b071e5d22cf65c35d2b" solver-name = "gps-cdcl" solver-version = 1 diff --git a/client/gogrpc/Makefile b/client/gogrpc/Makefile index 33c9ed0f88..805a841fa7 100644 --- a/client/gogrpc/Makefile +++ b/client/gogrpc/Makefile @@ -1,5 +1,16 @@ PROTO_SRC = ../../grpc/src/main/proto -proto: +SERVICES = \ + $(PROTO_SRC)/grpc/event_service.pb.go \ + $(PROTO_SRC)/grpc/metadata_service.pb.go \ + $(PROTO_SRC)/grpc/search.pb.go \ + $(PROTO_SRC)/grpc/task_service.pb.go \ + $(PROTO_SRC)/grpc/workflow_service.pb.go + +$(SERVICES): %.pb.go: %.proto + protoc -I $(PROTO_SRC) $< --go_out=plugins=grpc:$(GOPATH)/src + +models: protoc -I $(PROTO_SRC) $(PROTO_SRC)/model/*.proto --go_out=$(GOPATH)/src - protoc -I $(PROTO_SRC) $(PROTO_SRC)/grpc/*.proto --go_out=plugins=grpc:$(GOPATH)/src \ No newline at end of file + +proto: models $(SERVICES) \ No newline at end of file diff --git a/client/gogrpc/conductor/client.go 
b/client/gogrpc/conductor/client.go index 65a4f51c42..264de7a861 100644 --- a/client/gogrpc/conductor/client.go +++ b/client/gogrpc/conductor/client.go @@ -1,28 +1,30 @@ package conductor import ( - pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" grpc "google.golang.org/grpc" ) // TasksClient is a Conductor client that exposes the Conductor // Tasks API. type TasksClient interface { - Tasks() pb.TaskServiceClient + Tasks() tasks.TaskServiceClient Shutdown() } // MetadataClient is a Conductor client that exposes the Conductor // Metadata API. type MetadataClient interface { - Metadata() pb.MetadataServiceClient + Metadata() metadata.MetadataServiceClient Shutdown() } // WorkflowsClient is a Conductor client that exposes the Conductor // Workflows API. type WorkflowsClient interface { - Workflows() pb.WorkflowServiceClient + Workflows() workflows.WorkflowServiceClient Shutdown() } @@ -30,9 +32,9 @@ type WorkflowsClient interface { // the different services it exposes. type Client struct { conn *grpc.ClientConn - tasks pb.TaskServiceClient - metadata pb.MetadataServiceClient - workflows pb.WorkflowServiceClient + tasks tasks.TaskServiceClient + metadata metadata.MetadataServiceClient + workflows workflows.WorkflowServiceClient } // NewClient returns a new Client with a GRPC connection to the given address, @@ -51,25 +53,25 @@ func (client *Client) Shutdown() { } // Tasks returns the Tasks service for this client -func (client *Client) Tasks() pb.TaskServiceClient { +func (client *Client) Tasks() tasks.TaskServiceClient { if client.tasks == nil { - client.tasks = pb.NewTaskServiceClient(client.conn) + client.tasks = tasks.NewTaskServiceClient(client.conn) } return client.tasks } // Metadata returns the Metadata service for this client -func (client *Client) Metadata() pb.MetadataServiceClient { +func (client *Client) Metadata() metadata.MetadataServiceClient { if client.metadata == nil { - client.metadata = pb.NewMetadataServiceClient(client.conn) + client.metadata = metadata.NewMetadataServiceClient(client.conn) } return client.metadata } // Workflows returns the workflows service for this client -func (client *Client) Workflows() pb.WorkflowServiceClient { +func (client *Client) Workflows() workflows.WorkflowServiceClient { if client.workflows == nil { - client.workflows = pb.NewWorkflowServiceClient(client.conn) + client.workflows = workflows.NewWorkflowServiceClient(client.conn) } return client.workflows } diff --git a/client/gogrpc/conductor/grpc/events/event_service.pb.go b/client/gogrpc/conductor/grpc/events/event_service.pb.go new file mode 100644 index 0000000000..3283eca7f0 --- /dev/null +++ b/client/gogrpc/conductor/grpc/events/event_service.pb.go @@ -0,0 +1,991 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/event_service.proto + +package events // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/events" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type AddEventHandlerRequest struct { + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler" json:"handler,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddEventHandlerRequest) Reset() { *m = AddEventHandlerRequest{} } +func (m *AddEventHandlerRequest) String() string { return proto.CompactTextString(m) } +func (*AddEventHandlerRequest) ProtoMessage() {} +func (*AddEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{0} +} +func (m *AddEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddEventHandlerRequest.Unmarshal(m, b) +} +func (m *AddEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *AddEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddEventHandlerRequest.Merge(dst, src) +} +func (m *AddEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_AddEventHandlerRequest.Size(m) +} +func (m *AddEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddEventHandlerRequest proto.InternalMessageInfo + +func (m *AddEventHandlerRequest) GetHandler() *model.EventHandler { + if m != nil { + return m.Handler + } + return nil +} + +type AddEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddEventHandlerResponse) Reset() { *m = AddEventHandlerResponse{} } +func (m *AddEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*AddEventHandlerResponse) ProtoMessage() {} +func (*AddEventHandlerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{1} +} +func (m *AddEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddEventHandlerResponse.Unmarshal(m, b) +} +func (m *AddEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *AddEventHandlerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddEventHandlerResponse.Merge(dst, src) +} +func (m *AddEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_AddEventHandlerResponse.Size(m) +} +func (m *AddEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddEventHandlerResponse proto.InternalMessageInfo + +type UpdateEventHandlerRequest struct { + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler" json:"handler,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEventHandlerRequest) Reset() { *m = UpdateEventHandlerRequest{} } +func (m *UpdateEventHandlerRequest) String() string { return proto.CompactTextString(m) } +func 
(*UpdateEventHandlerRequest) ProtoMessage() {} +func (*UpdateEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{2} +} +func (m *UpdateEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEventHandlerRequest.Unmarshal(m, b) +} +func (m *UpdateEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEventHandlerRequest.Merge(dst, src) +} +func (m *UpdateEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_UpdateEventHandlerRequest.Size(m) +} +func (m *UpdateEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEventHandlerRequest proto.InternalMessageInfo + +func (m *UpdateEventHandlerRequest) GetHandler() *model.EventHandler { + if m != nil { + return m.Handler + } + return nil +} + +type UpdateEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateEventHandlerResponse) Reset() { *m = UpdateEventHandlerResponse{} } +func (m *UpdateEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateEventHandlerResponse) ProtoMessage() {} +func (*UpdateEventHandlerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{3} +} +func (m *UpdateEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateEventHandlerResponse.Unmarshal(m, b) +} +func (m *UpdateEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateEventHandlerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateEventHandlerResponse.Merge(dst, src) +} +func (m *UpdateEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_UpdateEventHandlerResponse.Size(m) +} +func (m *UpdateEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateEventHandlerResponse proto.InternalMessageInfo + +type RemoveEventHandlerRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveEventHandlerRequest) Reset() { *m = RemoveEventHandlerRequest{} } +func (m *RemoveEventHandlerRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveEventHandlerRequest) ProtoMessage() {} +func (*RemoveEventHandlerRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{4} +} +func (m *RemoveEventHandlerRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveEventHandlerRequest.Unmarshal(m, b) +} +func (m *RemoveEventHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveEventHandlerRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveEventHandlerRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveEventHandlerRequest.Merge(dst, src) +} +func (m *RemoveEventHandlerRequest) XXX_Size() int { + return xxx_messageInfo_RemoveEventHandlerRequest.Size(m) +} +func (m 
*RemoveEventHandlerRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveEventHandlerRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveEventHandlerRequest proto.InternalMessageInfo + +func (m *RemoveEventHandlerRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +type RemoveEventHandlerResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveEventHandlerResponse) Reset() { *m = RemoveEventHandlerResponse{} } +func (m *RemoveEventHandlerResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveEventHandlerResponse) ProtoMessage() {} +func (*RemoveEventHandlerResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{5} +} +func (m *RemoveEventHandlerResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveEventHandlerResponse.Unmarshal(m, b) +} +func (m *RemoveEventHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveEventHandlerResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveEventHandlerResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveEventHandlerResponse.Merge(dst, src) +} +func (m *RemoveEventHandlerResponse) XXX_Size() int { + return xxx_messageInfo_RemoveEventHandlerResponse.Size(m) +} +func (m *RemoveEventHandlerResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveEventHandlerResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveEventHandlerResponse proto.InternalMessageInfo + +type GetEventHandlersRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetEventHandlersRequest) Reset() { *m = GetEventHandlersRequest{} } +func (m *GetEventHandlersRequest) String() string { return proto.CompactTextString(m) } +func (*GetEventHandlersRequest) ProtoMessage() {} +func (*GetEventHandlersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{6} +} +func (m *GetEventHandlersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEventHandlersRequest.Unmarshal(m, b) +} +func (m *GetEventHandlersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEventHandlersRequest.Marshal(b, m, deterministic) +} +func (dst *GetEventHandlersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEventHandlersRequest.Merge(dst, src) +} +func (m *GetEventHandlersRequest) XXX_Size() int { + return xxx_messageInfo_GetEventHandlersRequest.Size(m) +} +func (m *GetEventHandlersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEventHandlersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEventHandlersRequest proto.InternalMessageInfo + +type GetEventHandlersForEventRequest struct { + Event string `protobuf:"bytes,1,opt,name=event" json:"event,omitempty"` + ActiveOnly bool `protobuf:"varint,2,opt,name=active_only,json=activeOnly" json:"active_only,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetEventHandlersForEventRequest) Reset() { *m = GetEventHandlersForEventRequest{} } +func (m *GetEventHandlersForEventRequest) String() string { return proto.CompactTextString(m) } +func (*GetEventHandlersForEventRequest) ProtoMessage() {} +func (*GetEventHandlersForEventRequest) Descriptor() ([]byte, []int) { + return 
fileDescriptor_event_service_913a1fde08d4f277, []int{7} +} +func (m *GetEventHandlersForEventRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetEventHandlersForEventRequest.Unmarshal(m, b) +} +func (m *GetEventHandlersForEventRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetEventHandlersForEventRequest.Marshal(b, m, deterministic) +} +func (dst *GetEventHandlersForEventRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetEventHandlersForEventRequest.Merge(dst, src) +} +func (m *GetEventHandlersForEventRequest) XXX_Size() int { + return xxx_messageInfo_GetEventHandlersForEventRequest.Size(m) +} +func (m *GetEventHandlersForEventRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetEventHandlersForEventRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetEventHandlersForEventRequest proto.InternalMessageInfo + +func (m *GetEventHandlersForEventRequest) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *GetEventHandlersForEventRequest) GetActiveOnly() bool { + if m != nil { + return m.ActiveOnly + } + return false +} + +type GetQueuesRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueuesRequest) Reset() { *m = GetQueuesRequest{} } +func (m *GetQueuesRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueuesRequest) ProtoMessage() {} +func (*GetQueuesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{8} +} +func (m *GetQueuesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueuesRequest.Unmarshal(m, b) +} +func (m *GetQueuesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueuesRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueuesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueuesRequest.Merge(dst, src) +} +func (m *GetQueuesRequest) XXX_Size() int { + return xxx_messageInfo_GetQueuesRequest.Size(m) +} +func (m *GetQueuesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueuesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueuesRequest proto.InternalMessageInfo + +type GetQueuesResponse struct { + EventToQueueUri map[string]string `protobuf:"bytes,1,rep,name=event_to_queue_uri,json=eventToQueueUri" json:"event_to_queue_uri,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueuesResponse) Reset() { *m = GetQueuesResponse{} } +func (m *GetQueuesResponse) String() string { return proto.CompactTextString(m) } +func (*GetQueuesResponse) ProtoMessage() {} +func (*GetQueuesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{9} +} +func (m *GetQueuesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueuesResponse.Unmarshal(m, b) +} +func (m *GetQueuesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueuesResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueuesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueuesResponse.Merge(dst, src) +} +func (m *GetQueuesResponse) XXX_Size() int { + return xxx_messageInfo_GetQueuesResponse.Size(m) +} +func (m *GetQueuesResponse) XXX_DiscardUnknown() { + 
xxx_messageInfo_GetQueuesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueuesResponse proto.InternalMessageInfo + +func (m *GetQueuesResponse) GetEventToQueueUri() map[string]string { + if m != nil { + return m.EventToQueueUri + } + return nil +} + +type GetQueueSizesRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesRequest) Reset() { *m = GetQueueSizesRequest{} } +func (m *GetQueueSizesRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueSizesRequest) ProtoMessage() {} +func (*GetQueueSizesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{10} +} +func (m *GetQueueSizesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesRequest.Unmarshal(m, b) +} +func (m *GetQueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesRequest.Merge(dst, src) +} +func (m *GetQueueSizesRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesRequest.Size(m) +} +func (m *GetQueueSizesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesRequest proto.InternalMessageInfo + +type GetQueueSizesResponse struct { + EventToQueueInfo map[string]*GetQueueSizesResponse_QueueInfo `protobuf:"bytes,2,rep,name=event_to_queue_info,json=eventToQueueInfo" json:"event_to_queue_info,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesResponse) Reset() { *m = GetQueueSizesResponse{} } +func (m *GetQueueSizesResponse) String() string { return proto.CompactTextString(m) } +func (*GetQueueSizesResponse) ProtoMessage() {} +func (*GetQueueSizesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{11} +} +func (m *GetQueueSizesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesResponse.Unmarshal(m, b) +} +func (m *GetQueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesResponse.Merge(dst, src) +} +func (m *GetQueueSizesResponse) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesResponse.Size(m) +} +func (m *GetQueueSizesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesResponse proto.InternalMessageInfo + +func (m *GetQueueSizesResponse) GetEventToQueueInfo() map[string]*GetQueueSizesResponse_QueueInfo { + if m != nil { + return m.EventToQueueInfo + } + return nil +} + +type GetQueueSizesResponse_QueueInfo struct { + QueueSizes map[string]int64 `protobuf:"bytes,1,rep,name=queue_sizes,json=queueSizes" json:"queue_sizes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueSizesResponse_QueueInfo) Reset() { *m = GetQueueSizesResponse_QueueInfo{} } 
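The two queue-inspection responses here are map-shaped: GetQueuesResponse maps an event name to its queue URI, while GetQueueSizesResponse nests a QueueInfo per event whose QueueSizes map carries per-queue counts. A sketch of walking the nested reply, using only the generated accessors defined in this file (resp is assumed to come from EventServiceClient.GetQueueSizes):

// Hypothetical helper; illustrative only.
package sketch

import (
	"fmt"

	"github.com/netflix/conductor/client/gogrpc/conductor/grpc/events"
)

func printQueueSizes(resp *events.GetQueueSizesResponse) {
	for event, info := range resp.GetEventToQueueInfo() {
		// info is a *events.GetQueueSizesResponse_QueueInfo.
		for queue, size := range info.GetQueueSizes() {
			fmt.Printf("event=%s queue=%s size=%d\n", event, queue, size)
		}
	}
}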
+func (m *GetQueueSizesResponse_QueueInfo) String() string { return proto.CompactTextString(m) } +func (*GetQueueSizesResponse_QueueInfo) ProtoMessage() {} +func (*GetQueueSizesResponse_QueueInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{11, 0} +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Unmarshal(m, b) +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Marshal(b, m, deterministic) +} +func (dst *GetQueueSizesResponse_QueueInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Merge(dst, src) +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_Size() int { + return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Size(m) +} +func (m *GetQueueSizesResponse_QueueInfo) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueSizesResponse_QueueInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueSizesResponse_QueueInfo proto.InternalMessageInfo + +func (m *GetQueueSizesResponse_QueueInfo) GetQueueSizes() map[string]int64 { + if m != nil { + return m.QueueSizes + } + return nil +} + +type GetQueueProvidersRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueProvidersRequest) Reset() { *m = GetQueueProvidersRequest{} } +func (m *GetQueueProvidersRequest) String() string { return proto.CompactTextString(m) } +func (*GetQueueProvidersRequest) ProtoMessage() {} +func (*GetQueueProvidersRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{12} +} +func (m *GetQueueProvidersRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueProvidersRequest.Unmarshal(m, b) +} +func (m *GetQueueProvidersRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueProvidersRequest.Marshal(b, m, deterministic) +} +func (dst *GetQueueProvidersRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetQueueProvidersRequest.Merge(dst, src) +} +func (m *GetQueueProvidersRequest) XXX_Size() int { + return xxx_messageInfo_GetQueueProvidersRequest.Size(m) +} +func (m *GetQueueProvidersRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueProvidersRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueProvidersRequest proto.InternalMessageInfo + +type GetQueueProvidersResponse struct { + Providers []string `protobuf:"bytes,1,rep,name=providers" json:"providers,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetQueueProvidersResponse) Reset() { *m = GetQueueProvidersResponse{} } +func (m *GetQueueProvidersResponse) String() string { return proto.CompactTextString(m) } +func (*GetQueueProvidersResponse) ProtoMessage() {} +func (*GetQueueProvidersResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_event_service_913a1fde08d4f277, []int{13} +} +func (m *GetQueueProvidersResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetQueueProvidersResponse.Unmarshal(m, b) +} +func (m *GetQueueProvidersResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetQueueProvidersResponse.Marshal(b, m, deterministic) +} +func (dst *GetQueueProvidersResponse) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_GetQueueProvidersResponse.Merge(dst, src) +} +func (m *GetQueueProvidersResponse) XXX_Size() int { + return xxx_messageInfo_GetQueueProvidersResponse.Size(m) +} +func (m *GetQueueProvidersResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetQueueProvidersResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetQueueProvidersResponse proto.InternalMessageInfo + +func (m *GetQueueProvidersResponse) GetProviders() []string { + if m != nil { + return m.Providers + } + return nil +} + +func init() { + proto.RegisterType((*AddEventHandlerRequest)(nil), "conductor.grpc.events.AddEventHandlerRequest") + proto.RegisterType((*AddEventHandlerResponse)(nil), "conductor.grpc.events.AddEventHandlerResponse") + proto.RegisterType((*UpdateEventHandlerRequest)(nil), "conductor.grpc.events.UpdateEventHandlerRequest") + proto.RegisterType((*UpdateEventHandlerResponse)(nil), "conductor.grpc.events.UpdateEventHandlerResponse") + proto.RegisterType((*RemoveEventHandlerRequest)(nil), "conductor.grpc.events.RemoveEventHandlerRequest") + proto.RegisterType((*RemoveEventHandlerResponse)(nil), "conductor.grpc.events.RemoveEventHandlerResponse") + proto.RegisterType((*GetEventHandlersRequest)(nil), "conductor.grpc.events.GetEventHandlersRequest") + proto.RegisterType((*GetEventHandlersForEventRequest)(nil), "conductor.grpc.events.GetEventHandlersForEventRequest") + proto.RegisterType((*GetQueuesRequest)(nil), "conductor.grpc.events.GetQueuesRequest") + proto.RegisterType((*GetQueuesResponse)(nil), "conductor.grpc.events.GetQueuesResponse") + proto.RegisterMapType((map[string]string)(nil), "conductor.grpc.events.GetQueuesResponse.EventToQueueUriEntry") + proto.RegisterType((*GetQueueSizesRequest)(nil), "conductor.grpc.events.GetQueueSizesRequest") + proto.RegisterType((*GetQueueSizesResponse)(nil), "conductor.grpc.events.GetQueueSizesResponse") + proto.RegisterMapType((map[string]*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.EventToQueueInfoEntry") + proto.RegisterType((*GetQueueSizesResponse_QueueInfo)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo") + proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.events.GetQueueSizesResponse.QueueInfo.QueueSizesEntry") + proto.RegisterType((*GetQueueProvidersRequest)(nil), "conductor.grpc.events.GetQueueProvidersRequest") + proto.RegisterType((*GetQueueProvidersResponse)(nil), "conductor.grpc.events.GetQueueProvidersResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// EventServiceClient is the client API for EventService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
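A unary round trip against the generated client follows the usual grpc-go shape; for example, listing the registered queue providers (sketch; the dial target is an assumption, all service and message names are from this file):

// Illustrative only, not part of the generated file.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/netflix/conductor/client/gogrpc/conductor/grpc/events"
	grpc "google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:8090", grpc.WithInsecure()) // assumed endpoint
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	ec := events.NewEventServiceClient(conn)
	resp, err := ec.GetQueueProviders(context.Background(), &events.GetQueueProvidersRequest{})
	if err != nil {
		log.Fatalf("GetQueueProviders: %v", err)
	}
	for _, p := range resp.GetProviders() {
		fmt.Println("queue provider:", p)
	}
}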
+type EventServiceClient interface { + // POST / + AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) + // PUT / + UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) + // DELETE /{name} + RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) + // GET / + GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) + // GET /{name} + GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) + // GET /queues + GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) + GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) + // GET /queues/providers + GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) +} + +type eventServiceClient struct { + cc *grpc.ClientConn +} + +func NewEventServiceClient(cc *grpc.ClientConn) EventServiceClient { + return &eventServiceClient{cc} +} + +func (c *eventServiceClient) AddEventHandler(ctx context.Context, in *AddEventHandlerRequest, opts ...grpc.CallOption) (*AddEventHandlerResponse, error) { + out := new(AddEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/AddEventHandler", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) UpdateEventHandler(ctx context.Context, in *UpdateEventHandlerRequest, opts ...grpc.CallOption) (*UpdateEventHandlerResponse, error) { + out := new(UpdateEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/UpdateEventHandler", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) RemoveEventHandler(ctx context.Context, in *RemoveEventHandlerRequest, opts ...grpc.CallOption) (*RemoveEventHandlerResponse, error) { + out := new(RemoveEventHandlerResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/RemoveEventHandler", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetEventHandlers(ctx context.Context, in *GetEventHandlersRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersClient, error) { + stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[0], "/conductor.grpc.events.EventService/GetEventHandlers", opts...) 
+ if err != nil { + return nil, err + } + x := &eventServiceGetEventHandlersClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type EventService_GetEventHandlersClient interface { + Recv() (*model.EventHandler, error) + grpc.ClientStream +} + +type eventServiceGetEventHandlersClient struct { + grpc.ClientStream +} + +func (x *eventServiceGetEventHandlersClient) Recv() (*model.EventHandler, error) { + m := new(model.EventHandler) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *eventServiceClient) GetEventHandlersForEvent(ctx context.Context, in *GetEventHandlersForEventRequest, opts ...grpc.CallOption) (EventService_GetEventHandlersForEventClient, error) { + stream, err := c.cc.NewStream(ctx, &_EventService_serviceDesc.Streams[1], "/conductor.grpc.events.EventService/GetEventHandlersForEvent", opts...) + if err != nil { + return nil, err + } + x := &eventServiceGetEventHandlersForEventClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type EventService_GetEventHandlersForEventClient interface { + Recv() (*model.EventHandler, error) + grpc.ClientStream +} + +type eventServiceGetEventHandlersForEventClient struct { + grpc.ClientStream +} + +func (x *eventServiceGetEventHandlersForEventClient) Recv() (*model.EventHandler, error) { + m := new(model.EventHandler) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *eventServiceClient) GetQueues(ctx context.Context, in *GetQueuesRequest, opts ...grpc.CallOption) (*GetQueuesResponse, error) { + out := new(GetQueuesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueues", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetQueueSizes(ctx context.Context, in *GetQueueSizesRequest, opts ...grpc.CallOption) (*GetQueueSizesResponse, error) { + out := new(GetQueueSizesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueSizes", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *eventServiceClient) GetQueueProviders(ctx context.Context, in *GetQueueProvidersRequest, opts ...grpc.CallOption) (*GetQueueProvidersResponse, error) { + out := new(GetQueueProvidersResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.events.EventService/GetQueueProviders", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// EventServiceServer is the server API for EventService service. 
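GetEventHandlers and GetEventHandlersForEvent are server-streaming RPCs: the caller gets a stream handle and drains it with Recv until io.EOF, as the server-side definitions that follow mirror with Send. A client-side sketch (conn is an assumed, already-dialed *grpc.ClientConn):

// Illustrative only; uses the streaming client type generated above.
package sketch

import (
	"context"
	"fmt"
	"io"

	"github.com/netflix/conductor/client/gogrpc/conductor/grpc/events"
	grpc "google.golang.org/grpc"
)

func listEventHandlers(ctx context.Context, conn *grpc.ClientConn) error {
	ec := events.NewEventServiceClient(conn)
	stream, err := ec.GetEventHandlers(ctx, &events.GetEventHandlersRequest{})
	if err != nil {
		return err
	}
	for {
		handler, err := stream.Recv() // *model.EventHandler
		if err == io.EOF {
			return nil // server finished streaming
		}
		if err != nil {
			return err
		}
		fmt.Println(handler.String())
	}
}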
+type EventServiceServer interface { + // POST / + AddEventHandler(context.Context, *AddEventHandlerRequest) (*AddEventHandlerResponse, error) + // PUT / + UpdateEventHandler(context.Context, *UpdateEventHandlerRequest) (*UpdateEventHandlerResponse, error) + // DELETE /{name} + RemoveEventHandler(context.Context, *RemoveEventHandlerRequest) (*RemoveEventHandlerResponse, error) + // GET / + GetEventHandlers(*GetEventHandlersRequest, EventService_GetEventHandlersServer) error + // GET /{name} + GetEventHandlersForEvent(*GetEventHandlersForEventRequest, EventService_GetEventHandlersForEventServer) error + // GET /queues + GetQueues(context.Context, *GetQueuesRequest) (*GetQueuesResponse, error) + GetQueueSizes(context.Context, *GetQueueSizesRequest) (*GetQueueSizesResponse, error) + // GET /queues/providers + GetQueueProviders(context.Context, *GetQueueProvidersRequest) (*GetQueueProvidersResponse, error) +} + +func RegisterEventServiceServer(s *grpc.Server, srv EventServiceServer) { + s.RegisterService(&_EventService_serviceDesc, srv) +} + +func _EventService_AddEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).AddEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/AddEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).AddEventHandler(ctx, req.(*AddEventHandlerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_UpdateEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).UpdateEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/UpdateEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).UpdateEventHandler(ctx, req.(*UpdateEventHandlerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_RemoveEventHandler_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveEventHandlerRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).RemoveEventHandler(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/RemoveEventHandler", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).RemoveEventHandler(ctx, req.(*RemoveEventHandlerRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_GetEventHandlers_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetEventHandlersRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(EventServiceServer).GetEventHandlers(m, &eventServiceGetEventHandlersServer{stream}) +} + +type EventService_GetEventHandlersServer interface { + 
Send(*model.EventHandler) error + grpc.ServerStream +} + +type eventServiceGetEventHandlersServer struct { + grpc.ServerStream +} + +func (x *eventServiceGetEventHandlersServer) Send(m *model.EventHandler) error { + return x.ServerStream.SendMsg(m) +} + +func _EventService_GetEventHandlersForEvent_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(GetEventHandlersForEventRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(EventServiceServer).GetEventHandlersForEvent(m, &eventServiceGetEventHandlersForEventServer{stream}) +} + +type EventService_GetEventHandlersForEventServer interface { + Send(*model.EventHandler) error + grpc.ServerStream +} + +type eventServiceGetEventHandlersForEventServer struct { + grpc.ServerStream +} + +func (x *eventServiceGetEventHandlersForEventServer) Send(m *model.EventHandler) error { + return x.ServerStream.SendMsg(m) +} + +func _EventService_GetQueues_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueuesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueues(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueues", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueues(ctx, req.(*GetQueuesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_GetQueueSizes_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueSizesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueueSizes(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueueSizes", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueueSizes(ctx, req.(*GetQueueSizesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _EventService_GetQueueProviders_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetQueueProvidersRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(EventServiceServer).GetQueueProviders(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.events.EventService/GetQueueProviders", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(EventServiceServer).GetQueueProviders(ctx, req.(*GetQueueProvidersRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _EventService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.events.EventService", + HandlerType: (*EventServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "AddEventHandler", + Handler: _EventService_AddEventHandler_Handler, + }, + { + MethodName: "UpdateEventHandler", + Handler: _EventService_UpdateEventHandler_Handler, + }, + { + MethodName: "RemoveEventHandler", + Handler: _EventService_RemoveEventHandler_Handler, + }, + { + MethodName: "GetQueues", + Handler: _EventService_GetQueues_Handler, + }, + { + MethodName: "GetQueueSizes", + 
Handler: _EventService_GetQueueSizes_Handler, + }, + { + MethodName: "GetQueueProviders", + Handler: _EventService_GetQueueProviders_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "GetEventHandlers", + Handler: _EventService_GetEventHandlers_Handler, + ServerStreams: true, + }, + { + StreamName: "GetEventHandlersForEvent", + Handler: _EventService_GetEventHandlersForEvent_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/event_service.proto", +} + +func init() { + proto.RegisterFile("grpc/event_service.proto", fileDescriptor_event_service_913a1fde08d4f277) +} + +var fileDescriptor_event_service_913a1fde08d4f277 = []byte{ + // 687 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5d, 0x6f, 0xd3, 0x3c, + 0x18, 0x55, 0xd6, 0xf7, 0x65, 0xf4, 0x29, 0xb0, 0x61, 0xf6, 0x91, 0x5a, 0x43, 0x9b, 0x7a, 0x43, + 0x25, 0xc0, 0x19, 0x45, 0x1a, 0x1f, 0xd2, 0x24, 0xa8, 0xb4, 0x0d, 0x24, 0x24, 0xb6, 0x6c, 0x93, + 0x10, 0x17, 0x54, 0x5d, 0xe2, 0x75, 0x19, 0xa9, 0xdd, 0x26, 0x4e, 0x44, 0xe1, 0x6f, 0xf0, 0x4f, + 0xb8, 0xe6, 0x8a, 0x3f, 0x86, 0x62, 0xa7, 0x49, 0x9a, 0x26, 0xb4, 0x45, 0xdc, 0xd5, 0xcf, 0xc7, + 0x39, 0x3e, 0xb6, 0x9f, 0xd3, 0x80, 0xde, 0xf3, 0x06, 0x96, 0x41, 0x43, 0xca, 0x44, 0xc7, 0xa7, + 0x5e, 0xe8, 0x58, 0x94, 0x0c, 0x3c, 0x2e, 0x38, 0x5a, 0xb7, 0x38, 0xb3, 0x03, 0x4b, 0x70, 0x8f, + 0x44, 0x35, 0x44, 0xd6, 0xf8, 0x58, 0xef, 0x73, 0x9b, 0xba, 0xaa, 0xe3, 0xaa, 0xcb, 0x6c, 0x97, + 0x7a, 0xaa, 0xa1, 0x71, 0x02, 0x1b, 0xaf, 0x6d, 0xfb, 0x20, 0x4a, 0xbc, 0x51, 0x09, 0x93, 0x0e, + 0x03, 0xea, 0x0b, 0xf4, 0x0c, 0x96, 0xe3, 0x52, 0x5d, 0xdb, 0xd1, 0x9a, 0xb5, 0xd6, 0x7d, 0x92, + 0x82, 0xcb, 0x66, 0x32, 0xd1, 0x36, 0xae, 0x6e, 0xd4, 0x61, 0x73, 0x0a, 0xd2, 0x1f, 0x70, 0xe6, + 0xd3, 0xc6, 0x19, 0xd4, 0xcf, 0x07, 0x76, 0x57, 0xd0, 0x7f, 0x4a, 0xb8, 0x05, 0xb8, 0x08, 0x35, + 0xe6, 0x34, 0xa0, 0x6e, 0xd2, 0x3e, 0x0f, 0x0b, 0x39, 0x11, 0xfc, 0xc7, 0xba, 0x7d, 0x2a, 0x09, + 0xab, 0xa6, 0xfc, 0x1d, 0xc1, 0x15, 0x35, 0xc4, 0x70, 0x75, 0xd8, 0x3c, 0xa2, 0x22, 0x9b, 0xf2, + 0x63, 0xb0, 0xc6, 0x07, 0xd8, 0xce, 0xa7, 0x0e, 0xb9, 0x27, 0xd7, 0x63, 0xbe, 0x35, 0xf8, 0x5f, + 0x5e, 0x42, 0x4c, 0xa8, 0x16, 0x68, 0x1b, 0x6a, 0x5d, 0x4b, 0x38, 0x21, 0xed, 0x70, 0xe6, 0x8e, + 0xf4, 0xa5, 0x1d, 0xad, 0x79, 0xd3, 0x04, 0x15, 0x7a, 0xcf, 0xdc, 0x51, 0x03, 0xc1, 0xea, 0x11, + 0x15, 0x27, 0x01, 0x0d, 0x68, 0xc2, 0xf6, 0x4b, 0x83, 0xbb, 0x99, 0xa0, 0xda, 0x1e, 0xba, 0x06, + 0xa4, 0xde, 0x85, 0xe0, 0x9d, 0x61, 0x94, 0xea, 0x04, 0x9e, 0xa3, 0x6b, 0x3b, 0x95, 0x66, 0xad, + 0xb5, 0x4f, 0x0a, 0x5f, 0x07, 0x99, 0x42, 0x51, 0xe7, 0x7c, 0xc6, 0x65, 0xf4, 0xdc, 0x73, 0x0e, + 0x98, 0xf0, 0x46, 0xe6, 0x0a, 0x9d, 0x8c, 0xe2, 0x36, 0xac, 0x15, 0x15, 0xa2, 0x55, 0xa8, 0x7c, + 0xa6, 0xa3, 0x58, 0x62, 0xf4, 0x33, 0x92, 0x1d, 0x76, 0xdd, 0x80, 0x4a, 0x69, 0x55, 0x53, 0x2d, + 0x5e, 0x2e, 0x3d, 0xd7, 0x1a, 0x1b, 0xb0, 0x36, 0xa6, 0x3f, 0x75, 0xbe, 0xa6, 0xea, 0x7e, 0x56, + 0x60, 0x3d, 0x97, 0x88, 0x15, 0x0e, 0xe1, 0x5e, 0x4e, 0xa1, 0xc3, 0x2e, 0xb9, 0xbe, 0x24, 0x25, + 0xb6, 0x67, 0x48, 0x9c, 0x80, 0x9a, 0x90, 0xf9, 0x96, 0x5d, 0x72, 0xa5, 0x73, 0x95, 0xe6, 0xc2, + 0xf8, 0x87, 0x06, 0xd5, 0x64, 0x85, 0x7a, 0x50, 0x53, 0xbc, 0x7e, 0x04, 0x16, 0x9f, 0xed, 0xe1, + 0x42, 0xc4, 0x09, 0x18, 0x49, 0x93, 0x8a, 0x1c, 0x86, 0x49, 0x00, 0xef, 0xc3, 0x4a, 0x2e, 0x3d, + 0xeb, 0x68, 0x2b, 0x99, 0xa3, 0xc5, 0xdf, 0x60, 0xbd, 0x50, 0x60, 0x01, 0xc8, 0xbb, 0x2c, 0x48, + 0xad, 0xb5, 0xf7, 0x77, 0x62, 0xb2, 0xf7, 0x8a, 0x41, 0x1f, 0x57, 0x1f, 0x7b, 0x3c, 0x74, 0xec, + 
0xcc, 0x9c, 0xbc, 0x80, 0x7a, 0x41, 0x2e, 0xbe, 0xde, 0x2d, 0xa8, 0x0e, 0xc6, 0x41, 0x79, 0xb6, + 0x55, 0x33, 0x0d, 0xb4, 0xbe, 0x2f, 0xc3, 0x2d, 0x29, 0xea, 0x54, 0xd9, 0x1e, 0x1a, 0xc0, 0x4a, + 0xce, 0x6c, 0xd0, 0xe3, 0x92, 0xdd, 0x17, 0xfb, 0x1c, 0x26, 0xf3, 0x96, 0xc7, 0x1b, 0x1c, 0x01, + 0x9a, 0x76, 0x1b, 0xb4, 0x5b, 0x82, 0x52, 0x6a, 0x77, 0xf8, 0xc9, 0x02, 0x1d, 0x29, 0xf5, 0xb4, + 0x33, 0x95, 0x52, 0x97, 0xba, 0x5e, 0x29, 0x75, 0xb9, 0xed, 0x21, 0x4b, 0x3a, 0xd0, 0x84, 0xb7, + 0x21, 0x52, 0xfe, 0x4c, 0x8a, 0xfc, 0x11, 0xff, 0xd9, 0xcf, 0x77, 0x35, 0xe4, 0xcb, 0x47, 0x53, + 0x68, 0xa0, 0x68, 0x6f, 0x4e, 0xb2, 0x9c, 0xe3, 0xce, 0x26, 0xfd, 0x04, 0xd5, 0xc4, 0x00, 0xd1, + 0x83, 0xd9, 0x16, 0xa9, 0x60, 0x9b, 0xf3, 0x7a, 0x29, 0xba, 0x86, 0xdb, 0x13, 0x73, 0x83, 0x1e, + 0xce, 0x37, 0x5d, 0x8a, 0xe7, 0xd1, 0x22, 0xa3, 0x88, 0xc2, 0xf4, 0x2f, 0x21, 0x99, 0x2c, 0x64, + 0xcc, 0x80, 0xc8, 0xcf, 0x27, 0xde, 0x9d, 0xbf, 0x41, 0xf1, 0xb6, 0x19, 0x60, 0x8b, 0xf7, 0x09, + 0xa3, 0xe2, 0xd2, 0x75, 0xbe, 0xe4, 0xda, 0xdb, 0x77, 0xb2, 0x13, 0x7b, 0x7c, 0xf1, 0xf1, 0x55, + 0xcf, 0x11, 0x57, 0xc1, 0x05, 0xb1, 0x78, 0xdf, 0x88, 0x5b, 0x8c, 0xa4, 0xc5, 0xb0, 0x5c, 0x87, + 0x32, 0x61, 0xf4, 0xb8, 0xfc, 0xc8, 0x49, 0xe3, 0xe9, 0x37, 0x8f, 0x7f, 0x71, 0x43, 0xde, 0xe4, + 0xd3, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x65, 0xa5, 0x44, 0x1b, 0x09, 0x09, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go b/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go new file mode 100644 index 0000000000..95d4c3a980 --- /dev/null +++ b/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go @@ -0,0 +1,867 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/metadata_service.proto + +package metadata // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
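The metadata stubs below follow the same pattern as the event service: thin request/response wrappers around the shared model package imported above. For instance, registering a new workflow definition is a single unary call (sketch; the WorkflowDef fields are elided because model.WorkflowDef is defined outside this file):

// Illustrative only; service and message names are defined below in this file.
package sketch

import (
	"context"

	"github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata"
	model "github.com/netflix/conductor/client/gogrpc/conductor/model"
	grpc "google.golang.org/grpc"
)

func registerWorkflow(ctx context.Context, conn *grpc.ClientConn) error {
	mc := metadata.NewMetadataServiceClient(conn)
	req := &metadata.CreateWorkflowRequest{
		Workflow: &model.WorkflowDef{ /* definition fields elided */ },
	}
	_, err := mc.CreateWorkflow(ctx, req)
	return err
}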
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type CreateWorkflowRequest struct { + Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowRequest) Reset() { *m = CreateWorkflowRequest{} } +func (m *CreateWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowRequest) ProtoMessage() {} +func (*CreateWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{0} +} +func (m *CreateWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowRequest.Unmarshal(m, b) +} +func (m *CreateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowRequest.Merge(dst, src) +} +func (m *CreateWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowRequest.Size(m) +} +func (m *CreateWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowRequest proto.InternalMessageInfo + +func (m *CreateWorkflowRequest) GetWorkflow() *model.WorkflowDef { + if m != nil { + return m.Workflow + } + return nil +} + +type CreateWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateWorkflowResponse) Reset() { *m = CreateWorkflowResponse{} } +func (m *CreateWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*CreateWorkflowResponse) ProtoMessage() {} +func (*CreateWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{1} +} +func (m *CreateWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateWorkflowResponse.Unmarshal(m, b) +} +func (m *CreateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *CreateWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateWorkflowResponse.Merge(dst, src) +} +func (m *CreateWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_CreateWorkflowResponse.Size(m) +} +func (m *CreateWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateWorkflowResponse proto.InternalMessageInfo + +type UpdateWorkflowsRequest struct { + Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} } +func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowsRequest) ProtoMessage() {} +func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{2} +} +func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) +} +func (m *UpdateWorkflowsRequest) XXX_Marshal(b []byte, 
deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowsRequest.Merge(dst, src) +} +func (m *UpdateWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowsRequest.Size(m) +} +func (m *UpdateWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowsRequest proto.InternalMessageInfo + +func (m *UpdateWorkflowsRequest) GetDefs() []*model.WorkflowDef { + if m != nil { + return m.Defs + } + return nil +} + +type UpdateWorkflowsResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateWorkflowsResponse) Reset() { *m = UpdateWorkflowsResponse{} } +func (m *UpdateWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateWorkflowsResponse) ProtoMessage() {} +func (*UpdateWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{3} +} +func (m *UpdateWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateWorkflowsResponse.Unmarshal(m, b) +} +func (m *UpdateWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateWorkflowsResponse.Merge(dst, src) +} +func (m *UpdateWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_UpdateWorkflowsResponse.Size(m) +} +func (m *UpdateWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateWorkflowsResponse proto.InternalMessageInfo + +type GetWorkflowRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowRequest) Reset() { *m = GetWorkflowRequest{} } +func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowRequest) ProtoMessage() {} +func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{4} +} +func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) +} +func (m *GetWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowRequest.Merge(dst, src) +} +func (m *GetWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowRequest.Size(m) +} +func (m *GetWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowRequest proto.InternalMessageInfo + +func (m *GetWorkflowRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +type GetWorkflowResponse struct { + Workflow *model.WorkflowDef 
`protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowResponse) Reset() { *m = GetWorkflowResponse{} } +func (m *GetWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowResponse) ProtoMessage() {} +func (*GetWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{5} +} +func (m *GetWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowResponse.Unmarshal(m, b) +} +func (m *GetWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowResponse.Merge(dst, src) +} +func (m *GetWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowResponse.Size(m) +} +func (m *GetWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowResponse proto.InternalMessageInfo + +func (m *GetWorkflowResponse) GetWorkflow() *model.WorkflowDef { + if m != nil { + return m.Workflow + } + return nil +} + +type CreateTasksRequest struct { + Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } +func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } +func (*CreateTasksRequest) ProtoMessage() {} +func (*CreateTasksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{6} +} +func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) +} +func (m *CreateTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTasksRequest.Marshal(b, m, deterministic) +} +func (dst *CreateTasksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_CreateTasksRequest.Merge(dst, src) +} +func (m *CreateTasksRequest) XXX_Size() int { + return xxx_messageInfo_CreateTasksRequest.Size(m) +} +func (m *CreateTasksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTasksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTasksRequest proto.InternalMessageInfo + +func (m *CreateTasksRequest) GetDefs() []*model.TaskDef { + if m != nil { + return m.Defs + } + return nil +} + +type CreateTasksResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CreateTasksResponse) Reset() { *m = CreateTasksResponse{} } +func (m *CreateTasksResponse) String() string { return proto.CompactTextString(m) } +func (*CreateTasksResponse) ProtoMessage() {} +func (*CreateTasksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{7} +} +func (m *CreateTasksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_CreateTasksResponse.Unmarshal(m, b) +} +func (m *CreateTasksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_CreateTasksResponse.Marshal(b, m, deterministic) +} +func (dst *CreateTasksResponse) XXX_Merge(src proto.Message) 
{ + xxx_messageInfo_CreateTasksResponse.Merge(dst, src) +} +func (m *CreateTasksResponse) XXX_Size() int { + return xxx_messageInfo_CreateTasksResponse.Size(m) +} +func (m *CreateTasksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_CreateTasksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_CreateTasksResponse proto.InternalMessageInfo + +type UpdateTaskRequest struct { + Task *model.TaskDef `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } +func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskRequest) ProtoMessage() {} +func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{8} +} +func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) +} +func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) +} +func (m *UpdateTaskRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTaskRequest.Size(m) +} +func (m *UpdateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo + +func (m *UpdateTaskRequest) GetTask() *model.TaskDef { + if m != nil { + return m.Task + } + return nil +} + +type UpdateTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } +func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskResponse) ProtoMessage() {} +func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{9} +} +func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) +} +func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) +} +func (m *UpdateTaskResponse) XXX_Size() int { + return xxx_messageInfo_UpdateTaskResponse.Size(m) +} +func (m *UpdateTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo + +type GetTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{10} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m 
*GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +type GetTaskResponse struct { + Task *model.TaskDef `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } +func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskResponse) ProtoMessage() {} +func (*GetTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{11} +} +func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) +} +func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskResponse.Merge(dst, src) +} +func (m *GetTaskResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskResponse.Size(m) +} +func (m *GetTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo + +func (m *GetTaskResponse) GetTask() *model.TaskDef { + if m != nil { + return m.Task + } + return nil +} + +type DeleteTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} } +func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) } +func (*DeleteTaskRequest) ProtoMessage() {} +func (*DeleteTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{12} +} +func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b) +} +func (m *DeleteTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskRequest.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskRequest.Merge(dst, src) +} +func (m *DeleteTaskRequest) XXX_Size() int { + return xxx_messageInfo_DeleteTaskRequest.Size(m) +} +func (m *DeleteTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskRequest proto.InternalMessageInfo + +func (m *DeleteTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +type DeleteTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DeleteTaskResponse) Reset() { *m = DeleteTaskResponse{} } +func (m *DeleteTaskResponse) 
String() string { return proto.CompactTextString(m) } +func (*DeleteTaskResponse) ProtoMessage() {} +func (*DeleteTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{13} +} +func (m *DeleteTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DeleteTaskResponse.Unmarshal(m, b) +} +func (m *DeleteTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DeleteTaskResponse.Marshal(b, m, deterministic) +} +func (dst *DeleteTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DeleteTaskResponse.Merge(dst, src) +} +func (m *DeleteTaskResponse) XXX_Size() int { + return xxx_messageInfo_DeleteTaskResponse.Size(m) +} +func (m *DeleteTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DeleteTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DeleteTaskResponse proto.InternalMessageInfo + +func init() { + proto.RegisterType((*CreateWorkflowRequest)(nil), "conductor.grpc.metadata.CreateWorkflowRequest") + proto.RegisterType((*CreateWorkflowResponse)(nil), "conductor.grpc.metadata.CreateWorkflowResponse") + proto.RegisterType((*UpdateWorkflowsRequest)(nil), "conductor.grpc.metadata.UpdateWorkflowsRequest") + proto.RegisterType((*UpdateWorkflowsResponse)(nil), "conductor.grpc.metadata.UpdateWorkflowsResponse") + proto.RegisterType((*GetWorkflowRequest)(nil), "conductor.grpc.metadata.GetWorkflowRequest") + proto.RegisterType((*GetWorkflowResponse)(nil), "conductor.grpc.metadata.GetWorkflowResponse") + proto.RegisterType((*CreateTasksRequest)(nil), "conductor.grpc.metadata.CreateTasksRequest") + proto.RegisterType((*CreateTasksResponse)(nil), "conductor.grpc.metadata.CreateTasksResponse") + proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.metadata.UpdateTaskRequest") + proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.metadata.UpdateTaskResponse") + proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.metadata.GetTaskRequest") + proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.metadata.GetTaskResponse") + proto.RegisterType((*DeleteTaskRequest)(nil), "conductor.grpc.metadata.DeleteTaskRequest") + proto.RegisterType((*DeleteTaskResponse)(nil), "conductor.grpc.metadata.DeleteTaskResponse") +} + +// Reference imports to suppress errors if they are not otherwise used. +var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// MetadataServiceClient is the client API for MetadataService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. 
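+//
+// A minimal usage sketch for this client; the dial target, task type, and
+// error handling below are illustrative assumptions, not part of the
+// generated API (log and fmt would also need to be imported by the caller):
+//
+//   conn, err := grpc.Dial("localhost:8090", grpc.WithInsecure())
+//   if err != nil {
+//       log.Fatal(err)
+//   }
+//   defer conn.Close()
+//
+//   client := NewMetadataServiceClient(conn)
+//   // Fetch the definition of a registered task type.
+//   res, err := client.GetTask(context.Background(), &GetTaskRequest{TaskType: "encode_video"})
+//   if err != nil {
+//       log.Fatal(err)
+//   }
+//   fmt.Println(res.GetTask())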
+type MetadataServiceClient interface { + // POST /workflow + CreateWorkflow(ctx context.Context, in *CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) + // PUT /workflow + UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) + // GET /workflow/{name} + GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) + // POST /taskdefs + CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) + // PUT /taskdefs + UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) + // GET /taskdefs/{tasktype} + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) + // DELETE /taskdefs/{tasktype} + DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) +} + +type metadataServiceClient struct { + cc *grpc.ClientConn +} + +func NewMetadataServiceClient(cc *grpc.ClientConn) MetadataServiceClient { + return &metadataServiceClient{cc} +} + +func (c *metadataServiceClient) CreateWorkflow(ctx context.Context, in *CreateWorkflowRequest, opts ...grpc.CallOption) (*CreateWorkflowResponse, error) { + out := new(CreateWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*UpdateWorkflowsResponse, error) { + out := new(UpdateWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*GetWorkflowResponse, error) { + out := new(GetWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*CreateTasksResponse, error) { + out := new(CreateTasksResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/CreateTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { + out := new(UpdateTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/UpdateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { + out := new(GetTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *metadataServiceClient) DeleteTask(ctx context.Context, in *DeleteTaskRequest, opts ...grpc.CallOption) (*DeleteTaskResponse, error) { + out := new(DeleteTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.metadata.MetadataService/DeleteTask", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +// MetadataServiceServer is the server API for MetadataService service. +type MetadataServiceServer interface { + // POST /workflow + CreateWorkflow(context.Context, *CreateWorkflowRequest) (*CreateWorkflowResponse, error) + // PUT /workflow + UpdateWorkflows(context.Context, *UpdateWorkflowsRequest) (*UpdateWorkflowsResponse, error) + // GET /workflow/{name} + GetWorkflow(context.Context, *GetWorkflowRequest) (*GetWorkflowResponse, error) + // POST /taskdefs + CreateTasks(context.Context, *CreateTasksRequest) (*CreateTasksResponse, error) + // PUT /taskdefs + UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) + // GET /taskdefs/{tasktype} + GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) + // DELETE /taskdefs/{tasktype} + DeleteTask(context.Context, *DeleteTaskRequest) (*DeleteTaskResponse, error) +} + +func RegisterMetadataServiceServer(s *grpc.Server, srv MetadataServiceServer) { + s.RegisterService(&_MetadataService_serviceDesc, srv) +} + +func _MetadataService_CreateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/CreateWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).CreateWorkflow(ctx, req.(*CreateWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateWorkflows(ctx, req.(*UpdateWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/GetWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetWorkflow(ctx, req.(*GetWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_CreateTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(CreateTasksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).CreateTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + 
Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/CreateTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).CreateTasks(ctx, req.(*CreateTasksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _MetadataService_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DeleteTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(MetadataServiceServer).DeleteTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.metadata.MetadataService/DeleteTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(MetadataServiceServer).DeleteTask(ctx, req.(*DeleteTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _MetadataService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.metadata.MetadataService", + HandlerType: (*MetadataServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "CreateWorkflow", + Handler: _MetadataService_CreateWorkflow_Handler, + }, + { + MethodName: "UpdateWorkflows", + Handler: _MetadataService_UpdateWorkflows_Handler, + }, + { + MethodName: "GetWorkflow", + Handler: _MetadataService_GetWorkflow_Handler, + }, + { + MethodName: "CreateTasks", + Handler: _MetadataService_CreateTasks_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _MetadataService_UpdateTask_Handler, + }, + { + MethodName: "GetTask", + Handler: _MetadataService_GetTask_Handler, + }, + { + MethodName: "DeleteTask", + Handler: _MetadataService_DeleteTask_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/metadata_service.proto", +} + +func init() { + proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_4778cc9d199e5aef) +} + +var fileDescriptor_metadata_service_4778cc9d199e5aef = []byte{ + // 526 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdf, 0x6b, 0xd3, 0x50, + 0x18, 0xa5, 
0xba, 0xb9, 0xed, 0x1b, 0xac, 0xf4, 0x76, 0x5b, 0x63, 0xe6, 0x43, 0xc9, 0x8b, 0xc5, + 0xcd, 0x9b, 0x32, 0x5f, 0x7c, 0x13, 0xe3, 0x60, 0x20, 0x88, 0x5a, 0x27, 0x82, 0x08, 0x23, 0x4d, + 0xbe, 0x74, 0xa1, 0x49, 0x6e, 0x96, 0x7b, 0xbb, 0xd9, 0x7f, 0xdd, 0x27, 0x49, 0x72, 0xf3, 0xbb, + 0x4d, 0x2b, 0xbe, 0xb5, 0xf7, 0x9e, 0xef, 0x9c, 0x9c, 0x8f, 0x73, 0xb8, 0x70, 0x36, 0x8b, 0x42, + 0x4b, 0xf7, 0x51, 0x98, 0xb6, 0x29, 0xcc, 0x5b, 0x8e, 0xd1, 0x83, 0x6b, 0x21, 0x0d, 0x23, 0x26, + 0x18, 0x19, 0x58, 0x2c, 0xb0, 0x17, 0x96, 0x60, 0x11, 0x8d, 0x61, 0x34, 0x83, 0xa9, 0x7d, 0x9f, + 0xd9, 0xe8, 0xe9, 0xc2, 0xe4, 0x73, 0x1b, 0x9d, 0x14, 0xad, 0x0e, 0xd2, 0xc3, 0x47, 0x16, 0xcd, + 0x1d, 0x8f, 0x3d, 0xe6, 0x17, 0xda, 0x57, 0x38, 0xf9, 0x10, 0xa1, 0x29, 0xf0, 0x87, 0xbc, 0x9a, + 0xe0, 0xfd, 0x02, 0xb9, 0x20, 0x6f, 0x61, 0x3f, 0x43, 0x2b, 0x9d, 0x61, 0x67, 0x74, 0x78, 0xf9, + 0x82, 0x16, 0x92, 0xc9, 0x30, 0xcd, 0x66, 0xae, 0xd0, 0x99, 0xe4, 0x68, 0x4d, 0x81, 0xd3, 0x3a, + 0x25, 0x0f, 0x59, 0xc0, 0x51, 0xfb, 0x08, 0xa7, 0xdf, 0x43, 0xbb, 0x74, 0xc3, 0x33, 0xb5, 0x31, + 0xec, 0xd8, 0xe8, 0x70, 0xa5, 0x33, 0x7c, 0xba, 0x51, 0x29, 0x41, 0x6a, 0xcf, 0x61, 0xd0, 0xe0, + 0x92, 0x32, 0x06, 0x90, 0x6b, 0x14, 0x75, 0x43, 0x04, 0x76, 0x02, 0xd3, 0xc7, 0xc4, 0xcc, 0xc1, + 0x24, 0xf9, 0x4d, 0x14, 0xd8, 0x7b, 0xc0, 0x88, 0xbb, 0x2c, 0x50, 0x9e, 0x0c, 0x3b, 0xa3, 0xdd, + 0x49, 0xf6, 0x57, 0xfb, 0x0c, 0xfd, 0x0a, 0x47, 0x4a, 0xfd, 0x1f, 0x5b, 0x31, 0x80, 0xa4, 0x5b, + 0xb9, 0x31, 0xf9, 0x3c, 0xf7, 0x7d, 0x51, 0xf1, 0xad, 0x34, 0xb8, 0x62, 0x70, 0xe1, 0xf9, 0x04, + 0xfa, 0x15, 0x0e, 0xe9, 0xf7, 0x3d, 0xf4, 0xd2, 0x55, 0xc4, 0xc7, 0x25, 0xe6, 0x38, 0x02, 0xf2, + 0x2b, 0x5b, 0x98, 0x63, 0x94, 0x76, 0x0c, 0xa4, 0x4c, 0x21, 0x89, 0x5f, 0xc3, 0xd1, 0x35, 0x8a, + 0x32, 0xeb, 0x19, 0x1c, 0xc4, 0xf8, 0x5b, 0xb1, 0x0c, 0xb3, 0x4d, 0xee, 0xc7, 0x07, 0x37, 0xcb, + 0x10, 0xb5, 0x77, 0xd0, 0xcd, 0xe1, 0x72, 0x5f, 0xff, 0xf6, 0x15, 0x63, 0xe8, 0x5d, 0xa1, 0x87, + 0x55, 0x23, 0xad, 0x92, 0xc7, 0x40, 0xca, 0x13, 0xa9, 0xea, 0xe5, 0x9f, 0x5d, 0xe8, 0x7e, 0x92, + 0x7d, 0xf8, 0x96, 0xb6, 0x86, 0xdc, 0xc3, 0x51, 0x35, 0x95, 0x84, 0xd2, 0x35, 0x15, 0xa2, 0x2b, + 0x1b, 0xa1, 0xea, 0x5b, 0xe3, 0xa5, 0x79, 0x01, 0xdd, 0x5a, 0x44, 0xc9, 0x7a, 0x8e, 0xd5, 0xc5, + 0x50, 0xc7, 0xdb, 0x0f, 0x48, 0xd5, 0x3b, 0x38, 0x2c, 0x25, 0x97, 0x9c, 0xaf, 0x25, 0x68, 0x76, + 0x44, 0xbd, 0xd8, 0x0e, 0x5c, 0x28, 0x95, 0xe2, 0xd8, 0xa2, 0xd4, 0x0c, 0x7e, 0x8b, 0xd2, 0x8a, + 0x84, 0x13, 0x04, 0x28, 0xe2, 0x49, 0x5e, 0x6d, 0xd8, 0x49, 0x29, 0x3d, 0xea, 0xf9, 0x56, 0x58, + 0x29, 0xf3, 0x0b, 0xf6, 0x64, 0x80, 0xc9, 0xcb, 0xb6, 0x4d, 0x94, 0x05, 0x46, 0x9b, 0x81, 0x85, + 0x89, 0x22, 0xab, 0x2d, 0x26, 0x1a, 0x15, 0x68, 0x31, 0xd1, 0x0c, 0xbf, 0xc1, 0x41, 0xb5, 0x98, + 0x4f, 0x03, 0x14, 0x8e, 0xe7, 0xfe, 0xae, 0x4d, 0x1a, 0xbd, 0x5a, 0x2f, 0xbe, 0x4c, 0x7f, 0x1a, + 0x33, 0x57, 0xdc, 0x2d, 0xa6, 0xd4, 0x62, 0xbe, 0x2e, 0xa7, 0xf4, 0x7c, 0x4a, 0xb7, 0x3c, 0x17, + 0x03, 0xa1, 0xcf, 0x58, 0xf2, 0x18, 0x15, 0xe7, 0x95, 0xb7, 0x69, 0xfa, 0x2c, 0xa9, 0xf3, 0x9b, + 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x5f, 0x81, 0xa1, 0x07, 0xb3, 0x06, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/metadata_service.pb.go b/client/gogrpc/conductor/grpc/metadata_service.pb.go deleted file mode 100644 index 5782159370..0000000000 --- a/client/gogrpc/conductor/grpc/metadata_service.pb.go +++ /dev/null @@ -1,640 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. 
-// source: grpc/metadata_service.proto - -package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import empty "github.com/golang/protobuf/ptypes/empty" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type UpdateWorkflowsRequest struct { - Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} } -func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*UpdateWorkflowsRequest) ProtoMessage() {} -func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{0} -} -func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) -} -func (m *UpdateWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_UpdateWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *UpdateWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_UpdateWorkflowsRequest.Merge(dst, src) -} -func (m *UpdateWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_UpdateWorkflowsRequest.Size(m) -} -func (m *UpdateWorkflowsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_UpdateWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_UpdateWorkflowsRequest proto.InternalMessageInfo - -func (m *UpdateWorkflowsRequest) GetDefs() []*model.WorkflowDef { - if m != nil { - return m.Defs - } - return nil -} - -type CreateTasksRequest struct { - Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } -func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } -func (*CreateTasksRequest) ProtoMessage() {} -func (*CreateTasksRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{1} -} -func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) -} -func (m *CreateTasksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_CreateTasksRequest.Marshal(b, m, deterministic) -} -func (dst *CreateTasksRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_CreateTasksRequest.Merge(dst, src) -} -func (m *CreateTasksRequest) XXX_Size() int { - return xxx_messageInfo_CreateTasksRequest.Size(m) -} -func (m *CreateTasksRequest) XXX_DiscardUnknown() { - xxx_messageInfo_CreateTasksRequest.DiscardUnknown(m) -} - -var 
xxx_messageInfo_CreateTasksRequest proto.InternalMessageInfo - -func (m *CreateTasksRequest) GetDefs() []*model.TaskDef { - if m != nil { - return m.Defs - } - return nil -} - -type GetWorkflowRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowRequest) Reset() { *m = GetWorkflowRequest{} } -func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowRequest) ProtoMessage() {} -func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{2} -} -func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) -} -func (m *GetWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowRequest.Merge(dst, src) -} -func (m *GetWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowRequest.Size(m) -} -func (m *GetWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowRequest proto.InternalMessageInfo - -func (m *GetWorkflowRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetWorkflowRequest) GetVersion() int32 { - if m != nil { - return m.Version - } - return 0 -} - -type GetTaskRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } -func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } -func (*GetTaskRequest) ProtoMessage() {} -func (*GetTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_0ab7c38dd4dbd338, []int{3} -} -func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) -} -func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) -} -func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetTaskRequest.Merge(dst, src) -} -func (m *GetTaskRequest) XXX_Size() int { - return xxx_messageInfo_GetTaskRequest.Size(m) -} -func (m *GetTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo - -func (m *GetTaskRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func init() { - proto.RegisterType((*UpdateWorkflowsRequest)(nil), "com.netflix.conductor.grpc.UpdateWorkflowsRequest") - proto.RegisterType((*CreateTasksRequest)(nil), "com.netflix.conductor.grpc.CreateTasksRequest") - proto.RegisterType((*GetWorkflowRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowRequest") - proto.RegisterType((*GetTaskRequest)(nil), "com.netflix.conductor.grpc.GetTaskRequest") -} - -// Reference imports to suppress errors if they are not otherwise used. 
-var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// MetadataServiceClient is the client API for MetadataService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type MetadataServiceClient interface { - // POST /workflow - CreateWorkflow(ctx context.Context, in *model.WorkflowDef, opts ...grpc.CallOption) (*empty.Empty, error) - // PUT /workflow - UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) - // GET /workflow/{name} - GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*model.WorkflowDef, error) - // GET /workflow - GetAllWorkflows(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllWorkflowsClient, error) - // POST /taskdefs - CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*empty.Empty, error) - // PUT /taskdefs - UpdateTask(ctx context.Context, in *model.TaskDef, opts ...grpc.CallOption) (*empty.Empty, error) - // GET /taskdefs/{tasktype} - GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*model.TaskDef, error) - // GET /taskdefs - GetAllTasks(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllTasksClient, error) - // DELETE /taskdefs/{tasktype} - DeleteTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) -} - -type metadataServiceClient struct { - cc *grpc.ClientConn -} - -func NewMetadataServiceClient(cc *grpc.ClientConn) MetadataServiceClient { - return &metadataServiceClient{cc} -} - -func (c *metadataServiceClient) CreateWorkflow(ctx context.Context, in *model.WorkflowDef, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/CreateWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) UpdateWorkflows(ctx context.Context, in *UpdateWorkflowsRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/UpdateWorkflows", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetWorkflow(ctx context.Context, in *GetWorkflowRequest, opts ...grpc.CallOption) (*model.WorkflowDef, error) { - out := new(model.WorkflowDef) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/GetWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetAllWorkflows(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllWorkflowsClient, error) { - stream, err := c.cc.NewStream(ctx, &_MetadataService_serviceDesc.Streams[0], "/com.netflix.conductor.grpc.MetadataService/GetAllWorkflows", opts...) 
- if err != nil { - return nil, err - } - x := &metadataServiceGetAllWorkflowsClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type MetadataService_GetAllWorkflowsClient interface { - Recv() (*model.WorkflowDef, error) - grpc.ClientStream -} - -type metadataServiceGetAllWorkflowsClient struct { - grpc.ClientStream -} - -func (x *metadataServiceGetAllWorkflowsClient) Recv() (*model.WorkflowDef, error) { - m := new(model.WorkflowDef) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *metadataServiceClient) CreateTasks(ctx context.Context, in *CreateTasksRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/CreateTasks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) UpdateTask(ctx context.Context, in *model.TaskDef, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/UpdateTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*model.TaskDef, error) { - out := new(model.TaskDef) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/GetTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *metadataServiceClient) GetAllTasks(ctx context.Context, in *empty.Empty, opts ...grpc.CallOption) (MetadataService_GetAllTasksClient, error) { - stream, err := c.cc.NewStream(ctx, &_MetadataService_serviceDesc.Streams[1], "/com.netflix.conductor.grpc.MetadataService/GetAllTasks", opts...) - if err != nil { - return nil, err - } - x := &metadataServiceGetAllTasksClient{stream} - if err := x.ClientStream.SendMsg(in); err != nil { - return nil, err - } - if err := x.ClientStream.CloseSend(); err != nil { - return nil, err - } - return x, nil -} - -type MetadataService_GetAllTasksClient interface { - Recv() (*model.TaskDef, error) - grpc.ClientStream -} - -type metadataServiceGetAllTasksClient struct { - grpc.ClientStream -} - -func (x *metadataServiceGetAllTasksClient) Recv() (*model.TaskDef, error) { - m := new(model.TaskDef) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *metadataServiceClient) DeleteTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.MetadataService/DeleteTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// MetadataServiceServer is the server API for MetadataService service. 
-type MetadataServiceServer interface { - // POST /workflow - CreateWorkflow(context.Context, *model.WorkflowDef) (*empty.Empty, error) - // PUT /workflow - UpdateWorkflows(context.Context, *UpdateWorkflowsRequest) (*empty.Empty, error) - // GET /workflow/{name} - GetWorkflow(context.Context, *GetWorkflowRequest) (*model.WorkflowDef, error) - // GET /workflow - GetAllWorkflows(*empty.Empty, MetadataService_GetAllWorkflowsServer) error - // POST /taskdefs - CreateTasks(context.Context, *CreateTasksRequest) (*empty.Empty, error) - // PUT /taskdefs - UpdateTask(context.Context, *model.TaskDef) (*empty.Empty, error) - // GET /taskdefs/{tasktype} - GetTask(context.Context, *GetTaskRequest) (*model.TaskDef, error) - // GET /taskdefs - GetAllTasks(*empty.Empty, MetadataService_GetAllTasksServer) error - // DELETE /taskdefs/{tasktype} - DeleteTask(context.Context, *GetTaskRequest) (*empty.Empty, error) -} - -func RegisterMetadataServiceServer(s *grpc.Server, srv MetadataServiceServer) { - s.RegisterService(&_MetadataService_serviceDesc, srv) -} - -func _MetadataService_CreateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.WorkflowDef) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).CreateWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/CreateWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).CreateWorkflow(ctx, req.(*model.WorkflowDef)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_UpdateWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(UpdateWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).UpdateWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/UpdateWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).UpdateWorkflows(ctx, req.(*UpdateWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).GetWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/GetWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).GetWorkflow(ctx, req.(*GetWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetAllWorkflows_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(empty.Empty) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(MetadataServiceServer).GetAllWorkflows(m, &metadataServiceGetAllWorkflowsServer{stream}) -} - -type MetadataService_GetAllWorkflowsServer interface { - Send(*model.WorkflowDef) error - 
grpc.ServerStream -} - -type metadataServiceGetAllWorkflowsServer struct { - grpc.ServerStream -} - -func (x *metadataServiceGetAllWorkflowsServer) Send(m *model.WorkflowDef) error { - return x.ServerStream.SendMsg(m) -} - -func _MetadataService_CreateTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(CreateTasksRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).CreateTasks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/CreateTasks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).CreateTasks(ctx, req.(*CreateTasksRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.TaskDef) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).UpdateTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/UpdateTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).UpdateTask(ctx, req.(*model.TaskDef)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).GetTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/GetTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).GetTask(ctx, req.(*GetTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _MetadataService_GetAllTasks_Handler(srv interface{}, stream grpc.ServerStream) error { - m := new(empty.Empty) - if err := stream.RecvMsg(m); err != nil { - return err - } - return srv.(MetadataServiceServer).GetAllTasks(m, &metadataServiceGetAllTasksServer{stream}) -} - -type MetadataService_GetAllTasksServer interface { - Send(*model.TaskDef) error - grpc.ServerStream -} - -type metadataServiceGetAllTasksServer struct { - grpc.ServerStream -} - -func (x *metadataServiceGetAllTasksServer) Send(m *model.TaskDef) error { - return x.ServerStream.SendMsg(m) -} - -func _MetadataService_DeleteTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(MetadataServiceServer).DeleteTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.MetadataService/DeleteTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(MetadataServiceServer).DeleteTask(ctx, req.(*GetTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _MetadataService_serviceDesc = 
grpc.ServiceDesc{ - ServiceName: "com.netflix.conductor.grpc.MetadataService", - HandlerType: (*MetadataServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "CreateWorkflow", - Handler: _MetadataService_CreateWorkflow_Handler, - }, - { - MethodName: "UpdateWorkflows", - Handler: _MetadataService_UpdateWorkflows_Handler, - }, - { - MethodName: "GetWorkflow", - Handler: _MetadataService_GetWorkflow_Handler, - }, - { - MethodName: "CreateTasks", - Handler: _MetadataService_CreateTasks_Handler, - }, - { - MethodName: "UpdateTask", - Handler: _MetadataService_UpdateTask_Handler, - }, - { - MethodName: "GetTask", - Handler: _MetadataService_GetTask_Handler, - }, - { - MethodName: "DeleteTask", - Handler: _MetadataService_DeleteTask_Handler, - }, - }, - Streams: []grpc.StreamDesc{ - { - StreamName: "GetAllWorkflows", - Handler: _MetadataService_GetAllWorkflows_Handler, - ServerStreams: true, - }, - { - StreamName: "GetAllTasks", - Handler: _MetadataService_GetAllTasks_Handler, - ServerStreams: true, - }, - }, - Metadata: "grpc/metadata_service.proto", -} - -func init() { - proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_0ab7c38dd4dbd338) -} - -var fileDescriptor_metadata_service_0ab7c38dd4dbd338 = []byte{ - // 468 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x94, 0xdf, 0x6b, 0xd3, 0x50, - 0x14, 0xc7, 0x89, 0xce, 0xcd, 0x9d, 0xc2, 0x8a, 0x47, 0x98, 0x25, 0x7d, 0x29, 0xc5, 0x87, 0x22, - 0x78, 0x33, 0xea, 0x8b, 0x82, 0x2f, 0xab, 0x93, 0x82, 0xb0, 0x31, 0xe2, 0xaa, 0xa0, 0xc8, 0x48, - 0x93, 0x93, 0x18, 0x7a, 0x93, 0x1b, 0x93, 0x93, 0xcd, 0xfe, 0x41, 0xfe, 0x9f, 0x72, 0xf3, 0xa3, - 0xad, 0x5b, 0x53, 0xb3, 0xb7, 0xde, 0x73, 0xef, 0xf9, 0x9c, 0x73, 0xbe, 0xdf, 0xd3, 0x40, 0x3f, - 0x48, 0x13, 0xd7, 0x8a, 0x88, 0x1d, 0xcf, 0x61, 0xe7, 0x3a, 0xa3, 0xf4, 0x26, 0x74, 0x49, 0x24, - 0xa9, 0x62, 0x85, 0xa6, 0xab, 0x22, 0x11, 0x13, 0xfb, 0x32, 0xfc, 0x2d, 0x5c, 0x15, 0x7b, 0xb9, - 0xcb, 0x2a, 0x15, 0x3a, 0xc5, 0xec, 0x07, 0x4a, 0x05, 0x92, 0xac, 0xe2, 0xe5, 0x3c, 0xf7, 0x2d, - 0x8a, 0x12, 0x5e, 0x96, 0x89, 0xe6, 0xf3, 0x48, 0x79, 0x24, 0x2d, 0x76, 0xb2, 0x85, 0x47, 0x7e, - 0x15, 0x7c, 0x51, 0x06, 0x6f, 0x55, 0xba, 0xf0, 0xa5, 0xba, 0x5d, 0x5d, 0x0c, 0xbf, 0xc0, 0xf1, - 0x2c, 0xf1, 0x1c, 0xa6, 0xaf, 0xd5, 0x55, 0x66, 0xd3, 0xaf, 0x9c, 0x32, 0xc6, 0xf7, 0xb0, 0xe7, - 0x91, 0x9f, 0xf5, 0x8c, 0xc1, 0xe3, 0x51, 0x67, 0x3c, 0x12, 0xdb, 0xfb, 0x29, 0x28, 0xa2, 0x4e, - 0x3e, 0x23, 0xdf, 0x2e, 0xb2, 0x86, 0x17, 0x80, 0x1f, 0x52, 0x72, 0x98, 0xae, 0x9c, 0x6c, 0xb1, - 0x62, 0xbe, 0xfd, 0x87, 0xf9, 0x72, 0x27, 0x53, 0x27, 0xae, 0x79, 0x13, 0xc0, 0x29, 0x71, 0x5d, - 0xa7, 0xe6, 0x21, 0xec, 0xc5, 0x4e, 0x44, 0x3d, 0x63, 0x60, 0x8c, 0x0e, 0xed, 0xe2, 0x37, 0xf6, - 0xe0, 0xe0, 0x86, 0xd2, 0x2c, 0x54, 0x71, 0xef, 0xd1, 0xc0, 0x18, 0x3d, 0xb1, 0xeb, 0xe3, 0xf0, - 0x35, 0x1c, 0x4d, 0x89, 0x35, 0xb7, 0xce, 0xef, 0xc3, 0xa1, 0xd6, 0xe9, 0x9a, 0x97, 0x49, 0x0d, - 0x79, 0xaa, 0x03, 0x57, 0xcb, 0x84, 0xc6, 0x7f, 0xf6, 0xa1, 0x7b, 0x5e, 0x99, 0xf3, 0xb9, 0xf4, - 0x06, 0x6d, 0x38, 0x2a, 0xc7, 0xaa, 0x3b, 0xc1, 0xd6, 0xc2, 0x98, 0xc7, 0xa2, 0xb4, 0x4d, 0xd4, - 0xb6, 0x89, 0x8f, 0xda, 0x36, 0xfc, 0x01, 0xdd, 0x3b, 0x16, 0xe0, 0x58, 0x34, 0xbb, 0x2f, 0xb6, - 0xfb, 0xd5, 0x88, 0x0f, 0xa0, 0xb3, 0xa1, 0x1c, 0x8a, 0x5d, 0xe8, 0xfb, 0x12, 0x9b, 0xad, 0xe7, - 0xc3, 0x19, 0x74, 0xa7, 0xc4, 0xa7, 0x52, 0xae, 0xe7, 0x68, 0xe8, 0xa9, 0x3d, 0xf4, 0xc4, 0xc0, - 0x19, 0x74, 0x36, 0x36, 0x69, 0x77, 0xff, 0xf7, 0x57, 0xae, 0x51, 0x96, 0x4f, 
0x00, 0xa5, 0x90, - 0xfa, 0x35, 0xb6, 0x5a, 0xc5, 0x46, 0xd6, 0x77, 0x38, 0xa8, 0x16, 0x0b, 0x5f, 0xfd, 0x47, 0xde, - 0x8d, 0xed, 0x33, 0x5b, 0x15, 0xc5, 0xf3, 0xc2, 0xbf, 0x53, 0x29, 0xcb, 0xf9, 0x9b, 0x24, 0x6d, - 0x05, 0x3b, 0x31, 0xf0, 0x12, 0xe0, 0x8c, 0x24, 0x55, 0x73, 0x3f, 0xa4, 0xdd, 0x86, 0xca, 0x93, - 0x8b, 0xc9, 0xb3, 0x3b, 0x7f, 0x93, 0xcb, 0xf9, 0xb7, 0x77, 0x41, 0xc8, 0x3f, 0xf3, 0xb9, 0xc6, - 0x5b, 0x15, 0xde, 0x5a, 0xe1, 0x2d, 0x57, 0x86, 0x14, 0xb3, 0x15, 0xa8, 0xe2, 0x0b, 0xb8, 0x8e, - 0xeb, 0xe3, 0x7c, 0xbf, 0xe0, 0xbf, 0xf9, 0x1b, 0x00, 0x00, 0xff, 0xff, 0xa7, 0x96, 0x24, 0x65, - 0x1f, 0x05, 0x00, 0x00, -} diff --git a/client/gogrpc/conductor/grpc/search.pb.go b/client/gogrpc/conductor/grpc/search.pb.go deleted file mode 100644 index cebe7a9a12..0000000000 --- a/client/gogrpc/conductor/grpc/search.pb.go +++ /dev/null @@ -1,166 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/search.proto - -package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. -const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type SearchRequest struct { - Start int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` - Size int32 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"` - Sort string `protobuf:"bytes,3,opt,name=sort" json:"sort,omitempty"` - FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText" json:"free_text,omitempty"` - Query string `protobuf:"bytes,5,opt,name=query" json:"query,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *SearchRequest) Reset() { *m = SearchRequest{} } -func (m *SearchRequest) String() string { return proto.CompactTextString(m) } -func (*SearchRequest) ProtoMessage() {} -func (*SearchRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_search_4602c68037d6c24f, []int{0} -} -func (m *SearchRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SearchRequest.Unmarshal(m, b) -} -func (m *SearchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SearchRequest.Marshal(b, m, deterministic) -} -func (dst *SearchRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SearchRequest.Merge(dst, src) -} -func (m *SearchRequest) XXX_Size() int { - return xxx_messageInfo_SearchRequest.Size(m) -} -func (m *SearchRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SearchRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_SearchRequest proto.InternalMessageInfo - -func (m *SearchRequest) GetStart() int32 { - if m != nil { - return m.Start - } - return 0 -} - -func (m *SearchRequest) GetSize() int32 { - if m != nil { - return m.Size - } - return 0 -} - -func (m *SearchRequest) GetSort() string { - if m != nil { - return m.Sort - } - return "" -} - -func (m *SearchRequest) GetFreeText() string { - if m != nil { - return m.FreeText - } - return "" -} - -func (m *SearchRequest) 
GetQuery() string { - if m != nil { - return m.Query - } - return "" -} - -type WorkflowSummarySearchResult struct { - TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits" json:"total_hits,omitempty"` - Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results" json:"results,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySearchResult{} } -func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } -func (*WorkflowSummarySearchResult) ProtoMessage() {} -func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { - return fileDescriptor_search_4602c68037d6c24f, []int{1} -} -func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) -} -func (m *WorkflowSummarySearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowSummarySearchResult.Marshal(b, m, deterministic) -} -func (dst *WorkflowSummarySearchResult) XXX_Merge(src proto.Message) { - xxx_messageInfo_WorkflowSummarySearchResult.Merge(dst, src) -} -func (m *WorkflowSummarySearchResult) XXX_Size() int { - return xxx_messageInfo_WorkflowSummarySearchResult.Size(m) -} -func (m *WorkflowSummarySearchResult) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowSummarySearchResult.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowSummarySearchResult proto.InternalMessageInfo - -func (m *WorkflowSummarySearchResult) GetTotalHits() int64 { - if m != nil { - return m.TotalHits - } - return 0 -} - -func (m *WorkflowSummarySearchResult) GetResults() []*model.WorkflowSummary { - if m != nil { - return m.Results - } - return nil -} - -func init() { - proto.RegisterType((*SearchRequest)(nil), "com.netflix.conductor.grpc.SearchRequest") - proto.RegisterType((*WorkflowSummarySearchResult)(nil), "com.netflix.conductor.grpc.WorkflowSummarySearchResult") -} - -func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_4602c68037d6c24f) } - -var fileDescriptor_search_4602c68037d6c24f = []byte{ - // 292 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x51, 0xcf, 0x4b, 0x33, 0x31, - 0x10, 0x65, 0xfb, 0xe3, 0xfb, 0xda, 0x88, 0x07, 0x83, 0x87, 0xd0, 0x22, 0x94, 0x9e, 0x7a, 0x90, - 0x04, 0xf4, 0xe4, 0xb5, 0xa0, 0x78, 0x94, 0xad, 0x20, 0x78, 0x29, 0xdb, 0x74, 0xba, 0x1b, 0xcc, - 0x6e, 0xda, 0x64, 0x42, 0xb7, 0x9e, 0x3c, 0xf8, 0x87, 0xcb, 0xce, 0xb6, 0x0a, 0xe2, 0x6d, 0xde, - 0x9b, 0x99, 0xf7, 0x5e, 0x32, 0xec, 0x22, 0xf7, 0x5b, 0xad, 0x02, 0x64, 0x5e, 0x17, 0x72, 0xeb, - 0x1d, 0x3a, 0x3e, 0xd2, 0xae, 0x94, 0x15, 0xe0, 0xc6, 0x9a, 0x5a, 0x6a, 0x57, 0xad, 0xa3, 0x46, - 0xe7, 0x65, 0x33, 0x38, 0x1a, 0x97, 0x6e, 0x0d, 0x56, 0xed, 0x9d, 0x7f, 0xdb, 0x58, 0xb7, 0x0f, - 0xb1, 0x2c, 0x33, 0x7f, 0x68, 0x17, 0xa7, 0x1f, 0x09, 0x3b, 0x5f, 0x90, 0x52, 0x0a, 0xbb, 0x08, - 0x01, 0xf9, 0x25, 0xeb, 0x07, 0xcc, 0x3c, 0x8a, 0x64, 0x92, 0xcc, 0xfa, 0x69, 0x0b, 0x38, 0x67, - 0xbd, 0x60, 0xde, 0x41, 0x74, 0x88, 0xa4, 0x9a, 0x38, 0xe7, 0x51, 0x74, 0x27, 0xc9, 0x6c, 0x98, - 0x52, 0xcd, 0xc7, 0x6c, 0xb8, 0xf1, 0x00, 0x4b, 0x84, 0x1a, 0x45, 0x8f, 0x1a, 0x83, 0x86, 0x78, - 0x86, 0x9a, 0xa4, 0x77, 0x11, 0xfc, 0x41, 0xf4, 0xa9, 0xd1, 0x82, 0xe9, 0x67, 0xc2, 0xc6, 0x2f, - 0xc7, 0x70, 0x8b, 0x36, 0xdc, 0x29, 0x51, 0x88, 0x16, 0xf9, 0x15, 0x63, 0xe8, 0x30, 0xb3, 0xcb, - 0xc2, 
0x60, 0xa0, 0x54, 0xdd, 0x74, 0x48, 0xcc, 0xa3, 0xc1, 0xc0, 0x1f, 0xd8, 0x7f, 0x4f, 0x83, - 0x41, 0x74, 0x26, 0xdd, 0xd9, 0xd9, 0xcd, 0xb5, 0xfc, 0xfb, 0x33, 0xe8, 0xc1, 0xf2, 0x97, 0x53, - 0x7a, 0x5a, 0x9e, 0xdf, 0xcf, 0x07, 0xad, 0xed, 0xd3, 0xea, 0xf5, 0x2e, 0x37, 0x58, 0xc4, 0x55, - 0x23, 0xa4, 0x8e, 0x42, 0xea, 0x5b, 0x48, 0x69, 0x6b, 0xa0, 0x42, 0x95, 0x3b, 0xba, 0xc3, 0x0f, - 0xdf, 0xc0, 0xd5, 0x3f, 0xb2, 0xb9, 0xfd, 0x0a, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x03, 0x79, 0x98, - 0xa5, 0x01, 0x00, 0x00, -} diff --git a/client/gogrpc/conductor/grpc/search/search.pb.go b/client/gogrpc/conductor/grpc/search/search.pb.go new file mode 100644 index 0000000000..1c7ade8fd2 --- /dev/null +++ b/client/gogrpc/conductor/grpc/search/search.pb.go @@ -0,0 +1,113 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/search.proto + +package search // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type Request struct { + Start int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"` + Size int32 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"` + Sort string `protobuf:"bytes,3,opt,name=sort" json:"sort,omitempty"` + FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText" json:"free_text,omitempty"` + Query string `protobuf:"bytes,5,opt,name=query" json:"query,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Request) Reset() { *m = Request{} } +func (m *Request) String() string { return proto.CompactTextString(m) } +func (*Request) ProtoMessage() {} +func (*Request) Descriptor() ([]byte, []int) { + return fileDescriptor_search_855a611014aa2143, []int{0} +} +func (m *Request) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_Request.Unmarshal(m, b) +} +func (m *Request) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_Request.Marshal(b, m, deterministic) +} +func (dst *Request) XXX_Merge(src proto.Message) { + xxx_messageInfo_Request.Merge(dst, src) +} +func (m *Request) XXX_Size() int { + return xxx_messageInfo_Request.Size(m) +} +func (m *Request) XXX_DiscardUnknown() { + xxx_messageInfo_Request.DiscardUnknown(m) +} + +var xxx_messageInfo_Request proto.InternalMessageInfo + +func (m *Request) GetStart() int32 { + if m != nil { + return m.Start + } + return 0 +} + +func (m *Request) GetSize() int32 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *Request) GetSort() string { + if m != nil { + return m.Sort + } + return "" +} + +func (m *Request) GetFreeText() string { + if m != nil { + return m.FreeText + } + return "" +} + +func (m *Request) GetQuery() string { + if m != nil { + return m.Query + } + return "" +} + +func init() { + proto.RegisterType((*Request)(nil), "conductor.grpc.search.Request") +} + +func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_855a611014aa2143) } + +var fileDescriptor_search_855a611014aa2143 = 
[]byte{ + // 212 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xc1, 0x4a, 0xc4, 0x30, + 0x10, 0x86, 0xa9, 0x6e, 0x75, 0x37, 0x37, 0x83, 0x42, 0xd0, 0xcb, 0xe2, 0x69, 0x4f, 0xc9, 0xc1, + 0x17, 0x90, 0x7d, 0x02, 0xa9, 0x9e, 0xbc, 0xc8, 0x36, 0x4e, 0xdb, 0x40, 0xdb, 0x69, 0x27, 0x13, + 0xa8, 0x7d, 0x7a, 0xe9, 0x54, 0xd4, 0xbd, 0xcd, 0x7c, 0xdf, 0x10, 0xfe, 0xfc, 0xea, 0xa6, 0xa6, + 0xc1, 0xbb, 0x08, 0x27, 0xf2, 0x8d, 0x1d, 0x08, 0x19, 0xf5, 0x9d, 0xc7, 0xfe, 0x33, 0x79, 0x46, + 0xb2, 0x8b, 0xb4, 0xab, 0x7c, 0x9c, 0xd5, 0x75, 0x01, 0x63, 0x82, 0xc8, 0xfa, 0x56, 0xe5, 0x91, + 0x4f, 0xc4, 0x26, 0xdb, 0x67, 0x87, 0xbc, 0x58, 0x17, 0xad, 0xd5, 0x26, 0x86, 0x19, 0xcc, 0x85, + 0x40, 0x99, 0x85, 0x21, 0xb1, 0xb9, 0xdc, 0x67, 0x87, 0x5d, 0x21, 0xb3, 0x7e, 0x50, 0xbb, 0x8a, + 0x00, 0x3e, 0x18, 0x26, 0x36, 0x1b, 0x11, 0xdb, 0x05, 0xbc, 0xc1, 0x24, 0x4f, 0x8f, 0x09, 0xe8, + 0xcb, 0xe4, 0x22, 0xd6, 0xe5, 0xd8, 0xa8, 0x7b, 0x8f, 0x9d, 0xed, 0x81, 0xab, 0x36, 0x4c, 0xf6, + 0x3c, 0xe0, 0x71, 0xfb, 0x2a, 0x09, 0x5f, 0xca, 0xf7, 0xe7, 0x3a, 0x70, 0x93, 0x4a, 0xeb, 0xb1, + 0x73, 0x3f, 0xc7, 0xee, 0xf7, 0xd8, 0xf9, 0x36, 0x40, 0xcf, 0xae, 0x46, 0xf9, 0xf3, 0x1f, 0xff, + 0x57, 0x41, 0x79, 0x25, 0x1d, 0x3c, 0x7d, 0x07, 0x00, 0x00, 0xff, 0xff, 0x8d, 0x4d, 0x39, 0xe7, + 0x18, 0x01, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/task_service.pb.go b/client/gogrpc/conductor/grpc/task_service.pb.go deleted file mode 100644 index 68103b314f..0000000000 --- a/client/gogrpc/conductor/grpc/task_service.pb.go +++ /dev/null @@ -1,934 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/task_service.proto - -package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import empty "github.com/golang/protobuf/ptypes/empty" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type PollRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PollRequest) Reset() { *m = PollRequest{} } -func (m *PollRequest) String() string { return proto.CompactTextString(m) } -func (*PollRequest) ProtoMessage() {} -func (*PollRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{0} -} -func (m *PollRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PollRequest.Unmarshal(m, b) -} -func (m *PollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PollRequest.Marshal(b, m, deterministic) -} -func (dst *PollRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PollRequest.Merge(dst, src) -} -func (m *PollRequest) XXX_Size() int { - return xxx_messageInfo_PollRequest.Size(m) -} -func (m *PollRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PollRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PollRequest proto.InternalMessageInfo - -func (m *PollRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *PollRequest) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -func (m *PollRequest) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -type StreamingPollRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` - Capacity int32 `protobuf:"varint,4,opt,name=capacity" json:"capacity,omitempty"` - Completed []*model.TaskResult `protobuf:"bytes,5,rep,name=completed" json:"completed,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *StreamingPollRequest) Reset() { *m = StreamingPollRequest{} } -func (m *StreamingPollRequest) String() string { return proto.CompactTextString(m) } -func (*StreamingPollRequest) ProtoMessage() {} -func (*StreamingPollRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{1} -} -func (m *StreamingPollRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_StreamingPollRequest.Unmarshal(m, b) -} -func (m *StreamingPollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_StreamingPollRequest.Marshal(b, m, deterministic) -} -func (dst *StreamingPollRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_StreamingPollRequest.Merge(dst, src) -} -func (m *StreamingPollRequest) XXX_Size() int { - return xxx_messageInfo_StreamingPollRequest.Size(m) -} -func (m *StreamingPollRequest) XXX_DiscardUnknown() { - xxx_messageInfo_StreamingPollRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_StreamingPollRequest proto.InternalMessageInfo - -func (m *StreamingPollRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *StreamingPollRequest) GetWorkerId() string { - if m != nil { - 
return m.WorkerId - } - return "" -} - -func (m *StreamingPollRequest) GetDomain() string { - if m != nil { - return m.Domain - } - return "" -} - -func (m *StreamingPollRequest) GetCapacity() int32 { - if m != nil { - return m.Capacity - } - return 0 -} - -func (m *StreamingPollRequest) GetCompleted() []*model.TaskResult { - if m != nil { - return m.Completed - } - return nil -} - -type TasksInProgressRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey" json:"start_key,omitempty"` - Count int32 `protobuf:"varint,3,opt,name=count" json:"count,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} } -func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } -func (*TasksInProgressRequest) ProtoMessage() {} -func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{2} -} -func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) -} -func (m *TasksInProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TasksInProgressRequest.Marshal(b, m, deterministic) -} -func (dst *TasksInProgressRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_TasksInProgressRequest.Merge(dst, src) -} -func (m *TasksInProgressRequest) XXX_Size() int { - return xxx_messageInfo_TasksInProgressRequest.Size(m) -} -func (m *TasksInProgressRequest) XXX_DiscardUnknown() { - xxx_messageInfo_TasksInProgressRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_TasksInProgressRequest proto.InternalMessageInfo - -func (m *TasksInProgressRequest) GetTaskType() string { - if m != nil { - return m.TaskType - } - return "" -} - -func (m *TasksInProgressRequest) GetStartKey() string { - if m != nil { - return m.StartKey - } - return "" -} - -func (m *TasksInProgressRequest) GetCount() int32 { - if m != nil { - return m.Count - } - return 0 -} - -type TasksInProgressResponse struct { - Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse{} } -func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } -func (*TasksInProgressResponse) ProtoMessage() {} -func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{3} -} -func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) -} -func (m *TasksInProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TasksInProgressResponse.Marshal(b, m, deterministic) -} -func (dst *TasksInProgressResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_TasksInProgressResponse.Merge(dst, src) -} -func (m *TasksInProgressResponse) XXX_Size() int { - return xxx_messageInfo_TasksInProgressResponse.Size(m) -} -func (m *TasksInProgressResponse) XXX_DiscardUnknown() { - xxx_messageInfo_TasksInProgressResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_TasksInProgressResponse 
proto.InternalMessageInfo - -func (m *TasksInProgressResponse) GetTasks() []*model.Task { - if m != nil { - return m.Tasks - } - return nil -} - -type PendingTaskRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } -func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } -func (*PendingTaskRequest) ProtoMessage() {} -func (*PendingTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{4} -} -func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) -} -func (m *PendingTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_PendingTaskRequest.Marshal(b, m, deterministic) -} -func (dst *PendingTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_PendingTaskRequest.Merge(dst, src) -} -func (m *PendingTaskRequest) XXX_Size() int { - return xxx_messageInfo_PendingTaskRequest.Size(m) -} -func (m *PendingTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_PendingTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_PendingTaskRequest proto.InternalMessageInfo - -func (m *PendingTaskRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *PendingTaskRequest) GetTaskRefName() string { - if m != nil { - return m.TaskRefName - } - return "" -} - -type TaskUpdateResponse struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskUpdateResponse) Reset() { *m = TaskUpdateResponse{} } -func (m *TaskUpdateResponse) String() string { return proto.CompactTextString(m) } -func (*TaskUpdateResponse) ProtoMessage() {} -func (*TaskUpdateResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{5} -} -func (m *TaskUpdateResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskUpdateResponse.Unmarshal(m, b) -} -func (m *TaskUpdateResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskUpdateResponse.Marshal(b, m, deterministic) -} -func (dst *TaskUpdateResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_TaskUpdateResponse.Merge(dst, src) -} -func (m *TaskUpdateResponse) XXX_Size() int { - return xxx_messageInfo_TaskUpdateResponse.Size(m) -} -func (m *TaskUpdateResponse) XXX_DiscardUnknown() { - xxx_messageInfo_TaskUpdateResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskUpdateResponse proto.InternalMessageInfo - -func (m *TaskUpdateResponse) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type AckTaskRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } -func (m *AckTaskRequest) 
String() string { return proto.CompactTextString(m) } -func (*AckTaskRequest) ProtoMessage() {} -func (*AckTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{6} -} -func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) -} -func (m *AckTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AckTaskRequest.Marshal(b, m, deterministic) -} -func (dst *AckTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_AckTaskRequest.Merge(dst, src) -} -func (m *AckTaskRequest) XXX_Size() int { - return xxx_messageInfo_AckTaskRequest.Size(m) -} -func (m *AckTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AckTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_AckTaskRequest proto.InternalMessageInfo - -func (m *AckTaskRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *AckTaskRequest) GetWorkerId() string { - if m != nil { - return m.WorkerId - } - return "" -} - -type AckTaskResponse struct { - Ack bool `protobuf:"varint,1,opt,name=ack" json:"ack,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } -func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } -func (*AckTaskResponse) ProtoMessage() {} -func (*AckTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{7} -} -func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) -} -func (m *AckTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AckTaskResponse.Marshal(b, m, deterministic) -} -func (dst *AckTaskResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_AckTaskResponse.Merge(dst, src) -} -func (m *AckTaskResponse) XXX_Size() int { - return xxx_messageInfo_AckTaskResponse.Size(m) -} -func (m *AckTaskResponse) XXX_DiscardUnknown() { - xxx_messageInfo_AckTaskResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_AckTaskResponse proto.InternalMessageInfo - -func (m *AckTaskResponse) GetAck() bool { - if m != nil { - return m.Ack - } - return false -} - -type AddLogRequest struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - Log string `protobuf:"bytes,2,opt,name=log" json:"log,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } -func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } -func (*AddLogRequest) ProtoMessage() {} -func (*AddLogRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{8} -} -func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) -} -func (m *AddLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_AddLogRequest.Marshal(b, m, deterministic) -} -func (dst *AddLogRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_AddLogRequest.Merge(dst, src) -} -func (m *AddLogRequest) XXX_Size() int { - return xxx_messageInfo_AddLogRequest.Size(m) -} -func (m *AddLogRequest) XXX_DiscardUnknown() { - xxx_messageInfo_AddLogRequest.DiscardUnknown(m) -} - -var 
xxx_messageInfo_AddLogRequest proto.InternalMessageInfo - -func (m *AddLogRequest) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -func (m *AddLogRequest) GetLog() string { - if m != nil { - return m.Log - } - return "" -} - -type TaskId struct { - TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TaskId) Reset() { *m = TaskId{} } -func (m *TaskId) String() string { return proto.CompactTextString(m) } -func (*TaskId) ProtoMessage() {} -func (*TaskId) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{9} -} -func (m *TaskId) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TaskId.Unmarshal(m, b) -} -func (m *TaskId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TaskId.Marshal(b, m, deterministic) -} -func (dst *TaskId) XXX_Merge(src proto.Message) { - xxx_messageInfo_TaskId.Merge(dst, src) -} -func (m *TaskId) XXX_Size() int { - return xxx_messageInfo_TaskId.Size(m) -} -func (m *TaskId) XXX_DiscardUnknown() { - xxx_messageInfo_TaskId.DiscardUnknown(m) -} - -var xxx_messageInfo_TaskId proto.InternalMessageInfo - -func (m *TaskId) GetTaskId() string { - if m != nil { - return m.TaskId - } - return "" -} - -type GetLogsResponse struct { - Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs" json:"logs,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetLogsResponse) Reset() { *m = GetLogsResponse{} } -func (m *GetLogsResponse) String() string { return proto.CompactTextString(m) } -func (*GetLogsResponse) ProtoMessage() {} -func (*GetLogsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_d215da243254e00c, []int{10} -} -func (m *GetLogsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetLogsResponse.Unmarshal(m, b) -} -func (m *GetLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetLogsResponse.Marshal(b, m, deterministic) -} -func (dst *GetLogsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetLogsResponse.Merge(dst, src) -} -func (m *GetLogsResponse) XXX_Size() int { - return xxx_messageInfo_GetLogsResponse.Size(m) -} -func (m *GetLogsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetLogsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetLogsResponse proto.InternalMessageInfo - -func (m *GetLogsResponse) GetLogs() []*model.TaskExecLog { - if m != nil { - return m.Logs - } - return nil -} - -func init() { - proto.RegisterType((*PollRequest)(nil), "com.netflix.conductor.grpc.PollRequest") - proto.RegisterType((*StreamingPollRequest)(nil), "com.netflix.conductor.grpc.StreamingPollRequest") - proto.RegisterType((*TasksInProgressRequest)(nil), "com.netflix.conductor.grpc.TasksInProgressRequest") - proto.RegisterType((*TasksInProgressResponse)(nil), "com.netflix.conductor.grpc.TasksInProgressResponse") - proto.RegisterType((*PendingTaskRequest)(nil), "com.netflix.conductor.grpc.PendingTaskRequest") - proto.RegisterType((*TaskUpdateResponse)(nil), "com.netflix.conductor.grpc.TaskUpdateResponse") - proto.RegisterType((*AckTaskRequest)(nil), "com.netflix.conductor.grpc.AckTaskRequest") - proto.RegisterType((*AckTaskResponse)(nil), "com.netflix.conductor.grpc.AckTaskResponse") - 
proto.RegisterType((*AddLogRequest)(nil), "com.netflix.conductor.grpc.AddLogRequest") - proto.RegisterType((*TaskId)(nil), "com.netflix.conductor.grpc.TaskId") - proto.RegisterType((*GetLogsResponse)(nil), "com.netflix.conductor.grpc.GetLogsResponse") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// TaskServiceClient is the client API for TaskService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type TaskServiceClient interface { - Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*model.Task, error) - PollStream(ctx context.Context, opts ...grpc.CallOption) (TaskService_PollStreamClient, error) - GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) - GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*model.Task, error) - UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*TaskUpdateResponse, error) - AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) - AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) - GetLogs(ctx context.Context, in *TaskId, opts ...grpc.CallOption) (*GetLogsResponse, error) -} - -type taskServiceClient struct { - cc *grpc.ClientConn -} - -func NewTaskServiceClient(cc *grpc.ClientConn) TaskServiceClient { - return &taskServiceClient{cc} -} - -func (c *taskServiceClient) Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*model.Task, error) { - out := new(model.Task) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/Poll", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) PollStream(ctx context.Context, opts ...grpc.CallOption) (TaskService_PollStreamClient, error) { - stream, err := c.cc.NewStream(ctx, &_TaskService_serviceDesc.Streams[0], "/com.netflix.conductor.grpc.TaskService/PollStream", opts...) - if err != nil { - return nil, err - } - x := &taskServicePollStreamClient{stream} - return x, nil -} - -type TaskService_PollStreamClient interface { - Send(*StreamingPollRequest) error - Recv() (*model.Task, error) - grpc.ClientStream -} - -type taskServicePollStreamClient struct { - grpc.ClientStream -} - -func (x *taskServicePollStreamClient) Send(m *StreamingPollRequest) error { - return x.ClientStream.SendMsg(m) -} - -func (x *taskServicePollStreamClient) Recv() (*model.Task, error) { - m := new(model.Task) - if err := x.ClientStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func (c *taskServiceClient) GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) { - out := new(TasksInProgressResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetTasksInProgress", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*model.Task, error) { - out := new(model.Task) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetPendingTaskForWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) UpdateTask(ctx context.Context, in *model.TaskResult, opts ...grpc.CallOption) (*TaskUpdateResponse, error) { - out := new(TaskUpdateResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/UpdateTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) { - out := new(AckTaskResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/AckTask", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/AddLog", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *taskServiceClient) GetLogs(ctx context.Context, in *TaskId, opts ...grpc.CallOption) (*GetLogsResponse, error) { - out := new(GetLogsResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.TaskService/GetLogs", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// TaskServiceServer is the server API for TaskService service. -type TaskServiceServer interface { - Poll(context.Context, *PollRequest) (*model.Task, error) - PollStream(TaskService_PollStreamServer) error - GetTasksInProgress(context.Context, *TasksInProgressRequest) (*TasksInProgressResponse, error) - GetPendingTaskForWorkflow(context.Context, *PendingTaskRequest) (*model.Task, error) - UpdateTask(context.Context, *model.TaskResult) (*TaskUpdateResponse, error) - AckTask(context.Context, *AckTaskRequest) (*AckTaskResponse, error) - AddLog(context.Context, *AddLogRequest) (*empty.Empty, error) - GetLogs(context.Context, *TaskId) (*GetLogsResponse, error) -} - -func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { - s.RegisterService(&_TaskService_serviceDesc, srv) -} - -func _TaskService_Poll_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PollRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).Poll(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/Poll", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).Poll(ctx, req.(*PollRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_PollStream_Handler(srv interface{}, stream grpc.ServerStream) error { - return srv.(TaskServiceServer).PollStream(&taskServicePollStreamServer{stream}) -} - -type TaskService_PollStreamServer interface { - Send(*model.Task) error - Recv() (*StreamingPollRequest, error) - grpc.ServerStream -} - -type taskServicePollStreamServer struct { - grpc.ServerStream -} - -func (x *taskServicePollStreamServer) Send(m 
*model.Task) error { - return x.ServerStream.SendMsg(m) -} - -func (x *taskServicePollStreamServer) Recv() (*StreamingPollRequest, error) { - m := new(StreamingPollRequest) - if err := x.ServerStream.RecvMsg(m); err != nil { - return nil, err - } - return m, nil -} - -func _TaskService_GetTasksInProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TasksInProgressRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetTasksInProgress(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/GetTasksInProgress", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetTasksInProgress(ctx, req.(*TasksInProgressRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetPendingTaskForWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(PendingTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/GetPendingTaskForWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, req.(*PendingTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.TaskResult) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).UpdateTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/UpdateTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).UpdateTask(ctx, req.(*model.TaskResult)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_AckTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AckTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).AckTask(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/AckTask", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).AckTask(ctx, req.(*AckTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_AddLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(AddLogRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).AddLog(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/AddLog", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return 
srv.(TaskServiceServer).AddLog(ctx, req.(*AddLogRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _TaskService_GetLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TaskId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(TaskServiceServer).GetLogs(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.TaskService/GetLogs", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(TaskServiceServer).GetLogs(ctx, req.(*TaskId)) - } - return interceptor(ctx, in, info, handler) -} - -var _TaskService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "com.netflix.conductor.grpc.TaskService", - HandlerType: (*TaskServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "Poll", - Handler: _TaskService_Poll_Handler, - }, - { - MethodName: "GetTasksInProgress", - Handler: _TaskService_GetTasksInProgress_Handler, - }, - { - MethodName: "GetPendingTaskForWorkflow", - Handler: _TaskService_GetPendingTaskForWorkflow_Handler, - }, - { - MethodName: "UpdateTask", - Handler: _TaskService_UpdateTask_Handler, - }, - { - MethodName: "AckTask", - Handler: _TaskService_AckTask_Handler, - }, - { - MethodName: "AddLog", - Handler: _TaskService_AddLog_Handler, - }, - { - MethodName: "GetLogs", - Handler: _TaskService_GetLogs_Handler, - }, - }, - Streams: []grpc.StreamDesc{ - { - StreamName: "PollStream", - Handler: _TaskService_PollStream_Handler, - ServerStreams: true, - ClientStreams: true, - }, - }, - Metadata: "grpc/task_service.proto", -} - -func init() { - proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_d215da243254e00c) -} - -var fileDescriptor_task_service_d215da243254e00c = []byte{ - // 701 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x55, 0x4f, 0x4f, 0xdb, 0x4a, - 0x10, 0x97, 0x5f, 0x48, 0x80, 0x89, 0x78, 0xa0, 0x15, 0x4a, 0xf2, 0xcc, 0xe1, 0x81, 0xdf, 0xe1, - 0xa5, 0xad, 0x6a, 0x23, 0x38, 0x54, 0xad, 0x7a, 0x01, 0x09, 0x50, 0x0a, 0x6a, 0x23, 0x43, 0x55, - 0x95, 0x4b, 0xe4, 0xac, 0x27, 0x8b, 0x15, 0xdb, 0xeb, 0xae, 0x37, 0x85, 0x1c, 0xfa, 0x89, 0xfa, - 0x51, 0xfa, 0xa5, 0xaa, 0xdd, 0x75, 0xfe, 0x94, 0x3f, 0x4e, 0x7a, 0xe8, 0xcd, 0x33, 0xb3, 0xf3, - 0x9b, 0xf9, 0xcd, 0xce, 0xcf, 0x0b, 0x4d, 0x26, 0x32, 0xea, 0xc9, 0x20, 0x1f, 0xf6, 0x72, 0x14, - 0x5f, 0x23, 0x8a, 0x6e, 0x26, 0xb8, 0xe4, 0xc4, 0xa6, 0x3c, 0x71, 0x53, 0x94, 0x83, 0x38, 0xba, - 0x73, 0x29, 0x4f, 0xc3, 0x11, 0x95, 0x5c, 0xb8, 0xea, 0xb8, 0xbd, 0xc3, 0x38, 0x67, 0x31, 0x7a, - 0xfa, 0x64, 0x7f, 0x34, 0xf0, 0x30, 0xc9, 0xe4, 0xd8, 0x24, 0xda, 0xcd, 0x84, 0x87, 0x18, 0x6b, - 0x48, 0xbc, 0x43, 0x1a, 0x73, 0x56, 0x04, 0x1a, 0xb3, 0x80, 0xc0, 0x7c, 0x14, 0xcb, 0xc2, 0xbf, - 0x35, 0xf3, 0x1b, 0x8f, 0xd3, 0x83, 0x7a, 0x97, 0xc7, 0xb1, 0x8f, 0x5f, 0x46, 0x98, 0x4b, 0xb2, - 0x03, 0xeb, 0xba, 0x41, 0x39, 0xce, 0xb0, 0x65, 0xed, 0x5a, 0xed, 0x75, 0x7f, 0x4d, 0x39, 0xae, - 0xc6, 0x19, 0xaa, 0xe0, 0x2d, 0x17, 0x43, 0x14, 0xbd, 0x28, 0x6c, 0xfd, 0x65, 0x82, 0xc6, 0xd1, - 0x09, 0x49, 0x03, 0x6a, 0x21, 0x4f, 0x82, 0x28, 0x6d, 0x55, 0x74, 0xa4, 0xb0, 0x9c, 0x1f, 0x16, - 0x6c, 0x5f, 0x4a, 0x81, 0x41, 0x12, 0xa5, 0xec, 0x8f, 0x96, 0x22, 0x36, 0xac, 0xd1, 0x20, 0x0b, - 0x68, 0x24, 0xc7, 0xad, 0x95, 0x5d, 0xab, 0x5d, 0xf5, 0xa7, 0x36, 0x39, 0x81, 0x75, 0xca, 0x93, - 0x2c, 0x46, 0x89, 0x61, 0xab, 0xba, 0x5b, 
0x69, 0xd7, 0x0f, 0xfe, 0x77, 0x1f, 0x9f, 0xbb, 0x1e, - 0x8c, 0x7b, 0x15, 0xe4, 0x43, 0x5f, 0xcf, 0xce, 0x9f, 0x65, 0x3a, 0x37, 0xd0, 0x50, 0x81, 0xbc, - 0x93, 0x76, 0x05, 0x67, 0x02, 0xf3, 0x7c, 0x59, 0x3a, 0xb9, 0x0c, 0x84, 0xec, 0x0d, 0x71, 0x3c, - 0xa1, 0xa3, 0x1d, 0xe7, 0x38, 0x26, 0xdb, 0x50, 0xa5, 0x7c, 0x94, 0x4a, 0xcd, 0xa6, 0xea, 0x1b, - 0xc3, 0xf1, 0xa1, 0xf9, 0xa0, 0x52, 0x9e, 0xf1, 0x34, 0x47, 0xf2, 0x0a, 0xaa, 0x0a, 0x39, 0x6f, - 0x59, 0x9a, 0xc7, 0xde, 0x62, 0x1e, 0xe6, 0xbc, 0xf3, 0x19, 0x48, 0x17, 0xd3, 0x30, 0x4a, 0x99, - 0x61, 0x67, 0x3a, 0xff, 0x17, 0xea, 0x6a, 0xb4, 0x83, 0x98, 0xdf, 0xaa, 0x69, 0x9b, 0xde, 0x61, - 0xe2, 0xea, 0x84, 0xc4, 0x81, 0x0d, 0x4d, 0x4d, 0xe0, 0xa0, 0x97, 0x06, 0x09, 0x16, 0x0c, 0xea, - 0x52, 0x83, 0x0c, 0xde, 0x07, 0x09, 0x3a, 0x2f, 0x81, 0x28, 0xcc, 0x8f, 0x59, 0x18, 0x48, 0x9c, - 0x76, 0xda, 0x84, 0x55, 0x9d, 0x39, 0x85, 0xad, 0x29, 0xb3, 0x13, 0x3a, 0xa7, 0xf0, 0xf7, 0x11, - 0x1d, 0xce, 0x77, 0xf1, 0xd4, 0xd1, 0xd2, 0x55, 0x70, 0xfe, 0x83, 0xcd, 0x29, 0x4e, 0x51, 0x73, - 0x0b, 0x2a, 0x01, 0x1d, 0x6a, 0x90, 0x35, 0x5f, 0x7d, 0x3a, 0x6f, 0x60, 0xe3, 0x28, 0x0c, 0x2f, - 0x38, 0x5b, 0x58, 0x6b, 0x0b, 0x2a, 0x31, 0x67, 0x45, 0x15, 0xf5, 0xe9, 0xec, 0x41, 0xed, 0xca, - 0xc4, 0x9e, 0xe4, 0xf2, 0x01, 0x36, 0xcf, 0x50, 0x5e, 0x70, 0x36, 0xbb, 0xa1, 0xb7, 0xb0, 0x12, - 0x73, 0x36, 0xb9, 0xa0, 0xf6, 0xc2, 0x0b, 0x3a, 0xb9, 0x43, 0xaa, 0xfa, 0xd3, 0x59, 0x07, 0xdf, - 0x6b, 0x50, 0x57, 0xde, 0x4b, 0xf3, 0x97, 0x20, 0x3e, 0xac, 0x28, 0xe1, 0x90, 0xa7, 0x16, 0x56, - 0xfd, 0x28, 0xdc, 0x39, 0x69, 0xd9, 0x8b, 0x37, 0x82, 0x20, 0x80, 0xca, 0x30, 0xca, 0x24, 0xfb, - 0x65, 0xc8, 0x8f, 0xa9, 0x77, 0x89, 0x12, 0x6d, 0x6b, 0xdf, 0x22, 0xdf, 0x80, 0x9c, 0xa1, 0xbc, - 0xb7, 0xc8, 0xe4, 0xa0, 0xac, 0xdc, 0xe3, 0xfa, 0xb2, 0x0f, 0x7f, 0x2b, 0xa7, 0xb8, 0x87, 0x14, - 0xfe, 0x39, 0x43, 0x39, 0xb7, 0xf3, 0xa7, 0x5c, 0x7c, 0x2a, 0x16, 0x9b, 0xb8, 0xa5, 0xe3, 0x7c, - 0xa0, 0x93, 0x25, 0xa7, 0x6a, 0x14, 0xa0, 0xad, 0x65, 0x7f, 0x30, 0xb6, 0xbb, 0x88, 0xdb, 0x3d, - 0x59, 0xf5, 0x61, 0xb5, 0xd8, 0x7a, 0xf2, 0xbc, 0x2c, 0xf5, 0x57, 0x89, 0xd9, 0x2f, 0x96, 0x3a, - 0x5b, 0xd4, 0x38, 0x87, 0x9a, 0x11, 0x0d, 0x79, 0x56, 0x9a, 0x36, 0x2f, 0x2c, 0xbb, 0xe1, 0x9a, - 0xe7, 0xca, 0x9d, 0x3c, 0x57, 0xee, 0x89, 0x7a, 0xae, 0xc8, 0x35, 0xac, 0x16, 0x12, 0x21, 0xce, - 0x22, 0xae, 0x9d, 0xb0, 0xbc, 0xd1, 0x7b, 0x5a, 0x3b, 0x7e, 0x77, 0xbc, 0x31, 0x27, 0x96, 0x6e, - 0xff, 0xfa, 0x35, 0x8b, 0xe4, 0xcd, 0xa8, 0xaf, 0x30, 0xbc, 0x02, 0xc3, 0x9b, 0x62, 0x78, 0x34, - 0x8e, 0x30, 0x95, 0x1e, 0xe3, 0xfa, 0x35, 0x9e, 0xf9, 0x95, 0xd9, 0xaf, 0xe9, 0xbe, 0x0f, 0x7f, - 0x06, 0x00, 0x00, 0xff, 0xff, 0x57, 0x8d, 0x4b, 0x26, 0xab, 0x07, 0x00, 0x00, -} diff --git a/client/gogrpc/conductor/grpc/tasks/task_service.pb.go b/client/gogrpc/conductor/grpc/tasks/task_service.pb.go new file mode 100644 index 0000000000..6c74f322a2 --- /dev/null +++ b/client/gogrpc/conductor/grpc/tasks/task_service.pb.go @@ -0,0 +1,1757 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: grpc/task_service.proto + +package tasks // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. 
+var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type PollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollRequest) Reset() { *m = PollRequest{} } +func (m *PollRequest) String() string { return proto.CompactTextString(m) } +func (*PollRequest) ProtoMessage() {} +func (*PollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{0} +} +func (m *PollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollRequest.Unmarshal(m, b) +} +func (m *PollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollRequest.Marshal(b, m, deterministic) +} +func (dst *PollRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollRequest.Merge(dst, src) +} +func (m *PollRequest) XXX_Size() int { + return xxx_messageInfo_PollRequest.Size(m) +} +func (m *PollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PollRequest proto.InternalMessageInfo + +func (m *PollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *PollRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *PollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +type PollResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PollResponse) Reset() { *m = PollResponse{} } +func (m *PollResponse) String() string { return proto.CompactTextString(m) } +func (*PollResponse) ProtoMessage() {} +func (*PollResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{1} +} +func (m *PollResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PollResponse.Unmarshal(m, b) +} +func (m *PollResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PollResponse.Marshal(b, m, deterministic) +} +func (dst *PollResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PollResponse.Merge(dst, src) +} +func (m *PollResponse) XXX_Size() int { + return xxx_messageInfo_PollResponse.Size(m) +} +func (m *PollResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PollResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PollResponse proto.InternalMessageInfo + +func (m *PollResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type BatchPollRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + Domain string `protobuf:"bytes,3,opt,name=domain" 
json:"domain,omitempty"` + Count int32 `protobuf:"varint,4,opt,name=count" json:"count,omitempty"` + Timeout int32 `protobuf:"varint,5,opt,name=timeout" json:"timeout,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *BatchPollRequest) Reset() { *m = BatchPollRequest{} } +func (m *BatchPollRequest) String() string { return proto.CompactTextString(m) } +func (*BatchPollRequest) ProtoMessage() {} +func (*BatchPollRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{2} +} +func (m *BatchPollRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_BatchPollRequest.Unmarshal(m, b) +} +func (m *BatchPollRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_BatchPollRequest.Marshal(b, m, deterministic) +} +func (dst *BatchPollRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_BatchPollRequest.Merge(dst, src) +} +func (m *BatchPollRequest) XXX_Size() int { + return xxx_messageInfo_BatchPollRequest.Size(m) +} +func (m *BatchPollRequest) XXX_DiscardUnknown() { + xxx_messageInfo_BatchPollRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_BatchPollRequest proto.InternalMessageInfo + +func (m *BatchPollRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *BatchPollRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +func (m *BatchPollRequest) GetDomain() string { + if m != nil { + return m.Domain + } + return "" +} + +func (m *BatchPollRequest) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +func (m *BatchPollRequest) GetTimeout() int32 { + if m != nil { + return m.Timeout + } + return 0 +} + +type TasksInProgressRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey" json:"start_key,omitempty"` + Count int32 `protobuf:"varint,3,opt,name=count" json:"count,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{} } +func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressRequest) ProtoMessage() {} +func (*TasksInProgressRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{3} +} +func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b) +} +func (m *TasksInProgressRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressRequest.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressRequest.Merge(dst, src) +} +func (m *TasksInProgressRequest) XXX_Size() int { + return xxx_messageInfo_TasksInProgressRequest.Size(m) +} +func (m *TasksInProgressRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressRequest proto.InternalMessageInfo + +func (m *TasksInProgressRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *TasksInProgressRequest) GetStartKey() string { + if m != nil { + return m.StartKey + } + return "" +} + +func (m 
*TasksInProgressRequest) GetCount() int32 { + if m != nil { + return m.Count + } + return 0 +} + +type TasksInProgressResponse struct { + Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse{} } +func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) } +func (*TasksInProgressResponse) ProtoMessage() {} +func (*TasksInProgressResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{4} +} +func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b) +} +func (m *TasksInProgressResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TasksInProgressResponse.Marshal(b, m, deterministic) +} +func (dst *TasksInProgressResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TasksInProgressResponse.Merge(dst, src) +} +func (m *TasksInProgressResponse) XXX_Size() int { + return xxx_messageInfo_TasksInProgressResponse.Size(m) +} +func (m *TasksInProgressResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TasksInProgressResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TasksInProgressResponse proto.InternalMessageInfo + +func (m *TasksInProgressResponse) GetTasks() []*model.Task { + if m != nil { + return m.Tasks + } + return nil +} + +type PendingTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} } +func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) } +func (*PendingTaskRequest) ProtoMessage() {} +func (*PendingTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{5} +} +func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b) +} +func (m *PendingTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PendingTaskRequest.Marshal(b, m, deterministic) +} +func (dst *PendingTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingTaskRequest.Merge(dst, src) +} +func (m *PendingTaskRequest) XXX_Size() int { + return xxx_messageInfo_PendingTaskRequest.Size(m) +} +func (m *PendingTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PendingTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingTaskRequest proto.InternalMessageInfo + +func (m *PendingTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *PendingTaskRequest) GetTaskRefName() string { + if m != nil { + return m.TaskRefName + } + return "" +} + +type PendingTaskResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PendingTaskResponse) Reset() { *m = PendingTaskResponse{} } +func (m *PendingTaskResponse) String() string { return proto.CompactTextString(m) } 
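+// --- Illustrative usage sketch (editorial addition, not protoc-gen-go output) ---
+// A minimal example of how a worker might drive this regenerated API. It
+// assumes NewTaskServiceClient and the Poll RPC are generated further down in
+// this file, as they were in the deleted grpc package, and that Poll now
+// returns the task wrapped in PollResponse rather than a bare model.Task.
+// The task type and worker id values are hypothetical.
+//
+//	func pollOnce(ctx context.Context, cc *grpc.ClientConn) (*model.Task, error) {
+//		client := NewTaskServiceClient(cc)
+//		resp, err := client.Poll(ctx, &PollRequest{
+//			TaskType: "encode_video", // hypothetical task type
+//			WorkerId: "worker-1",     // hypothetical worker id
+//		})
+//		if err != nil {
+//			return nil, err
+//		}
+//		return resp.GetTask(), nil // GetTask is nil-safe on a nil response
+//	}
+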
+func (*PendingTaskResponse) ProtoMessage() {} +func (*PendingTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{6} +} +func (m *PendingTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PendingTaskResponse.Unmarshal(m, b) +} +func (m *PendingTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PendingTaskResponse.Marshal(b, m, deterministic) +} +func (dst *PendingTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PendingTaskResponse.Merge(dst, src) +} +func (m *PendingTaskResponse) XXX_Size() int { + return xxx_messageInfo_PendingTaskResponse.Size(m) +} +func (m *PendingTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PendingTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PendingTaskResponse proto.InternalMessageInfo + +func (m *PendingTaskResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type UpdateTaskRequest struct { + Result *model.TaskResult `protobuf:"bytes,1,opt,name=result" json:"result,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } +func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskRequest) ProtoMessage() {} +func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{7} +} +func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) +} +func (m *UpdateTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskRequest.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskRequest.Merge(dst, src) +} +func (m *UpdateTaskRequest) XXX_Size() int { + return xxx_messageInfo_UpdateTaskRequest.Size(m) +} +func (m *UpdateTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_UpdateTaskRequest proto.InternalMessageInfo + +func (m *UpdateTaskRequest) GetResult() *model.TaskResult { + if m != nil { + return m.Result + } + return nil +} + +type UpdateTaskResponse struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } +func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) } +func (*UpdateTaskResponse) ProtoMessage() {} +func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{8} +} +func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) +} +func (m *UpdateTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_UpdateTaskResponse.Marshal(b, m, deterministic) +} +func (dst *UpdateTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_UpdateTaskResponse.Merge(dst, src) +} +func (m *UpdateTaskResponse) XXX_Size() int { + return xxx_messageInfo_UpdateTaskResponse.Size(m) +} +func (m *UpdateTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_UpdateTaskResponse.DiscardUnknown(m) +} + +var 
xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo + +func (m *UpdateTaskResponse) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type AckTaskRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} } +func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) } +func (*AckTaskRequest) ProtoMessage() {} +func (*AckTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{9} +} +func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b) +} +func (m *AckTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AckTaskRequest.Marshal(b, m, deterministic) +} +func (dst *AckTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AckTaskRequest.Merge(dst, src) +} +func (m *AckTaskRequest) XXX_Size() int { + return xxx_messageInfo_AckTaskRequest.Size(m) +} +func (m *AckTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskRequest proto.InternalMessageInfo + +func (m *AckTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AckTaskRequest) GetWorkerId() string { + if m != nil { + return m.WorkerId + } + return "" +} + +type AckTaskResponse struct { + Ack bool `protobuf:"varint,1,opt,name=ack" json:"ack,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} } +func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) } +func (*AckTaskResponse) ProtoMessage() {} +func (*AckTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{10} +} +func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b) +} +func (m *AckTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AckTaskResponse.Marshal(b, m, deterministic) +} +func (dst *AckTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AckTaskResponse.Merge(dst, src) +} +func (m *AckTaskResponse) XXX_Size() int { + return xxx_messageInfo_AckTaskResponse.Size(m) +} +func (m *AckTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AckTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AckTaskResponse proto.InternalMessageInfo + +func (m *AckTaskResponse) GetAck() bool { + if m != nil { + return m.Ack + } + return false +} + +type AddLogRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + Log string `protobuf:"bytes,2,opt,name=log" json:"log,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddLogRequest) Reset() { *m = AddLogRequest{} } +func (m *AddLogRequest) String() string { return proto.CompactTextString(m) } +func (*AddLogRequest) ProtoMessage() {} +func (*AddLogRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{11} 
+} +func (m *AddLogRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddLogRequest.Unmarshal(m, b) +} +func (m *AddLogRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddLogRequest.Marshal(b, m, deterministic) +} +func (dst *AddLogRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddLogRequest.Merge(dst, src) +} +func (m *AddLogRequest) XXX_Size() int { + return xxx_messageInfo_AddLogRequest.Size(m) +} +func (m *AddLogRequest) XXX_DiscardUnknown() { + xxx_messageInfo_AddLogRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_AddLogRequest proto.InternalMessageInfo + +func (m *AddLogRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +func (m *AddLogRequest) GetLog() string { + if m != nil { + return m.Log + } + return "" +} + +type AddLogResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *AddLogResponse) Reset() { *m = AddLogResponse{} } +func (m *AddLogResponse) String() string { return proto.CompactTextString(m) } +func (*AddLogResponse) ProtoMessage() {} +func (*AddLogResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{12} +} +func (m *AddLogResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_AddLogResponse.Unmarshal(m, b) +} +func (m *AddLogResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_AddLogResponse.Marshal(b, m, deterministic) +} +func (dst *AddLogResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_AddLogResponse.Merge(dst, src) +} +func (m *AddLogResponse) XXX_Size() int { + return xxx_messageInfo_AddLogResponse.Size(m) +} +func (m *AddLogResponse) XXX_DiscardUnknown() { + xxx_messageInfo_AddLogResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_AddLogResponse proto.InternalMessageInfo + +type GetTaskLogsRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskLogsRequest) Reset() { *m = GetTaskLogsRequest{} } +func (m *GetTaskLogsRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsRequest) ProtoMessage() {} +func (*GetTaskLogsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{13} +} +func (m *GetTaskLogsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsRequest.Unmarshal(m, b) +} +func (m *GetTaskLogsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskLogsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsRequest.Merge(dst, src) +} +func (m *GetTaskLogsRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsRequest.Size(m) +} +func (m *GetTaskLogsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsRequest proto.InternalMessageInfo + +func (m *GetTaskLogsRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type GetTaskLogsResponse struct { + Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs" json:"logs,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m 
*GetTaskLogsResponse) Reset() { *m = GetTaskLogsResponse{} } +func (m *GetTaskLogsResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskLogsResponse) ProtoMessage() {} +func (*GetTaskLogsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{14} +} +func (m *GetTaskLogsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskLogsResponse.Unmarshal(m, b) +} +func (m *GetTaskLogsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskLogsResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskLogsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskLogsResponse.Merge(dst, src) +} +func (m *GetTaskLogsResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskLogsResponse.Size(m) +} +func (m *GetTaskLogsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskLogsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskLogsResponse proto.InternalMessageInfo + +func (m *GetTaskLogsResponse) GetLogs() []*model.TaskExecLog { + if m != nil { + return m.Logs + } + return nil +} + +type GetTaskRequest struct { + TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } +func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } +func (*GetTaskRequest) ProtoMessage() {} +func (*GetTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{15} +} +func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b) +} +func (m *GetTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskRequest.Marshal(b, m, deterministic) +} +func (dst *GetTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskRequest.Merge(dst, src) +} +func (m *GetTaskRequest) XXX_Size() int { + return xxx_messageInfo_GetTaskRequest.Size(m) +} +func (m *GetTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskRequest proto.InternalMessageInfo + +func (m *GetTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type GetTaskResponse struct { + Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} } +func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) } +func (*GetTaskResponse) ProtoMessage() {} +func (*GetTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{16} +} +func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b) +} +func (m *GetTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetTaskResponse.Marshal(b, m, deterministic) +} +func (dst *GetTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetTaskResponse.Merge(dst, src) +} +func (m *GetTaskResponse) XXX_Size() int { + return xxx_messageInfo_GetTaskResponse.Size(m) +} +func (m *GetTaskResponse) XXX_DiscardUnknown() { + 
xxx_messageInfo_GetTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetTaskResponse proto.InternalMessageInfo + +func (m *GetTaskResponse) GetTask() *model.Task { + if m != nil { + return m.Task + } + return nil +} + +type RemoveTaskRequest struct { + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveTaskRequest) Reset() { *m = RemoveTaskRequest{} } +func (m *RemoveTaskRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveTaskRequest) ProtoMessage() {} +func (*RemoveTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{17} +} +func (m *RemoveTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveTaskRequest.Unmarshal(m, b) +} +func (m *RemoveTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveTaskRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveTaskRequest.Merge(dst, src) +} +func (m *RemoveTaskRequest) XXX_Size() int { + return xxx_messageInfo_RemoveTaskRequest.Size(m) +} +func (m *RemoveTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveTaskRequest proto.InternalMessageInfo + +func (m *RemoveTaskRequest) GetTaskType() string { + if m != nil { + return m.TaskType + } + return "" +} + +func (m *RemoveTaskRequest) GetTaskId() string { + if m != nil { + return m.TaskId + } + return "" +} + +type RemoveTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveTaskResponse) Reset() { *m = RemoveTaskResponse{} } +func (m *RemoveTaskResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveTaskResponse) ProtoMessage() {} +func (*RemoveTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{18} +} +func (m *RemoveTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveTaskResponse.Unmarshal(m, b) +} +func (m *RemoveTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveTaskResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveTaskResponse.Merge(dst, src) +} +func (m *RemoveTaskResponse) XXX_Size() int { + return xxx_messageInfo_RemoveTaskResponse.Size(m) +} +func (m *RemoveTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveTaskResponse proto.InternalMessageInfo + +type QueueSizesRequest struct { + TaskTypes []string `protobuf:"bytes,1,rep,name=task_types,json=taskTypes" json:"task_types,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueSizesRequest) Reset() { *m = QueueSizesRequest{} } +func (m *QueueSizesRequest) String() string { return proto.CompactTextString(m) } +func (*QueueSizesRequest) ProtoMessage() {} +func (*QueueSizesRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{19} +} +func (m 
*QueueSizesRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueSizesRequest.Unmarshal(m, b) +} +func (m *QueueSizesRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueSizesRequest.Marshal(b, m, deterministic) +} +func (dst *QueueSizesRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueSizesRequest.Merge(dst, src) +} +func (m *QueueSizesRequest) XXX_Size() int { + return xxx_messageInfo_QueueSizesRequest.Size(m) +} +func (m *QueueSizesRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueSizesRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueSizesRequest proto.InternalMessageInfo + +func (m *QueueSizesRequest) GetTaskTypes() []string { + if m != nil { + return m.TaskTypes + } + return nil +} + +type QueueSizesResponse struct { + QueueForTask map[string]int32 `protobuf:"bytes,1,rep,name=queue_for_task,json=queueForTask" json:"queue_for_task,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueSizesResponse) Reset() { *m = QueueSizesResponse{} } +func (m *QueueSizesResponse) String() string { return proto.CompactTextString(m) } +func (*QueueSizesResponse) ProtoMessage() {} +func (*QueueSizesResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{20} +} +func (m *QueueSizesResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueSizesResponse.Unmarshal(m, b) +} +func (m *QueueSizesResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueSizesResponse.Marshal(b, m, deterministic) +} +func (dst *QueueSizesResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueSizesResponse.Merge(dst, src) +} +func (m *QueueSizesResponse) XXX_Size() int { + return xxx_messageInfo_QueueSizesResponse.Size(m) +} +func (m *QueueSizesResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueSizesResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueSizesResponse proto.InternalMessageInfo + +func (m *QueueSizesResponse) GetQueueForTask() map[string]int32 { + if m != nil { + return m.QueueForTask + } + return nil +} + +type QueueInfoRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueInfoRequest) Reset() { *m = QueueInfoRequest{} } +func (m *QueueInfoRequest) String() string { return proto.CompactTextString(m) } +func (*QueueInfoRequest) ProtoMessage() {} +func (*QueueInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{21} +} +func (m *QueueInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueInfoRequest.Unmarshal(m, b) +} +func (m *QueueInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueInfoRequest.Marshal(b, m, deterministic) +} +func (dst *QueueInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueInfoRequest.Merge(dst, src) +} +func (m *QueueInfoRequest) XXX_Size() int { + return xxx_messageInfo_QueueInfoRequest.Size(m) +} +func (m *QueueInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueInfoRequest proto.InternalMessageInfo + +type QueueInfoResponse struct { + Queues map[string]int64 `protobuf:"bytes,1,rep,name=queues" json:"queues,omitempty" 
protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueInfoResponse) Reset() { *m = QueueInfoResponse{} } +func (m *QueueInfoResponse) String() string { return proto.CompactTextString(m) } +func (*QueueInfoResponse) ProtoMessage() {} +func (*QueueInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{22} +} +func (m *QueueInfoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueInfoResponse.Unmarshal(m, b) +} +func (m *QueueInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueInfoResponse.Marshal(b, m, deterministic) +} +func (dst *QueueInfoResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueInfoResponse.Merge(dst, src) +} +func (m *QueueInfoResponse) XXX_Size() int { + return xxx_messageInfo_QueueInfoResponse.Size(m) +} +func (m *QueueInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueInfoResponse proto.InternalMessageInfo + +func (m *QueueInfoResponse) GetQueues() map[string]int64 { + if m != nil { + return m.Queues + } + return nil +} + +type QueueAllInfoRequest struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoRequest) Reset() { *m = QueueAllInfoRequest{} } +func (m *QueueAllInfoRequest) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoRequest) ProtoMessage() {} +func (*QueueAllInfoRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{23} +} +func (m *QueueAllInfoRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoRequest.Unmarshal(m, b) +} +func (m *QueueAllInfoRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoRequest.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoRequest.Merge(dst, src) +} +func (m *QueueAllInfoRequest) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoRequest.Size(m) +} +func (m *QueueAllInfoRequest) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoRequest proto.InternalMessageInfo + +type QueueAllInfoResponse struct { + Queues map[string]*QueueAllInfoResponse_QueueInfo `protobuf:"bytes,1,rep,name=queues" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse) Reset() { *m = QueueAllInfoResponse{} } +func (m *QueueAllInfoResponse) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse) ProtoMessage() {} +func (*QueueAllInfoResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24} +} +func (m *QueueAllInfoResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoResponse.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_QueueAllInfoResponse.Merge(dst, src) +} +func (m *QueueAllInfoResponse) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoResponse.Size(m) +} +func (m *QueueAllInfoResponse) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse proto.InternalMessageInfo + +func (m *QueueAllInfoResponse) GetQueues() map[string]*QueueAllInfoResponse_QueueInfo { + if m != nil { + return m.Queues + } + return nil +} + +type QueueAllInfoResponse_ShardInfo struct { + Size int64 `protobuf:"varint,1,opt,name=size" json:"size,omitempty"` + Uacked int64 `protobuf:"varint,2,opt,name=uacked" json:"uacked,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse_ShardInfo) Reset() { *m = QueueAllInfoResponse_ShardInfo{} } +func (m *QueueAllInfoResponse_ShardInfo) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse_ShardInfo) ProtoMessage() {} +func (*QueueAllInfoResponse_ShardInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 0} +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse_ShardInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Merge(dst, src) +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_Size() int { + return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Size(m) +} +func (m *QueueAllInfoResponse_ShardInfo) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoResponse_ShardInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse_ShardInfo proto.InternalMessageInfo + +func (m *QueueAllInfoResponse_ShardInfo) GetSize() int64 { + if m != nil { + return m.Size + } + return 0 +} + +func (m *QueueAllInfoResponse_ShardInfo) GetUacked() int64 { + if m != nil { + return m.Uacked + } + return 0 +} + +type QueueAllInfoResponse_QueueInfo struct { + Shards map[string]*QueueAllInfoResponse_ShardInfo `protobuf:"bytes,1,rep,name=shards" json:"shards,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *QueueAllInfoResponse_QueueInfo) Reset() { *m = QueueAllInfoResponse_QueueInfo{} } +func (m *QueueAllInfoResponse_QueueInfo) String() string { return proto.CompactTextString(m) } +func (*QueueAllInfoResponse_QueueInfo) ProtoMessage() {} +func (*QueueAllInfoResponse_QueueInfo) Descriptor() ([]byte, []int) { + return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 1} +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Unmarshal(m, b) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Marshal(b, m, deterministic) +} +func (dst *QueueAllInfoResponse_QueueInfo) XXX_Merge(src proto.Message) { + xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Merge(dst, src) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_Size() int { + return 
xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Size(m) +} +func (m *QueueAllInfoResponse_QueueInfo) XXX_DiscardUnknown() { + xxx_messageInfo_QueueAllInfoResponse_QueueInfo.DiscardUnknown(m) +} + +var xxx_messageInfo_QueueAllInfoResponse_QueueInfo proto.InternalMessageInfo + +func (m *QueueAllInfoResponse_QueueInfo) GetShards() map[string]*QueueAllInfoResponse_ShardInfo { + if m != nil { + return m.Shards + } + return nil +} + +func init() { + proto.RegisterType((*PollRequest)(nil), "conductor.grpc.tasks.PollRequest") + proto.RegisterType((*PollResponse)(nil), "conductor.grpc.tasks.PollResponse") + proto.RegisterType((*BatchPollRequest)(nil), "conductor.grpc.tasks.BatchPollRequest") + proto.RegisterType((*TasksInProgressRequest)(nil), "conductor.grpc.tasks.TasksInProgressRequest") + proto.RegisterType((*TasksInProgressResponse)(nil), "conductor.grpc.tasks.TasksInProgressResponse") + proto.RegisterType((*PendingTaskRequest)(nil), "conductor.grpc.tasks.PendingTaskRequest") + proto.RegisterType((*PendingTaskResponse)(nil), "conductor.grpc.tasks.PendingTaskResponse") + proto.RegisterType((*UpdateTaskRequest)(nil), "conductor.grpc.tasks.UpdateTaskRequest") + proto.RegisterType((*UpdateTaskResponse)(nil), "conductor.grpc.tasks.UpdateTaskResponse") + proto.RegisterType((*AckTaskRequest)(nil), "conductor.grpc.tasks.AckTaskRequest") + proto.RegisterType((*AckTaskResponse)(nil), "conductor.grpc.tasks.AckTaskResponse") + proto.RegisterType((*AddLogRequest)(nil), "conductor.grpc.tasks.AddLogRequest") + proto.RegisterType((*AddLogResponse)(nil), "conductor.grpc.tasks.AddLogResponse") + proto.RegisterType((*GetTaskLogsRequest)(nil), "conductor.grpc.tasks.GetTaskLogsRequest") + proto.RegisterType((*GetTaskLogsResponse)(nil), "conductor.grpc.tasks.GetTaskLogsResponse") + proto.RegisterType((*GetTaskRequest)(nil), "conductor.grpc.tasks.GetTaskRequest") + proto.RegisterType((*GetTaskResponse)(nil), "conductor.grpc.tasks.GetTaskResponse") + proto.RegisterType((*RemoveTaskRequest)(nil), "conductor.grpc.tasks.RemoveTaskRequest") + proto.RegisterType((*RemoveTaskResponse)(nil), "conductor.grpc.tasks.RemoveTaskResponse") + proto.RegisterType((*QueueSizesRequest)(nil), "conductor.grpc.tasks.QueueSizesRequest") + proto.RegisterType((*QueueSizesResponse)(nil), "conductor.grpc.tasks.QueueSizesResponse") + proto.RegisterMapType((map[string]int32)(nil), "conductor.grpc.tasks.QueueSizesResponse.QueueForTaskEntry") + proto.RegisterType((*QueueInfoRequest)(nil), "conductor.grpc.tasks.QueueInfoRequest") + proto.RegisterType((*QueueInfoResponse)(nil), "conductor.grpc.tasks.QueueInfoResponse") + proto.RegisterMapType((map[string]int64)(nil), "conductor.grpc.tasks.QueueInfoResponse.QueuesEntry") + proto.RegisterType((*QueueAllInfoRequest)(nil), "conductor.grpc.tasks.QueueAllInfoRequest") + proto.RegisterType((*QueueAllInfoResponse)(nil), "conductor.grpc.tasks.QueueAllInfoResponse") + proto.RegisterMapType((map[string]*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueuesEntry") + proto.RegisterType((*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.ShardInfo") + proto.RegisterType((*QueueAllInfoResponse_QueueInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo") + proto.RegisterMapType((map[string]*QueueAllInfoResponse_ShardInfo)(nil), "conductor.grpc.tasks.QueueAllInfoResponse.QueueInfo.ShardsEntry") +} + +// Reference imports to suppress errors if they are not otherwise used. 
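[Editorial note, not part of the generated diff: the message types registered above are plain structs with nil-safe getters — each Get* returns the field's zero value when the receiver is nil, as the method bodies show — and the TaskServiceClient defined just below exposes BatchPoll as a server-streaming RPC. A minimal, self-contained sketch of both follows. The import path, server address, and the empty BatchPollRequest are assumptions for illustration only; adjust them to wherever this generated package actually lives.]

package main

import (
	"context"
	"fmt"
	"io"
	"log"

	"google.golang.org/grpc"

	// Hypothetical import path for the generated package shown in this diff.
	pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks"
)

func main() {
	// 1) Nil-safe getters: calling Get* on a nil message returns the
	//    field's zero value (here a nil map) instead of panicking, so
	//    callers can chain getters without nil checks.
	var sizes *pb.QueueSizesResponse
	fmt.Println(sizes.GetQueueForTask()) // map[]

	sizes = &pb.QueueSizesResponse{QueueForTask: map[string]int32{"encode": 12}}
	fmt.Println(sizes.GetQueueForTask()["encode"]) // 12

	// 2) BatchPoll is server-streaming: send one request, then Recv()
	//    tasks until io.EOF. Address and request fields are placeholders.
	conn, err := grpc.Dial("localhost:8090", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	stream, err := pb.NewTaskServiceClient(conn).BatchPoll(context.Background(), &pb.BatchPollRequest{})
	if err != nil {
		log.Fatal(err)
	}
	for {
		task, err := stream.Recv() // task is a *model.Task
		if err == io.EOF {
			break // server closed the stream
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Printf("polled task: %v\n", task)
	}
}

[This mirrors the plumbing visible below: the unary methods all go through c.cc.Invoke, while BatchPoll alone uses c.cc.NewStream against the single Streams entry in _TaskService_serviceDesc.]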
+var _ context.Context +var _ grpc.ClientConn + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the grpc package it is being compiled against. +const _ = grpc.SupportPackageIsVersion4 + +// TaskServiceClient is the client API for TaskService service. +// +// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. +type TaskServiceClient interface { + // GET /poll/{tasktype} + Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) + // /poll/batch/{tasktype} + BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) + // GET /in_progress/{tasktype} + GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) + // GET /in_progress/{workflowId}/{taskRefName} + GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) + // POST / + UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) + // POST /{taskId}/ack + AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) + // POST /{taskId}/log + AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) + // GET {taskId}/log + GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) + // GET /{taskId} + GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) + // DELETE /queue/{taskType}/{taskId} + RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) + // GET /queue/sizes + GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) + // GET /queue/all + GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) + // GET /queue/all/verbose + GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) +} + +type taskServiceClient struct { + cc *grpc.ClientConn +} + +func NewTaskServiceClient(cc *grpc.ClientConn) TaskServiceClient { + return &taskServiceClient{cc} +} + +func (c *taskServiceClient) Poll(ctx context.Context, in *PollRequest, opts ...grpc.CallOption) (*PollResponse, error) { + out := new(PollResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/Poll", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) BatchPoll(ctx context.Context, in *BatchPollRequest, opts ...grpc.CallOption) (TaskService_BatchPollClient, error) { + stream, err := c.cc.NewStream(ctx, &_TaskService_serviceDesc.Streams[0], "/conductor.grpc.tasks.TaskService/BatchPoll", opts...) 
+ if err != nil { + return nil, err + } + x := &taskServiceBatchPollClient{stream} + if err := x.ClientStream.SendMsg(in); err != nil { + return nil, err + } + if err := x.ClientStream.CloseSend(); err != nil { + return nil, err + } + return x, nil +} + +type TaskService_BatchPollClient interface { + Recv() (*model.Task, error) + grpc.ClientStream +} + +type taskServiceBatchPollClient struct { + grpc.ClientStream +} + +func (x *taskServiceBatchPollClient) Recv() (*model.Task, error) { + m := new(model.Task) + if err := x.ClientStream.RecvMsg(m); err != nil { + return nil, err + } + return m, nil +} + +func (c *taskServiceClient) GetTasksInProgress(ctx context.Context, in *TasksInProgressRequest, opts ...grpc.CallOption) (*TasksInProgressResponse, error) { + out := new(TasksInProgressResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTasksInProgress", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetPendingTaskForWorkflow(ctx context.Context, in *PendingTaskRequest, opts ...grpc.CallOption) (*PendingTaskResponse, error) { + out := new(PendingTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) UpdateTask(ctx context.Context, in *UpdateTaskRequest, opts ...grpc.CallOption) (*UpdateTaskResponse, error) { + out := new(UpdateTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/UpdateTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AckTask(ctx context.Context, in *AckTaskRequest, opts ...grpc.CallOption) (*AckTaskResponse, error) { + out := new(AckTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AckTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) AddLog(ctx context.Context, in *AddLogRequest, opts ...grpc.CallOption) (*AddLogResponse, error) { + out := new(AddLogResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/AddLog", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetTaskLogs(ctx context.Context, in *GetTaskLogsRequest, opts ...grpc.CallOption) (*GetTaskLogsResponse, error) { + out := new(GetTaskLogsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTaskLogs", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetTask(ctx context.Context, in *GetTaskRequest, opts ...grpc.CallOption) (*GetTaskResponse, error) { + out := new(GetTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetTask", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) RemoveTaskFromQueue(ctx context.Context, in *RemoveTaskRequest, opts ...grpc.CallOption) (*RemoveTaskResponse, error) { + out := new(RemoveTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueSizesForTasks(ctx context.Context, in *QueueSizesRequest, opts ...grpc.CallOption) (*QueueSizesResponse, error) { + out := new(QueueSizesResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueInfo(ctx context.Context, in *QueueInfoRequest, opts ...grpc.CallOption) (*QueueInfoResponse, error) { + out := new(QueueInfoResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *taskServiceClient) GetQueueAllInfo(ctx context.Context, in *QueueAllInfoRequest, opts ...grpc.CallOption) (*QueueAllInfoResponse, error) { + out := new(QueueAllInfoResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// TaskServiceServer is the server API for TaskService service. +type TaskServiceServer interface { + // GET /poll/{tasktype} + Poll(context.Context, *PollRequest) (*PollResponse, error) + // /poll/batch/{tasktype} + BatchPoll(*BatchPollRequest, TaskService_BatchPollServer) error + // GET /in_progress/{tasktype} + GetTasksInProgress(context.Context, *TasksInProgressRequest) (*TasksInProgressResponse, error) + // GET /in_progress/{workflowId}/{taskRefName} + GetPendingTaskForWorkflow(context.Context, *PendingTaskRequest) (*PendingTaskResponse, error) + // POST / + UpdateTask(context.Context, *UpdateTaskRequest) (*UpdateTaskResponse, error) + // POST /{taskId}/ack + AckTask(context.Context, *AckTaskRequest) (*AckTaskResponse, error) + // POST /{taskId}/log + AddLog(context.Context, *AddLogRequest) (*AddLogResponse, error) + // GET {taskId}/log + GetTaskLogs(context.Context, *GetTaskLogsRequest) (*GetTaskLogsResponse, error) + // GET /{taskId} + GetTask(context.Context, *GetTaskRequest) (*GetTaskResponse, error) + // DELETE /queue/{taskType}/{taskId} + RemoveTaskFromQueue(context.Context, *RemoveTaskRequest) (*RemoveTaskResponse, error) + // GET /queue/sizes + GetQueueSizesForTasks(context.Context, *QueueSizesRequest) (*QueueSizesResponse, error) + // GET /queue/all + GetQueueInfo(context.Context, *QueueInfoRequest) (*QueueInfoResponse, error) + // GET /queue/all/verbose + GetQueueAllInfo(context.Context, *QueueAllInfoRequest) (*QueueAllInfoResponse, error) +} + +func RegisterTaskServiceServer(s *grpc.Server, srv TaskServiceServer) { + s.RegisterService(&_TaskService_serviceDesc, srv) +} + +func _TaskService_Poll_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PollRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).Poll(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/Poll", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).Poll(ctx, req.(*PollRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_BatchPoll_Handler(srv interface{}, stream grpc.ServerStream) error { + m := new(BatchPollRequest) + if err := stream.RecvMsg(m); err != nil { + return err + } + return srv.(TaskServiceServer).BatchPoll(m, 
&taskServiceBatchPollServer{stream}) +} + +type TaskService_BatchPollServer interface { + Send(*model.Task) error + grpc.ServerStream +} + +type taskServiceBatchPollServer struct { + grpc.ServerStream +} + +func (x *taskServiceBatchPollServer) Send(m *model.Task) error { + return x.ServerStream.SendMsg(m) +} + +func _TaskService_GetTasksInProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TasksInProgressRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTasksInProgress", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTasksInProgress(ctx, req.(*TasksInProgressRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetPendingTaskForWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PendingTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetPendingTaskForWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetPendingTaskForWorkflow(ctx, req.(*PendingTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_UpdateTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(UpdateTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).UpdateTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/UpdateTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).UpdateTask(ctx, req.(*UpdateTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AckTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AckTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AckTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/AckTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).AckTask(ctx, req.(*AckTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_AddLog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(AddLogRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).AddLog(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/AddLog", + } + handler := func(ctx context.Context, req interface{}) (interface{}, 
error) { + return srv.(TaskServiceServer).AddLog(ctx, req.(*AddLogRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetTaskLogs_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskLogsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTaskLogs(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTaskLogs", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTaskLogs(ctx, req.(*GetTaskLogsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetTask_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetTask(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetTask", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetTask(ctx, req.(*GetTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_RemoveTaskFromQueue_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/RemoveTaskFromQueue", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).RemoveTaskFromQueue(ctx, req.(*RemoveTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetQueueSizesForTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueSizesRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueSizesForTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueSizesForTasks(ctx, req.(*QueueSizesRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _TaskService_GetQueueInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueInfo(ctx, req.(*QueueInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func 
_TaskService_GetQueueAllInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(QueueAllInfoRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(TaskServiceServer).GetQueueAllInfo(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.tasks.TaskService/GetQueueAllInfo", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(TaskServiceServer).GetQueueAllInfo(ctx, req.(*QueueAllInfoRequest)) + } + return interceptor(ctx, in, info, handler) +} + +var _TaskService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.tasks.TaskService", + HandlerType: (*TaskServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "Poll", + Handler: _TaskService_Poll_Handler, + }, + { + MethodName: "GetTasksInProgress", + Handler: _TaskService_GetTasksInProgress_Handler, + }, + { + MethodName: "GetPendingTaskForWorkflow", + Handler: _TaskService_GetPendingTaskForWorkflow_Handler, + }, + { + MethodName: "UpdateTask", + Handler: _TaskService_UpdateTask_Handler, + }, + { + MethodName: "AckTask", + Handler: _TaskService_AckTask_Handler, + }, + { + MethodName: "AddLog", + Handler: _TaskService_AddLog_Handler, + }, + { + MethodName: "GetTaskLogs", + Handler: _TaskService_GetTaskLogs_Handler, + }, + { + MethodName: "GetTask", + Handler: _TaskService_GetTask_Handler, + }, + { + MethodName: "RemoveTaskFromQueue", + Handler: _TaskService_RemoveTaskFromQueue_Handler, + }, + { + MethodName: "GetQueueSizesForTasks", + Handler: _TaskService_GetQueueSizesForTasks_Handler, + }, + { + MethodName: "GetQueueInfo", + Handler: _TaskService_GetQueueInfo_Handler, + }, + { + MethodName: "GetQueueAllInfo", + Handler: _TaskService_GetQueueAllInfo_Handler, + }, + }, + Streams: []grpc.StreamDesc{ + { + StreamName: "BatchPoll", + Handler: _TaskService_BatchPoll_Handler, + ServerStreams: true, + }, + }, + Metadata: "grpc/task_service.proto", +} + +func init() { + proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_2cd893b942ad08bb) +} + +var fileDescriptor_task_service_2cd893b942ad08bb = []byte{ + // 1114 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xdd, 0x72, 0xdb, 0x54, + 0x10, 0x1e, 0xc5, 0x89, 0x53, 0xaf, 0x93, 0xd4, 0x39, 0xf9, 0x33, 0x2a, 0x0c, 0x41, 0x2d, 0x6d, + 0x02, 0x54, 0xe9, 0x24, 0x0c, 0xd0, 0x0c, 0x33, 0x6d, 0x32, 0x43, 0x82, 0x69, 0xe8, 0x04, 0xa5, + 0x94, 0x9f, 0x1b, 0xa3, 0x48, 0xc7, 0xb2, 0xb0, 0xac, 0xe3, 0x48, 0x47, 0x69, 0xd2, 0xe7, 0xe0, + 0x8e, 0x07, 0xe1, 0x4d, 0x78, 0x07, 0x5e, 0x81, 0x2b, 0xe6, 0xfc, 0x48, 0x3a, 0x8e, 0x25, 0xdb, + 0x19, 0xa6, 0x77, 0x3a, 0xab, 0xdd, 0xfd, 0xbe, 0x5d, 0xad, 0xf7, 0x3b, 0x86, 0x0d, 0x2f, 0x1a, + 0x38, 0x3b, 0xd4, 0x8e, 0x7b, 0xed, 0x18, 0x47, 0x97, 0xbe, 0x83, 0xcd, 0x41, 0x44, 0x28, 0x41, + 0xab, 0x0e, 0x09, 0xdd, 0xc4, 0xa1, 0x24, 0x32, 0x99, 0x8b, 0xc9, 0x5c, 0x62, 0x7d, 0xa3, 0x4f, + 0x5c, 0x1c, 0x70, 0x7f, 0x7c, 0x85, 0x9d, 0x80, 0x78, 0xc2, 0x5d, 0x5f, 0xcf, 0x5f, 0x44, 0x38, + 0x4e, 0x02, 0x2a, 0xed, 0x8d, 0xdc, 0x2e, 0x2c, 0x46, 0x1b, 0xea, 0xa7, 0x24, 0x08, 0x2c, 0x7c, + 0x91, 0xe0, 0x98, 0xa2, 0x7b, 0x50, 0xe3, 0xe8, 0xf4, 0x7a, 0x80, 0x9b, 0xda, 0xa6, 0xb6, 0x55, + 0xb3, 0xee, 0x30, 0xc3, 0xab, 0xeb, 0x01, 0x66, 0x2f, 0xdf, 0x90, 0xa8, 0x87, 0xa3, 0xb6, 0xef, + 0x36, 0x67, 0xc4, 0x4b, 0x61, 0x68, 0xb9, 0x68, 0x1d, 0xaa, 0x2e, 0xe9, 
0xdb, 0x7e, 0xd8, 0xac, + 0xf0, 0x37, 0xf2, 0x64, 0x3c, 0x85, 0x05, 0x01, 0x10, 0x0f, 0x48, 0x18, 0x63, 0xb4, 0x0d, 0xb3, + 0x2c, 0x21, 0x4f, 0x5e, 0xdf, 0x5d, 0x33, 0xf3, 0xc2, 0x38, 0x21, 0xf3, 0x95, 0x1d, 0xf7, 0x2c, + 0xee, 0x62, 0xfc, 0xa1, 0x41, 0xe3, 0xd0, 0xa6, 0x4e, 0xf7, 0x9d, 0x32, 0x44, 0xab, 0x30, 0xe7, + 0x90, 0x24, 0xa4, 0xcd, 0xd9, 0x4d, 0x6d, 0x6b, 0xce, 0x12, 0x07, 0xd4, 0x84, 0x79, 0xea, 0xf7, + 0x31, 0x49, 0x68, 0x73, 0x8e, 0xdb, 0xd3, 0xa3, 0xd1, 0x85, 0x75, 0x46, 0x32, 0x6e, 0x85, 0xa7, + 0x11, 0xf1, 0x22, 0x1c, 0xc7, 0xd3, 0x72, 0x8b, 0xa9, 0x1d, 0xd1, 0x76, 0x0f, 0x5f, 0xa7, 0xdc, + 0xb8, 0xe1, 0x05, 0xbe, 0xce, 0x39, 0x54, 0x14, 0x0e, 0xc6, 0x11, 0x6c, 0x8c, 0x20, 0xc9, 0x36, + 0x7e, 0x0a, 0x73, 0x7c, 0x06, 0x9a, 0xda, 0x66, 0xa5, 0xbc, 0x8f, 0xc2, 0xc7, 0xf8, 0x05, 0xd0, + 0x29, 0x0e, 0x5d, 0x3f, 0xf4, 0xb8, 0x55, 0xb2, 0xfd, 0x10, 0xea, 0xac, 0x37, 0x9d, 0x80, 0xbc, + 0x61, 0xed, 0x12, 0x7c, 0x21, 0x35, 0xb5, 0x5c, 0x64, 0xc0, 0x22, 0x2f, 0x27, 0xc2, 0x9d, 0x76, + 0x68, 0xf7, 0xb1, 0x64, 0x5d, 0xa7, 0x3c, 0x49, 0xe7, 0xa5, 0xdd, 0xc7, 0xc6, 0x73, 0x58, 0x19, + 0x4a, 0x7d, 0xfb, 0xaf, 0xfc, 0x2d, 0x2c, 0xff, 0x38, 0x70, 0x6d, 0x8a, 0x55, 0x6e, 0x7b, 0x50, + 0x15, 0x83, 0x2b, 0x33, 0xdc, 0x2b, 0xce, 0xc0, 0x5d, 0x2c, 0xe9, 0x6a, 0x3c, 0x06, 0xa4, 0x66, + 0x92, 0x54, 0x36, 0x60, 0x9e, 0x57, 0x91, 0x95, 0x58, 0x65, 0xc7, 0x96, 0x6b, 0x1c, 0xc1, 0xd2, + 0x81, 0xd3, 0x53, 0x51, 0xcb, 0x5c, 0xc7, 0xce, 0x95, 0x71, 0x1f, 0xee, 0x66, 0x79, 0x24, 0x66, + 0x03, 0x2a, 0xb6, 0x23, 0xaa, 0xbf, 0x63, 0xb1, 0x47, 0x63, 0x1f, 0x16, 0x0f, 0x5c, 0xf7, 0x84, + 0x78, 0x13, 0xb1, 0x1a, 0x50, 0x09, 0x88, 0x27, 0x51, 0xd8, 0xa3, 0xd1, 0x80, 0xa5, 0x34, 0x56, + 0xe4, 0x67, 0x95, 0x1e, 0x63, 0xca, 0x20, 0x4f, 0x88, 0x17, 0x4f, 0x4a, 0x69, 0x1c, 0xc3, 0xca, + 0x90, 0xbb, 0x64, 0xf9, 0x04, 0x66, 0x03, 0xe2, 0xa5, 0x23, 0xf4, 0x7e, 0x61, 0x8b, 0xbf, 0xb9, + 0xc2, 0x0e, 0x43, 0xe6, 0x9e, 0xc6, 0x36, 0x2c, 0xc9, 0x44, 0x13, 0x31, 0xbf, 0x86, 0xbb, 0x99, + 0xeb, 0xed, 0x87, 0xa2, 0x05, 0xcb, 0x16, 0xee, 0x93, 0xcb, 0xa1, 0xa1, 0x18, 0xfb, 0xf3, 0x52, + 0x88, 0xcc, 0x0c, 0x11, 0x59, 0x05, 0xa4, 0xa6, 0x92, 0x1d, 0xdc, 0x85, 0xe5, 0x1f, 0x12, 0x9c, + 0xe0, 0x33, 0xff, 0x2d, 0xce, 0x1a, 0xf8, 0x01, 0x40, 0x06, 0x20, 0xda, 0x52, 0xb3, 0x6a, 0x29, + 0x42, 0x6c, 0xfc, 0xa5, 0x01, 0x52, 0x83, 0x64, 0x59, 0xbf, 0xc1, 0xd2, 0x05, 0xb3, 0xb6, 0x3b, + 0x24, 0x6a, 0xcb, 0x02, 0x59, 0x43, 0xf7, 0xcd, 0xa2, 0xa5, 0x6d, 0x8e, 0x66, 0x10, 0xa6, 0x23, + 0x12, 0xf1, 0x86, 0x87, 0x34, 0xba, 0xb6, 0x16, 0x2e, 0x14, 0x93, 0xfe, 0x4c, 0x92, 0x55, 0x5d, + 0xd8, 0x9c, 0xb0, 0x4d, 0x22, 0xfa, 0xc0, 0x1e, 0xd9, 0x12, 0xb9, 0xb4, 0x83, 0x44, 0xfc, 0x4e, + 0xe7, 0x2c, 0x71, 0xd8, 0x9f, 0xf9, 0x4a, 0x33, 0x10, 0x34, 0x78, 0x82, 0x56, 0xd8, 0x21, 0xb2, + 0x58, 0xe3, 0x4f, 0x4d, 0x66, 0x15, 0x46, 0x59, 0xcc, 0x0b, 0xa8, 0x72, 0xe8, 0x74, 0x2a, 0xf6, + 0xc6, 0x14, 0xa1, 0x06, 0x0a, 0x4b, 0x2c, 0xd8, 0xcb, 0x14, 0xfa, 0x53, 0xa8, 0x2b, 0xe6, 0x49, + 0x8c, 0x2b, 0x2a, 0xe3, 0x35, 0x58, 0xe1, 0xa1, 0x07, 0x41, 0xa0, 0x92, 0xfe, 0xa7, 0x02, 0xab, + 0xc3, 0x76, 0xc9, 0xfb, 0xe5, 0x0d, 0xde, 0x5f, 0x8c, 0xe1, 0x7d, 0x23, 0xb6, 0x90, 0xfa, 0x97, + 0x50, 0x3b, 0xeb, 0xda, 0x91, 0xcb, 0x1c, 0x11, 0x82, 0xd9, 0xd8, 0x7f, 0x2b, 0x66, 0xae, 0x62, + 0xf1, 0x67, 0xa6, 0x26, 0x89, 0xed, 0xf4, 0xb0, 0x2b, 0xb9, 0xcb, 0x93, 0xfe, 0xb7, 0x06, 0xb5, + 0xac, 0x3b, 0xe8, 0x67, 0xa8, 0xc6, 0x2c, 0x4d, 0x4a, 0xeb, 0xf9, 0x6d, 0x69, 0x31, 0x8b, 0xc9, + 0x99, 0xa4, 0x04, 0x45, 0x3e, 0x9d, 0x40, 0x5d, 0x31, 0x17, 0xf4, 0xf6, 0x3b, 0xb5, 0xb7, 0xf5, + 
0xdd, 0xcf, 0x6f, 0x81, 0x9c, 0x55, 0xae, 0x7c, 0x11, 0x06, 0x38, 0xfe, 0x63, 0xfe, 0x0f, 0xc0, + 0x7c, 0x9c, 0x72, 0xc0, 0xdd, 0x7f, 0x6b, 0x50, 0x67, 0xe3, 0x7e, 0x26, 0x6e, 0x42, 0xe8, 0x7b, + 0x98, 0x65, 0x17, 0x01, 0xf4, 0x51, 0x71, 0x62, 0xe5, 0x92, 0xa0, 0x1b, 0xe3, 0x5c, 0xe4, 0xc4, + 0x9c, 0x40, 0x2d, 0xbb, 0x5c, 0xa0, 0x87, 0xc5, 0x01, 0x37, 0x6f, 0x1f, 0x7a, 0xf1, 0xd2, 0x7a, + 0xa2, 0xa1, 0x8b, 0x6c, 0x23, 0x2b, 0x6a, 0x8d, 0x3e, 0x2b, 0x4e, 0x5b, 0x7c, 0x7d, 0xd0, 0x1f, + 0x4f, 0xe9, 0x2d, 0x0b, 0x18, 0xc0, 0x7b, 0xc7, 0x98, 0x2a, 0xea, 0x7b, 0x44, 0xa2, 0x9f, 0xa4, + 0x7a, 0xa3, 0xad, 0x92, 0x0e, 0x8c, 0x5c, 0x03, 0xf4, 0xed, 0x29, 0x3c, 0x25, 0x62, 0x1b, 0x20, + 0x17, 0x58, 0xf4, 0xa8, 0x38, 0x70, 0x44, 0xcc, 0xf5, 0xad, 0xc9, 0x8e, 0x12, 0xe0, 0x35, 0xcc, + 0x4b, 0x29, 0x45, 0x0f, 0x8a, 0x83, 0x86, 0x15, 0x5b, 0xff, 0x78, 0x82, 0x97, 0xcc, 0x7b, 0x06, + 0x55, 0xa1, 0xa0, 0xe8, 0x7e, 0x49, 0x80, 0xaa, 0xcd, 0xfa, 0x83, 0xf1, 0x4e, 0x32, 0xe9, 0x39, + 0xd4, 0x15, 0x55, 0x2d, 0xeb, 0xf8, 0xa8, 0x4e, 0x97, 0x75, 0xbc, 0x48, 0xa2, 0x5f, 0xc3, 0xbc, + 0x34, 0x97, 0x35, 0x64, 0x58, 0x8f, 0xcb, 0x1a, 0x72, 0x53, 0x8a, 0xbb, 0xb0, 0x92, 0x8b, 0xe2, + 0x51, 0x44, 0xfa, 0xfc, 0x17, 0x58, 0xf6, 0x49, 0x47, 0xa4, 0xb8, 0xec, 0x93, 0x8e, 0x0a, 0x2d, + 0xfa, 0x1d, 0xd6, 0x8e, 0x31, 0xcd, 0x45, 0x4f, 0x6a, 0x58, 0x5c, 0x86, 0x35, 0xa2, 0xca, 0x65, + 0x58, 0x05, 0x4a, 0xdc, 0x86, 0x85, 0x14, 0x8b, 0x6f, 0xdf, 0x87, 0x13, 0xc5, 0x4b, 0x20, 0x3c, + 0x9a, 0x52, 0xe4, 0x50, 0x97, 0x5f, 0x6a, 0xd4, 0x15, 0x86, 0xb6, 0xa7, 0x59, 0x73, 0x02, 0xe6, + 0x93, 0xe9, 0x37, 0xe2, 0x61, 0x00, 0xba, 0x43, 0xfa, 0x66, 0x88, 0x69, 0x27, 0xf0, 0xaf, 0x6e, + 0x04, 0x1e, 0x2e, 0x2a, 0x7b, 0xf1, 0xf4, 0xfc, 0xd7, 0x67, 0x9e, 0x4f, 0xbb, 0xc9, 0xb9, 0xe9, + 0x90, 0xfe, 0x8e, 0x8c, 0xd8, 0xc9, 0x22, 0x76, 0x9c, 0xc0, 0xc7, 0x21, 0xdd, 0xf1, 0x08, 0xff, + 0x73, 0x99, 0xdb, 0xb3, 0xff, 0x9a, 0xf1, 0x79, 0x95, 0xef, 0xb2, 0xbd, 0xff, 0x02, 0x00, 0x00, + 0xff, 0xff, 0xc5, 0xda, 0xa9, 0x5e, 0x80, 0x0e, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/grpc/workflow_service.pb.go b/client/gogrpc/conductor/grpc/workflow_service.pb.go deleted file mode 100644 index 37f12229ff..0000000000 --- a/client/gogrpc/conductor/grpc/workflow_service.pb.go +++ /dev/null @@ -1,1171 +0,0 @@ -// Code generated by protoc-gen-go. DO NOT EDIT. -// source: grpc/workflow_service.proto - -package grpc // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc" - -import proto "github.com/golang/protobuf/proto" -import fmt "fmt" -import math "math" -import empty "github.com/golang/protobuf/ptypes/empty" -import model "github.com/netflix/conductor/client/gogrpc/conductor/model" - -import ( - context "golang.org/x/net/context" - grpc "google.golang.org/grpc" -) - -// Reference imports to suppress errors if they are not otherwise used. -var _ = proto.Marshal -var _ = fmt.Errorf -var _ = math.Inf - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the proto package it is being compiled against. -// A compilation error at this line likely means your copy of the -// proto package needs to be updated. 
-const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package - -type GetWorkflowsRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed" json:"include_closed,omitempty"` - IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } -func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsRequest) ProtoMessage() {} -func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{0} -} -func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) -} -func (m *GetWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsRequest.Merge(dst, src) -} -func (m *GetWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsRequest.Size(m) -} -func (m *GetWorkflowsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowsRequest proto.InternalMessageInfo - -func (m *GetWorkflowsRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetWorkflowsRequest) GetCorrelationId() []string { - if m != nil { - return m.CorrelationId - } - return nil -} - -func (m *GetWorkflowsRequest) GetIncludeClosed() bool { - if m != nil { - return m.IncludeClosed - } - return false -} - -func (m *GetWorkflowsRequest) GetIncludeTasks() bool { - if m != nil { - return m.IncludeTasks - } - return false -} - -type GetWorkflowsResponse struct { - WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } -func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsResponse) ProtoMessage() {} -func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{1} -} -func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) -} -func (m *GetWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsResponse.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsResponse.Merge(dst, src) -} -func (m *GetWorkflowsResponse) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsResponse.Size(m) -} -func (m *GetWorkflowsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsResponse.DiscardUnknown(m) -} - -var 
xxx_messageInfo_GetWorkflowsResponse proto.InternalMessageInfo - -func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsResponse_Workflows { - if m != nil { - return m.WorkflowsById - } - return nil -} - -type GetWorkflowsResponse_Workflows struct { - Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows" json:"workflows,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResponse_Workflows{} } -func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} -func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{1, 0} -} -func (m *GetWorkflowsResponse_Workflows) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) -} -func (m *GetWorkflowsResponse_Workflows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowsResponse_Workflows) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowsResponse_Workflows.Merge(dst, src) -} -func (m *GetWorkflowsResponse_Workflows) XXX_Size() int { - return xxx_messageInfo_GetWorkflowsResponse_Workflows.Size(m) -} -func (m *GetWorkflowsResponse_Workflows) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowsResponse_Workflows.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowsResponse_Workflows proto.InternalMessageInfo - -func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { - if m != nil { - return m.Workflows - } - return nil -} - -type GetWorkflowStatusRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusRequest{} } -func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } -func (*GetWorkflowStatusRequest) ProtoMessage() {} -func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{2} -} -func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) -} -func (m *GetWorkflowStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetWorkflowStatusRequest.Marshal(b, m, deterministic) -} -func (dst *GetWorkflowStatusRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetWorkflowStatusRequest.Merge(dst, src) -} -func (m *GetWorkflowStatusRequest) XXX_Size() int { - return xxx_messageInfo_GetWorkflowStatusRequest.Size(m) -} -func (m *GetWorkflowStatusRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetWorkflowStatusRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetWorkflowStatusRequest proto.InternalMessageInfo - -func (m *GetWorkflowStatusRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { - if m != nil { - return m.IncludeTasks 
- } - return false -} - -type RemoveWorkflowRequest struct { - WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId" json:"workflod_id,omitempty"` - ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow" json:"archive_workflow,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } -func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*RemoveWorkflowRequest) ProtoMessage() {} -func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{3} -} -func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) -} -func (m *RemoveWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_RemoveWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *RemoveWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_RemoveWorkflowRequest.Merge(dst, src) -} -func (m *RemoveWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_RemoveWorkflowRequest.Size(m) -} -func (m *RemoveWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_RemoveWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_RemoveWorkflowRequest proto.InternalMessageInfo - -func (m *RemoveWorkflowRequest) GetWorkflodId() string { - if m != nil { - return m.WorkflodId - } - return "" -} - -func (m *RemoveWorkflowRequest) GetArchiveWorkflow() bool { - if m != nil { - return m.ArchiveWorkflow - } - return false -} - -type GetRunningWorkflowsRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` - StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"` - EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsRequest{} } -func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) } -func (*GetRunningWorkflowsRequest) ProtoMessage() {} -func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{4} -} -func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b) -} -func (m *GetRunningWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetRunningWorkflowsRequest.Marshal(b, m, deterministic) -} -func (dst *GetRunningWorkflowsRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetRunningWorkflowsRequest.Merge(dst, src) -} -func (m *GetRunningWorkflowsRequest) XXX_Size() int { - return xxx_messageInfo_GetRunningWorkflowsRequest.Size(m) -} -func (m *GetRunningWorkflowsRequest) XXX_DiscardUnknown() { - xxx_messageInfo_GetRunningWorkflowsRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_GetRunningWorkflowsRequest proto.InternalMessageInfo - -func (m *GetRunningWorkflowsRequest) GetName() string { - if m != nil { - return m.Name - } - return "" -} - -func (m *GetRunningWorkflowsRequest) GetVersion() int32 { - 
if m != nil { - return m.Version - } - return 0 -} - -func (m *GetRunningWorkflowsRequest) GetStartTime() int64 { - if m != nil { - return m.StartTime - } - return 0 -} - -func (m *GetRunningWorkflowsRequest) GetEndTime() int64 { - if m != nil { - return m.EndTime - } - return 0 -} - -type GetRunningWorkflowsResponse struct { - WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds" json:"workflow_ids,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflowsResponse{} } -func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) } -func (*GetRunningWorkflowsResponse) ProtoMessage() {} -func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{5} -} -func (m *GetRunningWorkflowsResponse) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b) -} -func (m *GetRunningWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_GetRunningWorkflowsResponse.Marshal(b, m, deterministic) -} -func (dst *GetRunningWorkflowsResponse) XXX_Merge(src proto.Message) { - xxx_messageInfo_GetRunningWorkflowsResponse.Merge(dst, src) -} -func (m *GetRunningWorkflowsResponse) XXX_Size() int { - return xxx_messageInfo_GetRunningWorkflowsResponse.Size(m) -} -func (m *GetRunningWorkflowsResponse) XXX_DiscardUnknown() { - xxx_messageInfo_GetRunningWorkflowsResponse.DiscardUnknown(m) -} - -var xxx_messageInfo_GetRunningWorkflowsResponse proto.InternalMessageInfo - -func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string { - if m != nil { - return m.WorkflowIds - } - return nil -} - -type WorkflowId struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *WorkflowId) Reset() { *m = WorkflowId{} } -func (m *WorkflowId) String() string { return proto.CompactTextString(m) } -func (*WorkflowId) ProtoMessage() {} -func (*WorkflowId) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{6} -} -func (m *WorkflowId) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_WorkflowId.Unmarshal(m, b) -} -func (m *WorkflowId) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_WorkflowId.Marshal(b, m, deterministic) -} -func (dst *WorkflowId) XXX_Merge(src proto.Message) { - xxx_messageInfo_WorkflowId.Merge(dst, src) -} -func (m *WorkflowId) XXX_Size() int { - return xxx_messageInfo_WorkflowId.Size(m) -} -func (m *WorkflowId) XXX_DiscardUnknown() { - xxx_messageInfo_WorkflowId.DiscardUnknown(m) -} - -var xxx_messageInfo_WorkflowId proto.InternalMessageInfo - -func (m *WorkflowId) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -type SkipTaskRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"` - Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request" json:"request,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - 
XXX_sizecache int32 `json:"-"` -} - -func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } -func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } -func (*SkipTaskRequest) ProtoMessage() {} -func (*SkipTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{7} -} -func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) -} -func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) -} -func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_SkipTaskRequest.Merge(dst, src) -} -func (m *SkipTaskRequest) XXX_Size() int { - return xxx_messageInfo_SkipTaskRequest.Size(m) -} -func (m *SkipTaskRequest) XXX_DiscardUnknown() { - xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo - -func (m *SkipTaskRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *SkipTaskRequest) GetTaskReferenceName() string { - if m != nil { - return m.TaskReferenceName - } - return "" -} - -func (m *SkipTaskRequest) GetRequest() *model.SkipTaskRequest { - if m != nil { - return m.Request - } - return nil -} - -type TerminateWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - Reason string `protobuf:"bytes,2,opt,name=reason" json:"reason,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowRequest{} } -func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } -func (*TerminateWorkflowRequest) ProtoMessage() {} -func (*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_ebe287e56823ea2c, []int{8} -} -func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { - return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) -} -func (m *TerminateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - return xxx_messageInfo_TerminateWorkflowRequest.Marshal(b, m, deterministic) -} -func (dst *TerminateWorkflowRequest) XXX_Merge(src proto.Message) { - xxx_messageInfo_TerminateWorkflowRequest.Merge(dst, src) -} -func (m *TerminateWorkflowRequest) XXX_Size() int { - return xxx_messageInfo_TerminateWorkflowRequest.Size(m) -} -func (m *TerminateWorkflowRequest) XXX_DiscardUnknown() { - xxx_messageInfo_TerminateWorkflowRequest.DiscardUnknown(m) -} - -var xxx_messageInfo_TerminateWorkflowRequest proto.InternalMessageInfo - -func (m *TerminateWorkflowRequest) GetWorkflowId() string { - if m != nil { - return m.WorkflowId - } - return "" -} - -func (m *TerminateWorkflowRequest) GetReason() string { - if m != nil { - return m.Reason - } - return "" -} - -func init() { - proto.RegisterType((*GetWorkflowsRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowsRequest") - proto.RegisterType((*GetWorkflowsResponse)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse") - proto.RegisterMapType((map[string]*GetWorkflowsResponse_Workflows)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse.WorkflowsByIdEntry") - proto.RegisterType((*GetWorkflowsResponse_Workflows)(nil), "com.netflix.conductor.grpc.GetWorkflowsResponse.Workflows") - 
proto.RegisterType((*GetWorkflowStatusRequest)(nil), "com.netflix.conductor.grpc.GetWorkflowStatusRequest") - proto.RegisterType((*RemoveWorkflowRequest)(nil), "com.netflix.conductor.grpc.RemoveWorkflowRequest") - proto.RegisterType((*GetRunningWorkflowsRequest)(nil), "com.netflix.conductor.grpc.GetRunningWorkflowsRequest") - proto.RegisterType((*GetRunningWorkflowsResponse)(nil), "com.netflix.conductor.grpc.GetRunningWorkflowsResponse") - proto.RegisterType((*WorkflowId)(nil), "com.netflix.conductor.grpc.WorkflowId") - proto.RegisterType((*SkipTaskRequest)(nil), "com.netflix.conductor.grpc.SkipTaskRequest") - proto.RegisterType((*TerminateWorkflowRequest)(nil), "com.netflix.conductor.grpc.TerminateWorkflowRequest") -} - -// Reference imports to suppress errors if they are not otherwise used. -var _ context.Context -var _ grpc.ClientConn - -// This is a compile-time assertion to ensure that this generated file -// is compatible with the grpc package it is being compiled against. -const _ = grpc.SupportPackageIsVersion4 - -// WorkflowServiceClient is the client API for WorkflowService service. -// -// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream. -type WorkflowServiceClient interface { - // POST / - StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) - // GET /{name}/correlated/{correlationId} - GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) - // GET /{workflowId} - GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) - // DELETE /{workflodId}/remove - RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) - // GET /running/{name} - GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) - // PUT /decide/{workflowId} - DecideWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // PUT /{workflowId}/pause - PauseWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // PUT /{workflowId}/pause - ResumeWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // PUT /{workflowId}/skiptask/{taskReferenceName} - SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) - // POST /{workflowId}/rerun - RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) - // POST /{workflowId}/restart - RestartWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // POST /{workflowId}retry - RetryWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // POST /{workflowId}/resetcallbacks - ResetWorkflowCallbacks(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) - // DELETE /{workflowId} - TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) - // GET /search - Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) - SearchByTasks(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) -} - -type 
workflowServiceClient struct { - cc *grpc.ClientConn -} - -func NewWorkflowServiceClient(cc *grpc.ClientConn) WorkflowServiceClient { - return &workflowServiceClient{cc} -} - -func (c *workflowServiceClient) StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) { - out := new(WorkflowId) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/StartWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) { - out := new(GetWorkflowsResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetWorkflows", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) { - out := new(model.Workflow) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetWorkflowStatus", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RemoveWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) { - out := new(GetRunningWorkflowsResponse) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/GetRunningWorkflows", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) DecideWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/DecideWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) PauseWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/PauseWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) ResumeWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/ResumeWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/SkipTaskFromWorkflow", in, out, opts...) 
- if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*WorkflowId, error) { - out := new(WorkflowId) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RerunWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RestartWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RestartWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) RetryWorkflow(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/RetryWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) ResetWorkflowCallbacks(ctx context.Context, in *WorkflowId, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/ResetWorkflowCallbacks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*empty.Empty, error) { - out := new(empty.Empty) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/TerminateWorkflow", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) Search(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { - out := new(WorkflowSummarySearchResult) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/Search", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -func (c *workflowServiceClient) SearchByTasks(ctx context.Context, in *SearchRequest, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { - out := new(WorkflowSummarySearchResult) - err := c.cc.Invoke(ctx, "/com.netflix.conductor.grpc.WorkflowService/SearchByTasks", in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - -// WorkflowServiceServer is the server API for WorkflowService service. 
-type WorkflowServiceServer interface { - // POST / - StartWorkflow(context.Context, *model.StartWorkflowRequest) (*WorkflowId, error) - // GET /{name}/correlated/{correlationId} - GetWorkflows(context.Context, *GetWorkflowsRequest) (*GetWorkflowsResponse, error) - // GET /{workflowId} - GetWorkflowStatus(context.Context, *GetWorkflowStatusRequest) (*model.Workflow, error) - // DELETE /{workflodId}/remove - RemoveWorkflow(context.Context, *RemoveWorkflowRequest) (*empty.Empty, error) - // GET /running/{name} - GetRunningWorkflows(context.Context, *GetRunningWorkflowsRequest) (*GetRunningWorkflowsResponse, error) - // PUT /decide/{workflowId} - DecideWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) - // PUT /{workflowId}/pause - PauseWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) - // PUT /{workflowId}/pause - ResumeWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) - // PUT /{workflowId}/skiptask/{taskReferenceName} - SkipTaskFromWorkflow(context.Context, *SkipTaskRequest) (*empty.Empty, error) - // POST /{workflowId}/rerun - RerunWorkflow(context.Context, *model.RerunWorkflowRequest) (*WorkflowId, error) - // POST /{workflowId}/restart - RestartWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) - // POST /{workflowId}retry - RetryWorkflow(context.Context, *WorkflowId) (*empty.Empty, error) - // POST /{workflowId}/resetcallbacks - ResetWorkflowCallbacks(context.Context, *WorkflowId) (*empty.Empty, error) - // DELETE /{workflowId} - TerminateWorkflow(context.Context, *TerminateWorkflowRequest) (*empty.Empty, error) - // GET /search - Search(context.Context, *SearchRequest) (*WorkflowSummarySearchResult, error) - SearchByTasks(context.Context, *SearchRequest) (*WorkflowSummarySearchResult, error) -} - -func RegisterWorkflowServiceServer(s *grpc.Server, srv WorkflowServiceServer) { - s.RegisterService(&_WorkflowService_serviceDesc, srv) -} - -func _WorkflowService_StartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.StartWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).StartWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/StartWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).StartWorkflow(ctx, req.(*model.StartWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetWorkflows(ctx, req.(*GetWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetWorkflowStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetWorkflowStatusRequest) - if err := 
dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetWorkflowStatus", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, req.(*GetWorkflowStatusRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RemoveWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(RemoveWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RemoveWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, req.(*RemoveWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_GetRunningWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetRunningWorkflowsRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/GetRunningWorkflows", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, req.(*GetRunningWorkflowsRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_DecideWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).DecideWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/DecideWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).DecideWorkflow(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_PauseWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).PauseWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/PauseWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).PauseWorkflow(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_ResumeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, 
err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/ResumeWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_SkipTaskFromWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SkipTaskRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/SkipTaskFromWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, req.(*SkipTaskRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RerunWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(model.RerunWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RerunWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RerunWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RerunWorkflow(ctx, req.(*model.RerunWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RestartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RestartWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RestartWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RestartWorkflow(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_RetryWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).RetryWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/RetryWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).RetryWorkflow(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_ResetWorkflowCallbacks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(WorkflowId) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return 
srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/ResetWorkflowCallbacks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, req.(*WorkflowId)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_TerminateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(TerminateWorkflowRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/TerminateWorkflow", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, req.(*TerminateWorkflowRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SearchRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).Search(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/Search", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).Search(ctx, req.(*SearchRequest)) - } - return interceptor(ctx, in, info, handler) -} - -func _WorkflowService_SearchByTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(SearchRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(WorkflowServiceServer).SearchByTasks(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: "/com.netflix.conductor.grpc.WorkflowService/SearchByTasks", - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(WorkflowServiceServer).SearchByTasks(ctx, req.(*SearchRequest)) - } - return interceptor(ctx, in, info, handler) -} - -var _WorkflowService_serviceDesc = grpc.ServiceDesc{ - ServiceName: "com.netflix.conductor.grpc.WorkflowService", - HandlerType: (*WorkflowServiceServer)(nil), - Methods: []grpc.MethodDesc{ - { - MethodName: "StartWorkflow", - Handler: _WorkflowService_StartWorkflow_Handler, - }, - { - MethodName: "GetWorkflows", - Handler: _WorkflowService_GetWorkflows_Handler, - }, - { - MethodName: "GetWorkflowStatus", - Handler: _WorkflowService_GetWorkflowStatus_Handler, - }, - { - MethodName: "RemoveWorkflow", - Handler: _WorkflowService_RemoveWorkflow_Handler, - }, - { - MethodName: "GetRunningWorkflows", - Handler: _WorkflowService_GetRunningWorkflows_Handler, - }, - { - MethodName: "DecideWorkflow", - Handler: _WorkflowService_DecideWorkflow_Handler, - }, - { - MethodName: "PauseWorkflow", - Handler: _WorkflowService_PauseWorkflow_Handler, - }, - { - MethodName: "ResumeWorkflow", - Handler: _WorkflowService_ResumeWorkflow_Handler, - }, - { - MethodName: "SkipTaskFromWorkflow", - Handler: _WorkflowService_SkipTaskFromWorkflow_Handler, - }, - { - MethodName: 
"RerunWorkflow", - Handler: _WorkflowService_RerunWorkflow_Handler, - }, - { - MethodName: "RestartWorkflow", - Handler: _WorkflowService_RestartWorkflow_Handler, - }, - { - MethodName: "RetryWorkflow", - Handler: _WorkflowService_RetryWorkflow_Handler, - }, - { - MethodName: "ResetWorkflowCallbacks", - Handler: _WorkflowService_ResetWorkflowCallbacks_Handler, - }, - { - MethodName: "TerminateWorkflow", - Handler: _WorkflowService_TerminateWorkflow_Handler, - }, - { - MethodName: "Search", - Handler: _WorkflowService_Search_Handler, - }, - { - MethodName: "SearchByTasks", - Handler: _WorkflowService_SearchByTasks_Handler, - }, - }, - Streams: []grpc.StreamDesc{}, - Metadata: "grpc/workflow_service.proto", -} - -func init() { - proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_ebe287e56823ea2c) -} - -var fileDescriptor_workflow_service_ebe287e56823ea2c = []byte{ - // 919 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x56, 0x5f, 0x6f, 0x1b, 0x45, - 0x10, 0x97, 0xed, 0x34, 0xa9, 0xc7, 0x71, 0x5c, 0x6f, 0x43, 0x64, 0x2e, 0x42, 0x18, 0xa3, 0x22, - 0x57, 0xc0, 0x19, 0x02, 0xe2, 0x4f, 0x9f, 0x50, 0x42, 0x5b, 0xe5, 0xa5, 0x98, 0x75, 0xa4, 0x22, - 0xfa, 0x70, 0x9c, 0xef, 0x26, 0xee, 0xd5, 0x77, 0xb7, 0xee, 0xee, 0x9e, 0x83, 0x25, 0x1e, 0x11, - 0x9f, 0x82, 0x4f, 0xc0, 0x47, 0xe1, 0x95, 0x2f, 0x84, 0x76, 0x6f, 0xef, 0x6c, 0xc7, 0x3e, 0xd7, - 0x81, 0xa8, 0x6f, 0x77, 0xbf, 0x99, 0xfd, 0xcd, 0xec, 0xcc, 0xfc, 0x76, 0x17, 0x8e, 0x47, 0x7c, - 0xe2, 0xf5, 0xae, 0x18, 0x1f, 0x5f, 0x86, 0xec, 0xca, 0x11, 0xc8, 0xa7, 0x81, 0x87, 0xf6, 0x84, - 0x33, 0xc9, 0x88, 0xe5, 0xb1, 0xc8, 0x8e, 0x51, 0x5e, 0x86, 0xc1, 0xaf, 0xb6, 0xc7, 0x62, 0x3f, - 0xf1, 0x24, 0xe3, 0xb6, 0x5a, 0x62, 0x1d, 0x8f, 0x18, 0x1b, 0x85, 0xd8, 0xd3, 0x9e, 0xc3, 0xe4, - 0xb2, 0x87, 0xd1, 0x44, 0xce, 0xd2, 0x85, 0x56, 0x53, 0xb3, 0x0a, 0x74, 0xb9, 0xf7, 0xd2, 0x40, - 0x87, 0x11, 0xf3, 0x31, 0xcc, 0x23, 0x19, 0xf4, 0x38, 0x45, 0xc5, 0x38, 0x98, 0x48, 0x57, 0x8c, - 0x39, 0xbe, 0x4e, 0x50, 0x48, 0x63, 0x6c, 0x1b, 0xa3, 0x74, 0xb9, 0xcc, 0xd6, 0xad, 0xf5, 0xe0, - 0xc8, 0x93, 0x78, 0xad, 0x47, 0xe7, 0xcf, 0x12, 0xdc, 0x7f, 0x8a, 0xf2, 0xb9, 0x31, 0x0a, 0x9a, - 0x5a, 0x09, 0x81, 0x9d, 0xd8, 0x8d, 0xb0, 0x55, 0x6a, 0x97, 0xba, 0x55, 0xaa, 0xbf, 0xc9, 0x03, - 0x38, 0xf0, 0x18, 0xe7, 0x18, 0xba, 0x32, 0x60, 0xb1, 0x13, 0xf8, 0xad, 0x72, 0xbb, 0xd2, 0xad, - 0xd2, 0xfa, 0x02, 0x7a, 0xee, 0x2b, 0xb7, 0x20, 0xf6, 0xc2, 0xc4, 0x47, 0xc7, 0x0b, 0x99, 0x40, - 0xbf, 0x55, 0x69, 0x97, 0xba, 0x77, 0x69, 0xdd, 0xa0, 0x67, 0x1a, 0x24, 0x1f, 0x42, 0x06, 0x38, - 0x6a, 0x6b, 0xa2, 0xb5, 0xa3, 0xbd, 0xf6, 0x0d, 0x78, 0xa1, 0xb0, 0xce, 0x3f, 0x65, 0x38, 0x5c, - 0x4e, 0x4f, 0x4c, 0x58, 0x2c, 0x90, 0x8c, 0xa1, 0x91, 0x6d, 0x48, 0x38, 0xc3, 0x99, 0x4a, 0xa6, - 0xd4, 0xae, 0x74, 0x6b, 0x27, 0x67, 0x76, 0x71, 0x53, 0xec, 0x75, 0x54, 0x76, 0x8e, 0x9c, 0xce, - 0xce, 0xfd, 0xc7, 0xb1, 0xe4, 0x33, 0x5a, 0xbf, 0x5a, 0xc4, 0xac, 0x3e, 0x54, 0x73, 0x27, 0x72, - 0x06, 0xd5, 0xdc, 0x6a, 0x62, 0x3e, 0x28, 0x88, 0xa9, 0x4b, 0x9c, 0xf3, 0xd3, 0xf9, 0x3a, 0xeb, - 0x37, 0x20, 0xab, 0x61, 0xc9, 0x3d, 0xa8, 0x8c, 0x71, 0x66, 0x6a, 0xae, 0x3e, 0x49, 0x1f, 0xee, - 0x4c, 0xdd, 0x30, 0xc1, 0x56, 0xb9, 0x5d, 0xea, 0xd6, 0x4e, 0x1e, 0xfd, 0xf7, 0xcd, 0xd1, 0x94, - 0xe8, 0x51, 0xf9, 0x9b, 0x52, 0xe7, 0x17, 0x68, 0x2d, 0x38, 0x0f, 0xa4, 0x2b, 0x93, 0xbc, 0xf1, - 0xef, 0x43, 0x2d, 0x9f, 0x76, 0x5d, 0x54, 0x95, 0x0b, 0x64, 0xd0, 0xf9, 0x9a, 0xbe, 0x95, 0xd7, - 0xf4, 0xcd, 0x83, 0x77, 0x28, 0x46, 0x6c, 0x8a, 
0xf9, 0xe6, 0x57, 0xe8, 0xfd, 0x55, 0x7a, 0xff, - 0xdc, 0x27, 0x0f, 0xe1, 0x9e, 0x52, 0x45, 0x30, 0x45, 0x27, 0x0b, 0x6a, 0x22, 0x34, 0x0c, 0x9e, - 0x51, 0x76, 0x7e, 0x2f, 0x81, 0xf5, 0x14, 0x25, 0x4d, 0xe2, 0x38, 0x88, 0x47, 0x5b, 0x8d, 0x70, - 0x0b, 0xf6, 0xa6, 0xc8, 0x45, 0xc0, 0x62, 0x4d, 0x7a, 0x87, 0x66, 0xbf, 0xe4, 0x3d, 0x00, 0x2d, - 0x24, 0x47, 0x06, 0x11, 0xea, 0x89, 0xad, 0xd0, 0xaa, 0x46, 0x2e, 0x82, 0x08, 0xc9, 0xbb, 0x70, - 0x17, 0x63, 0x3f, 0x35, 0xee, 0x68, 0xe3, 0x1e, 0xc6, 0xbe, 0x32, 0x75, 0xbe, 0x83, 0xe3, 0xb5, - 0x59, 0x98, 0x49, 0xfd, 0x00, 0xf6, 0x17, 0x0a, 0x9a, 0x8e, 0x4c, 0x95, 0xd6, 0xe6, 0x15, 0x15, - 0x9d, 0x4f, 0x01, 0x9e, 0xcf, 0x0b, 0xfc, 0xa6, 0x0e, 0x74, 0xfe, 0x2a, 0x41, 0x63, 0x30, 0x0e, - 0x26, 0xaa, 0xd4, 0x5b, 0xb7, 0xcd, 0x86, 0xfb, 0xaa, 0x5d, 0x0e, 0xc7, 0x4b, 0xe4, 0x18, 0x7b, - 0xe8, 0xe8, 0xe2, 0x94, 0xb5, 0x63, 0x53, 0x6a, 0x2a, 0x63, 0x79, 0xa6, 0x2a, 0xf5, 0x04, 0xf6, - 0xcc, 0x49, 0xa1, 0x8b, 0x51, 0x3b, 0xf9, 0x64, 0xe3, 0x90, 0x5f, 0xcb, 0x87, 0x66, 0x8b, 0x3b, - 0x03, 0x68, 0x5d, 0x20, 0x8f, 0x82, 0xd8, 0x95, 0x1b, 0x86, 0xa1, 0x20, 0xe9, 0x23, 0xd8, 0xe5, - 0xe8, 0x0a, 0xd3, 0xad, 0x2a, 0x35, 0x7f, 0x27, 0x7f, 0xef, 0x43, 0x23, 0x1f, 0xdf, 0xf4, 0x48, - 0x26, 0xaf, 0xa0, 0x3e, 0x50, 0xed, 0xca, 0x70, 0xf2, 0xf9, 0xe6, 0x84, 0x17, 0x7d, 0x4d, 0x42, - 0xd6, 0x47, 0x9b, 0xf4, 0xb5, 0xd0, 0xa2, 0xd7, 0xb0, 0xbf, 0xa8, 0x36, 0xd2, 0xdb, 0x5e, 0x97, - 0x69, 0xa0, 0xcf, 0x6e, 0x2a, 0x64, 0x32, 0x81, 0xe6, 0x8a, 0x66, 0xc9, 0x97, 0x5b, 0xd2, 0x2c, - 0x49, 0xdc, 0xda, 0xee, 0xb8, 0x22, 0x2f, 0xe0, 0x60, 0x59, 0xc3, 0x85, 0x15, 0xd5, 0xe1, 0xd6, - 0xea, 0xdd, 0x3a, 0xb2, 0xd3, 0x7b, 0xd0, 0xce, 0xee, 0x41, 0xfb, 0xb1, 0xba, 0x07, 0xc9, 0x1f, - 0xe9, 0xbd, 0x73, 0x5d, 0x35, 0xe4, 0xab, 0x37, 0xec, 0xa8, 0x40, 0xec, 0xd6, 0xd7, 0x37, 0x5e, - 0x67, 0xea, 0xda, 0x87, 0x83, 0xef, 0xd1, 0x0b, 0xfc, 0xf9, 0x2e, 0xb7, 0x1c, 0x82, 0xc2, 0xad, - 0xfd, 0x00, 0xf5, 0xbe, 0x9b, 0x88, 0xdb, 0x23, 0xec, 0xab, 0x46, 0x88, 0x24, 0xba, 0x3d, 0xc6, - 0x17, 0x70, 0x98, 0x09, 0xf6, 0x09, 0x67, 0x51, 0xce, 0xfb, 0xf1, 0x26, 0xde, 0x6b, 0x12, 0x2f, - 0x24, 0x7f, 0x05, 0x75, 0xaa, 0x1e, 0x1c, 0x5b, 0x0a, 0x71, 0xc9, 0xf7, 0xa6, 0x42, 0xfc, 0x11, - 0x1a, 0x14, 0xc5, 0x92, 0xec, 0x6f, 0xa1, 0x7d, 0x14, 0x25, 0x9f, 0xdd, 0x1a, 0xe1, 0x4f, 0x70, - 0x44, 0x51, 0xcc, 0xc5, 0x78, 0xe6, 0x86, 0xe1, 0xd0, 0xf5, 0xc6, 0xe2, 0x7f, 0x33, 0xbb, 0xd0, - 0x5c, 0x39, 0x5b, 0x37, 0x9f, 0x09, 0x45, 0x47, 0x71, 0x61, 0x88, 0x10, 0x76, 0x07, 0xfa, 0x99, - 0x4a, 0x1e, 0x6e, 0x9c, 0x0d, 0xed, 0xb3, 0x95, 0x18, 0xf3, 0x33, 0x29, 0x89, 0x22, 0x97, 0xcf, - 0xb2, 0x95, 0x22, 0x09, 0x25, 0x11, 0x50, 0x4f, 0xff, 0x4f, 0x67, 0xfa, 0x1d, 0xf1, 0x36, 0x82, - 0x9e, 0x3e, 0x3b, 0x6d, 0x5e, 0xbb, 0x4b, 0xfa, 0xc3, 0x9f, 0xbf, 0x1d, 0x05, 0xf2, 0x65, 0x32, - 0x54, 0x9c, 0x3d, 0xc3, 0xd9, 0xcb, 0x39, 0x7b, 0x5e, 0x18, 0x60, 0x2c, 0x7b, 0x23, 0xa6, 0xdf, - 0xf1, 0x73, 0x5c, 0xfd, 0x0e, 0x77, 0x75, 0x09, 0xbf, 0xf8, 0x37, 0x00, 0x00, 0xff, 0xff, 0xeb, - 0x48, 0x45, 0x4d, 0x3b, 0x0c, 0x00, 0x00, -} diff --git a/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go b/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go new file mode 100644 index 0000000000..fd87283a0e --- /dev/null +++ b/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go @@ -0,0 +1,1822 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. 
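// Editor's note — a hedged usage sketch, not generated output. Relative to the
// file deleted above, this regenerated copy moves to the dedicated `workflows`
// package and gives every RPC its own request/response message (for example
// StartWorkflowResponse and the PauseWorkflowRequest/PauseWorkflowResponse
// pair) instead of reusing WorkflowId and google.protobuf.Empty. Assuming the
// conventional protoc-gen-go client constructor appears later in this file, a
// caller might look like:
//
//	conn, err := grpc.Dial("localhost:8090", grpc.WithInsecure()) // address is an assumption
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer conn.Close()
//	client := workflows.NewWorkflowServiceClient(conn)
//	status, err := client.GetWorkflowStatus(context.Background(),
//		&workflows.GetWorkflowStatusRequest{WorkflowId: "wf-123", IncludeTasks: true})
//
// The address and the exact return types are assumptions inferred from the
// message definitions below, not confirmed by this hunk.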
+// source: grpc/workflow_service.proto + +package workflows // import "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import search "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search" +import model "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import ( + context "golang.org/x/net/context" + grpc "google.golang.org/grpc" +) + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. +const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type StartWorkflowResponse struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *StartWorkflowResponse) Reset() { *m = StartWorkflowResponse{} } +func (m *StartWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*StartWorkflowResponse) ProtoMessage() {} +func (*StartWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{0} +} +func (m *StartWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_StartWorkflowResponse.Unmarshal(m, b) +} +func (m *StartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_StartWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *StartWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_StartWorkflowResponse.Merge(dst, src) +} +func (m *StartWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_StartWorkflowResponse.Size(m) +} +func (m *StartWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_StartWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_StartWorkflowResponse proto.InternalMessageInfo + +func (m *StartWorkflowResponse) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type GetWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed" json:"include_closed,omitempty"` + IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } +func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsRequest) ProtoMessage() {} +func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{1} +} +func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return 
xxx_messageInfo_GetWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsRequest.Merge(dst, src) +} +func (m *GetWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsRequest.Size(m) +} +func (m *GetWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsRequest proto.InternalMessageInfo + +func (m *GetWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetWorkflowsRequest) GetCorrelationId() []string { + if m != nil { + return m.CorrelationId + } + return nil +} + +func (m *GetWorkflowsRequest) GetIncludeClosed() bool { + if m != nil { + return m.IncludeClosed + } + return false +} + +func (m *GetWorkflowsRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks + } + return false +} + +type GetWorkflowsResponse struct { + WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } +func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse) ProtoMessage() {} +func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2} +} +func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse.Merge(dst, src) +} +func (m *GetWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse.Size(m) +} +func (m *GetWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsResponse proto.InternalMessageInfo + +func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsResponse_Workflows { + if m != nil { + return m.WorkflowsById + } + return nil +} + +type GetWorkflowsResponse_Workflows struct { + Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows" json:"workflows,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResponse_Workflows{} } +func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} +func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2, 0} +} +func (m *GetWorkflowsResponse_Workflows) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Marshal(b, m, deterministic) +} +func (dst 
*GetWorkflowsResponse_Workflows) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowsResponse_Workflows.Merge(dst, src) +} +func (m *GetWorkflowsResponse_Workflows) XXX_Size() int { + return xxx_messageInfo_GetWorkflowsResponse_Workflows.Size(m) +} +func (m *GetWorkflowsResponse_Workflows) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowsResponse_Workflows.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowsResponse_Workflows proto.InternalMessageInfo + +func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { + if m != nil { + return m.Workflows + } + return nil +} + +type GetWorkflowStatusRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusRequest{} } +func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowStatusRequest) ProtoMessage() {} +func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{3} +} +func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) +} +func (m *GetWorkflowStatusRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowStatusRequest.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowStatusRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowStatusRequest.Merge(dst, src) +} +func (m *GetWorkflowStatusRequest) XXX_Size() int { + return xxx_messageInfo_GetWorkflowStatusRequest.Size(m) +} +func (m *GetWorkflowStatusRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowStatusRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowStatusRequest proto.InternalMessageInfo + +func (m *GetWorkflowStatusRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { + if m != nil { + return m.IncludeTasks + } + return false +} + +type GetWorkflowStatusResponse struct { + Workflow *model.Workflow `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetWorkflowStatusResponse) Reset() { *m = GetWorkflowStatusResponse{} } +func (m *GetWorkflowStatusResponse) String() string { return proto.CompactTextString(m) } +func (*GetWorkflowStatusResponse) ProtoMessage() {} +func (*GetWorkflowStatusResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{4} +} +func (m *GetWorkflowStatusResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetWorkflowStatusResponse.Unmarshal(m, b) +} +func (m *GetWorkflowStatusResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetWorkflowStatusResponse.Marshal(b, m, deterministic) +} +func (dst *GetWorkflowStatusResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetWorkflowStatusResponse.Merge(dst, src) +} +func (m *GetWorkflowStatusResponse) XXX_Size() int { + return xxx_messageInfo_GetWorkflowStatusResponse.Size(m) +} +func (m 
*GetWorkflowStatusResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetWorkflowStatusResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetWorkflowStatusResponse proto.InternalMessageInfo + +func (m *GetWorkflowStatusResponse) GetWorkflow() *model.Workflow { + if m != nil { + return m.Workflow + } + return nil +} + +type RemoveWorkflowRequest struct { + WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId" json:"workflod_id,omitempty"` + ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow" json:"archive_workflow,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } +func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RemoveWorkflowRequest) ProtoMessage() {} +func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{5} +} +func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) +} +func (m *RemoveWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RemoveWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveWorkflowRequest.Merge(dst, src) +} +func (m *RemoveWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RemoveWorkflowRequest.Size(m) +} +func (m *RemoveWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveWorkflowRequest proto.InternalMessageInfo + +func (m *RemoveWorkflowRequest) GetWorkflodId() string { + if m != nil { + return m.WorkflodId + } + return "" +} + +func (m *RemoveWorkflowRequest) GetArchiveWorkflow() bool { + if m != nil { + return m.ArchiveWorkflow + } + return false +} + +type RemoveWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RemoveWorkflowResponse) Reset() { *m = RemoveWorkflowResponse{} } +func (m *RemoveWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RemoveWorkflowResponse) ProtoMessage() {} +func (*RemoveWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{6} +} +func (m *RemoveWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RemoveWorkflowResponse.Unmarshal(m, b) +} +func (m *RemoveWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RemoveWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RemoveWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RemoveWorkflowResponse.Merge(dst, src) +} +func (m *RemoveWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RemoveWorkflowResponse.Size(m) +} +func (m *RemoveWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RemoveWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RemoveWorkflowResponse proto.InternalMessageInfo + +type GetRunningWorkflowsRequest struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime" 
json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsRequest{} } +func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsRequest) ProtoMessage() {} +func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{7} +} +func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsRequest.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsRequest.Merge(dst, src) +} +func (m *GetRunningWorkflowsRequest) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsRequest.Size(m) +} +func (m *GetRunningWorkflowsRequest) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsRequest proto.InternalMessageInfo + +func (m *GetRunningWorkflowsRequest) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *GetRunningWorkflowsRequest) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetStartTime() int64 { + if m != nil { + return m.StartTime + } + return 0 +} + +func (m *GetRunningWorkflowsRequest) GetEndTime() int64 { + if m != nil { + return m.EndTime + } + return 0 +} + +type GetRunningWorkflowsResponse struct { + WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds" json:"workflow_ids,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflowsResponse{} } +func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) } +func (*GetRunningWorkflowsResponse) ProtoMessage() {} +func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{8} +} +func (m *GetRunningWorkflowsResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b) +} +func (m *GetRunningWorkflowsResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_GetRunningWorkflowsResponse.Marshal(b, m, deterministic) +} +func (dst *GetRunningWorkflowsResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_GetRunningWorkflowsResponse.Merge(dst, src) +} +func (m *GetRunningWorkflowsResponse) XXX_Size() int { + return xxx_messageInfo_GetRunningWorkflowsResponse.Size(m) +} +func (m *GetRunningWorkflowsResponse) XXX_DiscardUnknown() { + xxx_messageInfo_GetRunningWorkflowsResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_GetRunningWorkflowsResponse proto.InternalMessageInfo + +func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string { + if m != nil { + return m.WorkflowIds + } + return nil +} + +type DecideWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" 
json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DecideWorkflowRequest) Reset() { *m = DecideWorkflowRequest{} } +func (m *DecideWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*DecideWorkflowRequest) ProtoMessage() {} +func (*DecideWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{9} +} +func (m *DecideWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DecideWorkflowRequest.Unmarshal(m, b) +} +func (m *DecideWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DecideWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *DecideWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_DecideWorkflowRequest.Merge(dst, src) +} +func (m *DecideWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_DecideWorkflowRequest.Size(m) +} +func (m *DecideWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_DecideWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_DecideWorkflowRequest proto.InternalMessageInfo + +func (m *DecideWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type DecideWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *DecideWorkflowResponse) Reset() { *m = DecideWorkflowResponse{} } +func (m *DecideWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*DecideWorkflowResponse) ProtoMessage() {} +func (*DecideWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{10} +} +func (m *DecideWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_DecideWorkflowResponse.Unmarshal(m, b) +} +func (m *DecideWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_DecideWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *DecideWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_DecideWorkflowResponse.Merge(dst, src) +} +func (m *DecideWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_DecideWorkflowResponse.Size(m) +} +func (m *DecideWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_DecideWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_DecideWorkflowResponse proto.InternalMessageInfo + +type PauseWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseWorkflowRequest) Reset() { *m = PauseWorkflowRequest{} } +func (m *PauseWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*PauseWorkflowRequest) ProtoMessage() {} +func (*PauseWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{11} +} +func (m *PauseWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseWorkflowRequest.Unmarshal(m, b) +} +func (m *PauseWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *PauseWorkflowRequest) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_PauseWorkflowRequest.Merge(dst, src) +} +func (m *PauseWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_PauseWorkflowRequest.Size(m) +} +func (m *PauseWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_PauseWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseWorkflowRequest proto.InternalMessageInfo + +func (m *PauseWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type PauseWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *PauseWorkflowResponse) Reset() { *m = PauseWorkflowResponse{} } +func (m *PauseWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*PauseWorkflowResponse) ProtoMessage() {} +func (*PauseWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{12} +} +func (m *PauseWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_PauseWorkflowResponse.Unmarshal(m, b) +} +func (m *PauseWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_PauseWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *PauseWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_PauseWorkflowResponse.Merge(dst, src) +} +func (m *PauseWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_PauseWorkflowResponse.Size(m) +} +func (m *PauseWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_PauseWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_PauseWorkflowResponse proto.InternalMessageInfo + +type ResumeWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeWorkflowRequest) Reset() { *m = ResumeWorkflowRequest{} } +func (m *ResumeWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*ResumeWorkflowRequest) ProtoMessage() {} +func (*ResumeWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{13} +} +func (m *ResumeWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeWorkflowRequest.Unmarshal(m, b) +} +func (m *ResumeWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *ResumeWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeWorkflowRequest.Merge(dst, src) +} +func (m *ResumeWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_ResumeWorkflowRequest.Size(m) +} +func (m *ResumeWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeWorkflowRequest proto.InternalMessageInfo + +func (m *ResumeWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type ResumeWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResumeWorkflowResponse) Reset() { *m = ResumeWorkflowResponse{} } +func (m *ResumeWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*ResumeWorkflowResponse) ProtoMessage() {} +func (*ResumeWorkflowResponse) Descriptor() 
([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{14} +} +func (m *ResumeWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResumeWorkflowResponse.Unmarshal(m, b) +} +func (m *ResumeWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResumeWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *ResumeWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResumeWorkflowResponse.Merge(dst, src) +} +func (m *ResumeWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_ResumeWorkflowResponse.Size(m) +} +func (m *ResumeWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ResumeWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ResumeWorkflowResponse proto.InternalMessageInfo + +type SkipTaskRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"` + Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request" json:"request,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } +func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } +func (*SkipTaskRequest) ProtoMessage() {} +func (*SkipTaskRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{15} +} +func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) +} +func (m *SkipTaskRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskRequest.Marshal(b, m, deterministic) +} +func (dst *SkipTaskRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskRequest.Merge(dst, src) +} +func (m *SkipTaskRequest) XXX_Size() int { + return xxx_messageInfo_SkipTaskRequest.Size(m) +} +func (m *SkipTaskRequest) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskRequest proto.InternalMessageInfo + +func (m *SkipTaskRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *SkipTaskRequest) GetTaskReferenceName() string { + if m != nil { + return m.TaskReferenceName + } + return "" +} + +func (m *SkipTaskRequest) GetRequest() *model.SkipTaskRequest { + if m != nil { + return m.Request + } + return nil +} + +type SkipTaskResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *SkipTaskResponse) Reset() { *m = SkipTaskResponse{} } +func (m *SkipTaskResponse) String() string { return proto.CompactTextString(m) } +func (*SkipTaskResponse) ProtoMessage() {} +func (*SkipTaskResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{16} +} +func (m *SkipTaskResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_SkipTaskResponse.Unmarshal(m, b) +} +func (m *SkipTaskResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_SkipTaskResponse.Marshal(b, m, deterministic) +} +func (dst *SkipTaskResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_SkipTaskResponse.Merge(dst, src) +} +func (m *SkipTaskResponse) XXX_Size() 
int { + return xxx_messageInfo_SkipTaskResponse.Size(m) +} +func (m *SkipTaskResponse) XXX_DiscardUnknown() { + xxx_messageInfo_SkipTaskResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_SkipTaskResponse proto.InternalMessageInfo + +type RerunWorkflowResponse struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RerunWorkflowResponse) Reset() { *m = RerunWorkflowResponse{} } +func (m *RerunWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RerunWorkflowResponse) ProtoMessage() {} +func (*RerunWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{17} +} +func (m *RerunWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RerunWorkflowResponse.Unmarshal(m, b) +} +func (m *RerunWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RerunWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RerunWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RerunWorkflowResponse.Merge(dst, src) +} +func (m *RerunWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RerunWorkflowResponse.Size(m) +} +func (m *RerunWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RerunWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RerunWorkflowResponse proto.InternalMessageInfo + +func (m *RerunWorkflowResponse) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RestartWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestartWorkflowRequest) Reset() { *m = RestartWorkflowRequest{} } +func (m *RestartWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RestartWorkflowRequest) ProtoMessage() {} +func (*RestartWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{18} +} +func (m *RestartWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestartWorkflowRequest.Unmarshal(m, b) +} +func (m *RestartWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestartWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RestartWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestartWorkflowRequest.Merge(dst, src) +} +func (m *RestartWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RestartWorkflowRequest.Size(m) +} +func (m *RestartWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RestartWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RestartWorkflowRequest proto.InternalMessageInfo + +func (m *RestartWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RestartWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RestartWorkflowResponse) Reset() { *m = RestartWorkflowResponse{} } +func (m *RestartWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RestartWorkflowResponse) ProtoMessage() {} +func 
(*RestartWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{19} +} +func (m *RestartWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RestartWorkflowResponse.Unmarshal(m, b) +} +func (m *RestartWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RestartWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RestartWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RestartWorkflowResponse.Merge(dst, src) +} +func (m *RestartWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RestartWorkflowResponse.Size(m) +} +func (m *RestartWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RestartWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RestartWorkflowResponse proto.InternalMessageInfo + +type RetryWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryWorkflowRequest) Reset() { *m = RetryWorkflowRequest{} } +func (m *RetryWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*RetryWorkflowRequest) ProtoMessage() {} +func (*RetryWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{20} +} +func (m *RetryWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryWorkflowRequest.Unmarshal(m, b) +} +func (m *RetryWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *RetryWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryWorkflowRequest.Merge(dst, src) +} +func (m *RetryWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_RetryWorkflowRequest.Size(m) +} +func (m *RetryWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_RetryWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryWorkflowRequest proto.InternalMessageInfo + +func (m *RetryWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type RetryWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *RetryWorkflowResponse) Reset() { *m = RetryWorkflowResponse{} } +func (m *RetryWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*RetryWorkflowResponse) ProtoMessage() {} +func (*RetryWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{21} +} +func (m *RetryWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_RetryWorkflowResponse.Unmarshal(m, b) +} +func (m *RetryWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_RetryWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *RetryWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_RetryWorkflowResponse.Merge(dst, src) +} +func (m *RetryWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_RetryWorkflowResponse.Size(m) +} +func (m *RetryWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_RetryWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_RetryWorkflowResponse proto.InternalMessageInfo + +type 
ResetWorkflowCallbacksRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResetWorkflowCallbacksRequest) Reset() { *m = ResetWorkflowCallbacksRequest{} } +func (m *ResetWorkflowCallbacksRequest) String() string { return proto.CompactTextString(m) } +func (*ResetWorkflowCallbacksRequest) ProtoMessage() {} +func (*ResetWorkflowCallbacksRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{22} +} +func (m *ResetWorkflowCallbacksRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Unmarshal(m, b) +} +func (m *ResetWorkflowCallbacksRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Marshal(b, m, deterministic) +} +func (dst *ResetWorkflowCallbacksRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResetWorkflowCallbacksRequest.Merge(dst, src) +} +func (m *ResetWorkflowCallbacksRequest) XXX_Size() int { + return xxx_messageInfo_ResetWorkflowCallbacksRequest.Size(m) +} +func (m *ResetWorkflowCallbacksRequest) XXX_DiscardUnknown() { + xxx_messageInfo_ResetWorkflowCallbacksRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_ResetWorkflowCallbacksRequest proto.InternalMessageInfo + +func (m *ResetWorkflowCallbacksRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +type ResetWorkflowCallbacksResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *ResetWorkflowCallbacksResponse) Reset() { *m = ResetWorkflowCallbacksResponse{} } +func (m *ResetWorkflowCallbacksResponse) String() string { return proto.CompactTextString(m) } +func (*ResetWorkflowCallbacksResponse) ProtoMessage() {} +func (*ResetWorkflowCallbacksResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{23} +} +func (m *ResetWorkflowCallbacksResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Unmarshal(m, b) +} +func (m *ResetWorkflowCallbacksResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Marshal(b, m, deterministic) +} +func (dst *ResetWorkflowCallbacksResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_ResetWorkflowCallbacksResponse.Merge(dst, src) +} +func (m *ResetWorkflowCallbacksResponse) XXX_Size() int { + return xxx_messageInfo_ResetWorkflowCallbacksResponse.Size(m) +} +func (m *ResetWorkflowCallbacksResponse) XXX_DiscardUnknown() { + xxx_messageInfo_ResetWorkflowCallbacksResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_ResetWorkflowCallbacksResponse proto.InternalMessageInfo + +type TerminateWorkflowRequest struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + Reason string `protobuf:"bytes,2,opt,name=reason" json:"reason,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowRequest{} } +func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } +func (*TerminateWorkflowRequest) ProtoMessage() {} +func 
(*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{24} +} +func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) +} +func (m *TerminateWorkflowRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TerminateWorkflowRequest.Marshal(b, m, deterministic) +} +func (dst *TerminateWorkflowRequest) XXX_Merge(src proto.Message) { + xxx_messageInfo_TerminateWorkflowRequest.Merge(dst, src) +} +func (m *TerminateWorkflowRequest) XXX_Size() int { + return xxx_messageInfo_TerminateWorkflowRequest.Size(m) +} +func (m *TerminateWorkflowRequest) XXX_DiscardUnknown() { + xxx_messageInfo_TerminateWorkflowRequest.DiscardUnknown(m) +} + +var xxx_messageInfo_TerminateWorkflowRequest proto.InternalMessageInfo + +func (m *TerminateWorkflowRequest) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *TerminateWorkflowRequest) GetReason() string { + if m != nil { + return m.Reason + } + return "" +} + +type TerminateWorkflowResponse struct { + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *TerminateWorkflowResponse) Reset() { *m = TerminateWorkflowResponse{} } +func (m *TerminateWorkflowResponse) String() string { return proto.CompactTextString(m) } +func (*TerminateWorkflowResponse) ProtoMessage() {} +func (*TerminateWorkflowResponse) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{25} +} +func (m *TerminateWorkflowResponse) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_TerminateWorkflowResponse.Unmarshal(m, b) +} +func (m *TerminateWorkflowResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_TerminateWorkflowResponse.Marshal(b, m, deterministic) +} +func (dst *TerminateWorkflowResponse) XXX_Merge(src proto.Message) { + xxx_messageInfo_TerminateWorkflowResponse.Merge(dst, src) +} +func (m *TerminateWorkflowResponse) XXX_Size() int { + return xxx_messageInfo_TerminateWorkflowResponse.Size(m) +} +func (m *TerminateWorkflowResponse) XXX_DiscardUnknown() { + xxx_messageInfo_TerminateWorkflowResponse.DiscardUnknown(m) +} + +var xxx_messageInfo_TerminateWorkflowResponse proto.InternalMessageInfo + +type WorkflowSummarySearchResult struct { + TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits" json:"total_hits,omitempty"` + Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results" json:"results,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySearchResult{} } +func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } +func (*WorkflowSummarySearchResult) ProtoMessage() {} +func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { + return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{26} +} +func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) +} +func (m *WorkflowSummarySearchResult) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_WorkflowSummarySearchResult.Marshal(b, m, deterministic) +} +func (dst *WorkflowSummarySearchResult) XXX_Merge(src proto.Message) { + 
xxx_messageInfo_WorkflowSummarySearchResult.Merge(dst, src) +} +func (m *WorkflowSummarySearchResult) XXX_Size() int { + return xxx_messageInfo_WorkflowSummarySearchResult.Size(m) +} +func (m *WorkflowSummarySearchResult) XXX_DiscardUnknown() { + xxx_messageInfo_WorkflowSummarySearchResult.DiscardUnknown(m) +} + +var xxx_messageInfo_WorkflowSummarySearchResult proto.InternalMessageInfo + +func (m *WorkflowSummarySearchResult) GetTotalHits() int64 { + if m != nil { + return m.TotalHits + } + return 0 +} + +func (m *WorkflowSummarySearchResult) GetResults() []*model.WorkflowSummary { + if m != nil { + return m.Results + } + return nil +} + +func init() { + proto.RegisterType((*StartWorkflowResponse)(nil), "conductor.grpc.workflows.StartWorkflowResponse") + proto.RegisterType((*GetWorkflowsRequest)(nil), "conductor.grpc.workflows.GetWorkflowsRequest") + proto.RegisterType((*GetWorkflowsResponse)(nil), "conductor.grpc.workflows.GetWorkflowsResponse") + proto.RegisterMapType((map[string]*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.WorkflowsByIdEntry") + proto.RegisterType((*GetWorkflowsResponse_Workflows)(nil), "conductor.grpc.workflows.GetWorkflowsResponse.Workflows") + proto.RegisterType((*GetWorkflowStatusRequest)(nil), "conductor.grpc.workflows.GetWorkflowStatusRequest") + proto.RegisterType((*GetWorkflowStatusResponse)(nil), "conductor.grpc.workflows.GetWorkflowStatusResponse") + proto.RegisterType((*RemoveWorkflowRequest)(nil), "conductor.grpc.workflows.RemoveWorkflowRequest") + proto.RegisterType((*RemoveWorkflowResponse)(nil), "conductor.grpc.workflows.RemoveWorkflowResponse") + proto.RegisterType((*GetRunningWorkflowsRequest)(nil), "conductor.grpc.workflows.GetRunningWorkflowsRequest") + proto.RegisterType((*GetRunningWorkflowsResponse)(nil), "conductor.grpc.workflows.GetRunningWorkflowsResponse") + proto.RegisterType((*DecideWorkflowRequest)(nil), "conductor.grpc.workflows.DecideWorkflowRequest") + proto.RegisterType((*DecideWorkflowResponse)(nil), "conductor.grpc.workflows.DecideWorkflowResponse") + proto.RegisterType((*PauseWorkflowRequest)(nil), "conductor.grpc.workflows.PauseWorkflowRequest") + proto.RegisterType((*PauseWorkflowResponse)(nil), "conductor.grpc.workflows.PauseWorkflowResponse") + proto.RegisterType((*ResumeWorkflowRequest)(nil), "conductor.grpc.workflows.ResumeWorkflowRequest") + proto.RegisterType((*ResumeWorkflowResponse)(nil), "conductor.grpc.workflows.ResumeWorkflowResponse") + proto.RegisterType((*SkipTaskRequest)(nil), "conductor.grpc.workflows.SkipTaskRequest") + proto.RegisterType((*SkipTaskResponse)(nil), "conductor.grpc.workflows.SkipTaskResponse") + proto.RegisterType((*RerunWorkflowResponse)(nil), "conductor.grpc.workflows.RerunWorkflowResponse") + proto.RegisterType((*RestartWorkflowRequest)(nil), "conductor.grpc.workflows.RestartWorkflowRequest") + proto.RegisterType((*RestartWorkflowResponse)(nil), "conductor.grpc.workflows.RestartWorkflowResponse") + proto.RegisterType((*RetryWorkflowRequest)(nil), "conductor.grpc.workflows.RetryWorkflowRequest") + proto.RegisterType((*RetryWorkflowResponse)(nil), "conductor.grpc.workflows.RetryWorkflowResponse") + proto.RegisterType((*ResetWorkflowCallbacksRequest)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksRequest") + proto.RegisterType((*ResetWorkflowCallbacksResponse)(nil), "conductor.grpc.workflows.ResetWorkflowCallbacksResponse") + proto.RegisterType((*TerminateWorkflowRequest)(nil), "conductor.grpc.workflows.TerminateWorkflowRequest") + 
proto.RegisterType((*TerminateWorkflowResponse)(nil), "conductor.grpc.workflows.TerminateWorkflowResponse")
+	proto.RegisterType((*WorkflowSummarySearchResult)(nil), "conductor.grpc.workflows.WorkflowSummarySearchResult")
+}
+
+// Reference imports to suppress errors if they are not otherwise used.
+var _ context.Context
+var _ grpc.ClientConn
+
+// This is a compile-time assertion to ensure that this generated file
+// is compatible with the grpc package it is being compiled against.
+const _ = grpc.SupportPackageIsVersion4
+
+// WorkflowServiceClient is the client API for WorkflowService service.
+//
+// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
+type WorkflowServiceClient interface {
+	// POST /
+	StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption) (*StartWorkflowResponse, error)
+	// GET /{name}/correlated/{correlationId}
+	GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error)
+	// GET /{workflowId}
+	GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error)
+	// DELETE /{workflowId}/remove
+	RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error)
+	// GET /running/{name}
+	GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error)
+	// PUT /decide/{workflowId}
+	DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error)
+	// PUT /{workflowId}/pause
+	PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error)
+	// PUT /{workflowId}/resume
+	ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error)
+	// PUT /{workflowId}/skiptask/{taskReferenceName}
+	SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error)
+	// POST /{workflowId}/rerun
+	RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*RerunWorkflowResponse, error)
+	// POST /{workflowId}/restart
+	RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error)
+	// POST /{workflowId}/retry
+	RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error)
+	// POST /{workflowId}/resetcallbacks
+	ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error)
+	// DELETE /{workflowId}
+	TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error)
+	// GET /search
+	Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error)
+	SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error)
+}
+
+type workflowServiceClient struct {
+	cc *grpc.ClientConn
+}
+
+func NewWorkflowServiceClient(cc *grpc.ClientConn) WorkflowServiceClient {
+	return &workflowServiceClient{cc}
+}
+
+func (c *workflowServiceClient) StartWorkflow(ctx context.Context, in *model.StartWorkflowRequest, opts ...grpc.CallOption)
(*StartWorkflowResponse, error) { + out := new(StartWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/StartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflows(ctx context.Context, in *GetWorkflowsRequest, opts ...grpc.CallOption) (*GetWorkflowsResponse, error) { + out := new(GetWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetWorkflowStatus(ctx context.Context, in *GetWorkflowStatusRequest, opts ...grpc.CallOption) (*model.Workflow, error) { + out := new(model.Workflow) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RemoveWorkflow(ctx context.Context, in *RemoveWorkflowRequest, opts ...grpc.CallOption) (*RemoveWorkflowResponse, error) { + out := new(RemoveWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) GetRunningWorkflows(ctx context.Context, in *GetRunningWorkflowsRequest, opts ...grpc.CallOption) (*GetRunningWorkflowsResponse, error) { + out := new(GetRunningWorkflowsResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) DecideWorkflow(ctx context.Context, in *DecideWorkflowRequest, opts ...grpc.CallOption) (*DecideWorkflowResponse, error) { + out := new(DecideWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) PauseWorkflow(ctx context.Context, in *PauseWorkflowRequest, opts ...grpc.CallOption) (*PauseWorkflowResponse, error) { + out := new(PauseWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResumeWorkflow(ctx context.Context, in *ResumeWorkflowRequest, opts ...grpc.CallOption) (*ResumeWorkflowResponse, error) { + out := new(ResumeWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SkipTaskFromWorkflow(ctx context.Context, in *SkipTaskRequest, opts ...grpc.CallOption) (*SkipTaskResponse, error) { + out := new(SkipTaskResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RerunWorkflow(ctx context.Context, in *model.RerunWorkflowRequest, opts ...grpc.CallOption) (*RerunWorkflowResponse, error) { + out := new(RerunWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", in, out, opts...) 
+ if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RestartWorkflow(ctx context.Context, in *RestartWorkflowRequest, opts ...grpc.CallOption) (*RestartWorkflowResponse, error) { + out := new(RestartWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) RetryWorkflow(ctx context.Context, in *RetryWorkflowRequest, opts ...grpc.CallOption) (*RetryWorkflowResponse, error) { + out := new(RetryWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) ResetWorkflowCallbacks(ctx context.Context, in *ResetWorkflowCallbacksRequest, opts ...grpc.CallOption) (*ResetWorkflowCallbacksResponse, error) { + out := new(ResetWorkflowCallbacksResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) TerminateWorkflow(ctx context.Context, in *TerminateWorkflowRequest, opts ...grpc.CallOption) (*TerminateWorkflowResponse, error) { + out := new(TerminateWorkflowResponse) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) Search(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/Search", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +func (c *workflowServiceClient) SearchByTasks(ctx context.Context, in *search.Request, opts ...grpc.CallOption) (*WorkflowSummarySearchResult, error) { + out := new(WorkflowSummarySearchResult) + err := c.cc.Invoke(ctx, "/conductor.grpc.workflows.WorkflowService/SearchByTasks", in, out, opts...) + if err != nil { + return nil, err + } + return out, nil +} + +// WorkflowServiceServer is the server API for WorkflowService service. 
+type WorkflowServiceServer interface {
+	// POST /
+	StartWorkflow(context.Context, *model.StartWorkflowRequest) (*StartWorkflowResponse, error)
+	// GET /{name}/correlated/{correlationId}
+	GetWorkflows(context.Context, *GetWorkflowsRequest) (*GetWorkflowsResponse, error)
+	// GET /{workflowId}
+	GetWorkflowStatus(context.Context, *GetWorkflowStatusRequest) (*model.Workflow, error)
+	// DELETE /{workflowId}/remove
+	RemoveWorkflow(context.Context, *RemoveWorkflowRequest) (*RemoveWorkflowResponse, error)
+	// GET /running/{name}
+	GetRunningWorkflows(context.Context, *GetRunningWorkflowsRequest) (*GetRunningWorkflowsResponse, error)
+	// PUT /decide/{workflowId}
+	DecideWorkflow(context.Context, *DecideWorkflowRequest) (*DecideWorkflowResponse, error)
+	// PUT /{workflowId}/pause
+	PauseWorkflow(context.Context, *PauseWorkflowRequest) (*PauseWorkflowResponse, error)
+	// PUT /{workflowId}/resume
+	ResumeWorkflow(context.Context, *ResumeWorkflowRequest) (*ResumeWorkflowResponse, error)
+	// PUT /{workflowId}/skiptask/{taskReferenceName}
+	SkipTaskFromWorkflow(context.Context, *SkipTaskRequest) (*SkipTaskResponse, error)
+	// POST /{workflowId}/rerun
+	RerunWorkflow(context.Context, *model.RerunWorkflowRequest) (*RerunWorkflowResponse, error)
+	// POST /{workflowId}/restart
+	RestartWorkflow(context.Context, *RestartWorkflowRequest) (*RestartWorkflowResponse, error)
+	// POST /{workflowId}/retry
+	RetryWorkflow(context.Context, *RetryWorkflowRequest) (*RetryWorkflowResponse, error)
+	// POST /{workflowId}/resetcallbacks
+	ResetWorkflowCallbacks(context.Context, *ResetWorkflowCallbacksRequest) (*ResetWorkflowCallbacksResponse, error)
+	// DELETE /{workflowId}
+	TerminateWorkflow(context.Context, *TerminateWorkflowRequest) (*TerminateWorkflowResponse, error)
+	// GET /search
+	Search(context.Context, *search.Request) (*WorkflowSummarySearchResult, error)
+	SearchByTasks(context.Context, *search.Request) (*WorkflowSummarySearchResult, error)
+}
+
+func RegisterWorkflowServiceServer(s *grpc.Server, srv WorkflowServiceServer) {
+	s.RegisterService(&_WorkflowService_serviceDesc, srv)
+}
+
+func _WorkflowService_StartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(model.StartWorkflowRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(WorkflowServiceServer).StartWorkflow(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/conductor.grpc.workflows.WorkflowService/StartWorkflow",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(WorkflowServiceServer).StartWorkflow(ctx, req.(*model.StartWorkflowRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func _WorkflowService_GetWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
+	in := new(GetWorkflowsRequest)
+	if err := dec(in); err != nil {
+		return nil, err
+	}
+	if interceptor == nil {
+		return srv.(WorkflowServiceServer).GetWorkflows(ctx, in)
+	}
+	info := &grpc.UnaryServerInfo{
+		Server:     srv,
+		FullMethod: "/conductor.grpc.workflows.WorkflowService/GetWorkflows",
+	}
+	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
+		return srv.(WorkflowServiceServer).GetWorkflows(ctx, req.(*GetWorkflowsRequest))
+	}
+	return interceptor(ctx, in, info, handler)
+}
+
+func
_WorkflowService_GetWorkflowStatus_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetWorkflowStatusRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/GetWorkflowStatus", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetWorkflowStatus(ctx, req.(*GetWorkflowStatusRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RemoveWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RemoveWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RemoveWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RemoveWorkflow(ctx, req.(*RemoveWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_GetRunningWorkflows_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(GetRunningWorkflowsRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/GetRunningWorkflows", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).GetRunningWorkflows(ctx, req.(*GetRunningWorkflowsRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_DecideWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(DecideWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/DecideWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).DecideWorkflow(ctx, req.(*DecideWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_PauseWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(PauseWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/PauseWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).PauseWorkflow(ctx, req.(*PauseWorkflowRequest)) + } + return interceptor(ctx, in, info, 
handler) +} + +func _WorkflowService_ResumeWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResumeWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/ResumeWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResumeWorkflow(ctx, req.(*ResumeWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SkipTaskFromWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(SkipTaskRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/SkipTaskFromWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SkipTaskFromWorkflow(ctx, req.(*SkipTaskRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RerunWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(model.RerunWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RerunWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RerunWorkflow(ctx, req.(*model.RerunWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RestartWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RestartWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RestartWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RestartWorkflow(ctx, req.(*RestartWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_RetryWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(RetryWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/RetryWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).RetryWorkflow(ctx, req.(*RetryWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} 
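+
+// Illustrative sketch (not part of the generated output): every unary handler in
+// this file follows the same shape — decode the wire payload into the request
+// type, call the WorkflowServiceServer implementation directly when no
+// interceptor is installed, and otherwise wrap the call so the registered
+// grpc.UnaryServerInterceptor runs around it. On the client side the generated
+// stub is used like any other gRPC client; the address below is a hypothetical
+// endpoint and error handling is elided for brevity:
+//
+//	conn, err := grpc.Dial("localhost:8090", grpc.WithInsecure())
+//	if err != nil {
+//		log.Fatal(err)
+//	}
+//	defer conn.Close()
+//	client := NewWorkflowServiceClient(conn)
+//	resp, err := client.RetryWorkflow(context.Background(),
+//		&RetryWorkflowRequest{WorkflowId: "hypothetical-workflow-id"})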
+ +func _WorkflowService_ResetWorkflowCallbacks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(ResetWorkflowCallbacksRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/ResetWorkflowCallbacks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).ResetWorkflowCallbacks(ctx, req.(*ResetWorkflowCallbacksRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_TerminateWorkflow_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(TerminateWorkflowRequest) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/TerminateWorkflow", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).TerminateWorkflow(ctx, req.(*TerminateWorkflowRequest)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_Search_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(search.Request) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).Search(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/Search", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).Search(ctx, req.(*search.Request)) + } + return interceptor(ctx, in, info, handler) +} + +func _WorkflowService_SearchByTasks_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { + in := new(search.Request) + if err := dec(in); err != nil { + return nil, err + } + if interceptor == nil { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, in) + } + info := &grpc.UnaryServerInfo{ + Server: srv, + FullMethod: "/conductor.grpc.workflows.WorkflowService/SearchByTasks", + } + handler := func(ctx context.Context, req interface{}) (interface{}, error) { + return srv.(WorkflowServiceServer).SearchByTasks(ctx, req.(*search.Request)) + } + return interceptor(ctx, in, info, handler) +} + +var _WorkflowService_serviceDesc = grpc.ServiceDesc{ + ServiceName: "conductor.grpc.workflows.WorkflowService", + HandlerType: (*WorkflowServiceServer)(nil), + Methods: []grpc.MethodDesc{ + { + MethodName: "StartWorkflow", + Handler: _WorkflowService_StartWorkflow_Handler, + }, + { + MethodName: "GetWorkflows", + Handler: _WorkflowService_GetWorkflows_Handler, + }, + { + MethodName: "GetWorkflowStatus", + Handler: _WorkflowService_GetWorkflowStatus_Handler, + }, + { + MethodName: "RemoveWorkflow", + Handler: _WorkflowService_RemoveWorkflow_Handler, + }, + { + MethodName: "GetRunningWorkflows", + Handler: _WorkflowService_GetRunningWorkflows_Handler, + }, + { + MethodName: "DecideWorkflow", + Handler: 
_WorkflowService_DecideWorkflow_Handler, + }, + { + MethodName: "PauseWorkflow", + Handler: _WorkflowService_PauseWorkflow_Handler, + }, + { + MethodName: "ResumeWorkflow", + Handler: _WorkflowService_ResumeWorkflow_Handler, + }, + { + MethodName: "SkipTaskFromWorkflow", + Handler: _WorkflowService_SkipTaskFromWorkflow_Handler, + }, + { + MethodName: "RerunWorkflow", + Handler: _WorkflowService_RerunWorkflow_Handler, + }, + { + MethodName: "RestartWorkflow", + Handler: _WorkflowService_RestartWorkflow_Handler, + }, + { + MethodName: "RetryWorkflow", + Handler: _WorkflowService_RetryWorkflow_Handler, + }, + { + MethodName: "ResetWorkflowCallbacks", + Handler: _WorkflowService_ResetWorkflowCallbacks_Handler, + }, + { + MethodName: "TerminateWorkflow", + Handler: _WorkflowService_TerminateWorkflow_Handler, + }, + { + MethodName: "Search", + Handler: _WorkflowService_Search_Handler, + }, + { + MethodName: "SearchByTasks", + Handler: _WorkflowService_SearchByTasks_Handler, + }, + }, + Streams: []grpc.StreamDesc{}, + Metadata: "grpc/workflow_service.proto", +} + +func init() { + proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_fc7b0bf1a282d9fc) +} + +var fileDescriptor_workflow_service_fc7b0bf1a282d9fc = []byte{ + // 1121 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xcd, 0x6e, 0xdb, 0x46, + 0x10, 0x86, 0xa4, 0xc4, 0xb6, 0xc6, 0x96, 0x7f, 0x36, 0xb6, 0x43, 0xd3, 0x48, 0xaa, 0xb2, 0x08, + 0xe0, 0x14, 0x28, 0xd5, 0x2a, 0x0d, 0xac, 0xe6, 0x94, 0xda, 0x69, 0x53, 0x5f, 0x82, 0x60, 0x65, + 0xa0, 0x40, 0x2f, 0x2c, 0x45, 0xae, 0x65, 0x42, 0xfc, 0x51, 0x77, 0x97, 0x72, 0x54, 0xf4, 0x54, + 0xf4, 0xd6, 0x57, 0x28, 0xfa, 0x20, 0xbd, 0xf6, 0xc5, 0x0a, 0x2e, 0x97, 0x94, 0x48, 0xad, 0x18, + 0xc9, 0x40, 0x6f, 0xd2, 0xcc, 0x7c, 0x33, 0xb3, 0x33, 0xb3, 0xf3, 0xad, 0x04, 0xa7, 0x43, 0x3a, + 0x76, 0x3a, 0x77, 0x11, 0x1d, 0xdd, 0xf8, 0xd1, 0x9d, 0xc5, 0x08, 0x9d, 0x78, 0x0e, 0x31, 0xc7, + 0x34, 0xe2, 0x11, 0xd2, 0x9c, 0x28, 0x74, 0x63, 0x87, 0x47, 0xd4, 0x4c, 0xcc, 0xcc, 0xcc, 0x8c, + 0xe9, 0x07, 0x02, 0xc6, 0x88, 0x4d, 0x9d, 0xdb, 0xd4, 0x58, 0x3f, 0x0c, 0x22, 0x97, 0xf8, 0xb9, + 0x2b, 0x29, 0x3d, 0x2d, 0x4a, 0x59, 0x1c, 0x04, 0x36, 0x9d, 0x16, 0x95, 0x6c, 0xe4, 0x8d, 0xb9, + 0xcd, 0x46, 0x94, 0xfc, 0x12, 0x13, 0xc6, 0xa5, 0xb2, 0x2d, 0x95, 0xdc, 0xa6, 0x3c, 0x83, 0x2b, + 0x2d, 0x28, 0xa1, 0x71, 0xa8, 0xb4, 0x30, 0x7a, 0x70, 0xd4, 0x4f, 0xf0, 0x3f, 0x4a, 0x2d, 0x26, + 0x6c, 0x1c, 0x85, 0x8c, 0xa0, 0x4f, 0x60, 0x3b, 0x3f, 0xb3, 0xe7, 0x6a, 0xb5, 0x76, 0xed, 0xac, + 0x89, 0x21, 0x13, 0x5d, 0xb9, 0xc6, 0x5f, 0x35, 0x78, 0xf4, 0x96, 0xe4, 0x40, 0x86, 0x53, 0xbf, + 0x08, 0xc1, 0x83, 0xd0, 0x0e, 0x88, 0x44, 0x88, 0xcf, 0xe8, 0x19, 0xec, 0x3a, 0x11, 0xa5, 0xc4, + 0xb7, 0xb9, 0x17, 0x85, 0x89, 0xbf, 0x7a, 0xbb, 0x71, 0xd6, 0xc4, 0xad, 0x39, 0xe9, 0x95, 0x9b, + 0x98, 0x79, 0xa1, 0xe3, 0xc7, 0x2e, 0xb1, 0x1c, 0x3f, 0x62, 0xc4, 0xd5, 0x1a, 0xed, 0xda, 0xd9, + 0x16, 0x6e, 0x49, 0xe9, 0xa5, 0x10, 0xa2, 0xcf, 0x20, 0x13, 0x58, 0x49, 0x51, 0x98, 0xf6, 0x40, + 0x58, 0xed, 0x48, 0xe1, 0x75, 0x22, 0x33, 0xfe, 0xad, 0xc3, 0x61, 0x31, 0x3d, 0x79, 0x30, 0x0f, + 0xf6, 0xf2, 0x5a, 0x5b, 0x83, 0x69, 0x7a, 0xb8, 0xc6, 0xd9, 0x76, 0xf7, 0x5b, 0x73, 0x59, 0x33, + 0x4d, 0x95, 0x23, 0x33, 0x97, 0x5c, 0x4c, 0xaf, 0xdc, 0xef, 0x42, 0x4e, 0xa7, 0xb8, 0x75, 0x37, + 0x2f, 0xd3, 0xdf, 0x40, 0x33, 0x37, 0x42, 0xe7, 0xd0, 0xcc, 0xb5, 0x32, 0xe2, 0xc9, 0x5c, 0x44, + 0xd1, 0x8e, 0xdc, 0x27, 0x9e, 0xd9, 0xea, 0xbf, 0x02, 0x5a, 0x0c, 0x85, 0xf6, 0xa1, 
0x31, 0x22, + 0x53, 0x59, 0xe5, 0xe4, 0x23, 0x7a, 0x07, 0x0f, 0x27, 0xb6, 0x1f, 0x13, 0xad, 0xde, 0xae, 0x9d, + 0x6d, 0x77, 0x7b, 0xf7, 0x3d, 0x0e, 0x4e, 0xdd, 0xbc, 0xaa, 0xf7, 0x6a, 0xc6, 0xcf, 0xa0, 0xcd, + 0x19, 0xf7, 0xb9, 0xcd, 0xe3, 0xbc, 0xd1, 0x1f, 0x9b, 0x90, 0xc5, 0x3e, 0xd5, 0x15, 0x7d, 0xc2, + 0x70, 0xa2, 0x88, 0x20, 0x7b, 0xf5, 0x12, 0xb6, 0x32, 0x7f, 0xc2, 0x7f, 0x65, 0xc9, 0x72, 0x53, + 0xc3, 0x81, 0x23, 0x4c, 0x82, 0x68, 0x42, 0x66, 0x53, 0x5d, 0x4e, 0xd9, 0x5d, 0x4c, 0xd9, 0xbd, + 0x72, 0xd1, 0x73, 0xd8, 0x4f, 0x2e, 0xac, 0x37, 0x21, 0x56, 0x1e, 0x38, 0xcd, 0x7a, 0x4f, 0xca, + 0x33, 0x97, 0x86, 0x06, 0xc7, 0xe5, 0x20, 0x69, 0xd6, 0xc6, 0x1f, 0x35, 0xd0, 0xdf, 0x12, 0x8e, + 0xe3, 0x30, 0xf4, 0xc2, 0xe1, 0x4a, 0x17, 0x44, 0x83, 0xcd, 0x09, 0xa1, 0xcc, 0x8b, 0x42, 0x11, + 0xee, 0x21, 0xce, 0xbe, 0xa2, 0x27, 0x00, 0xe2, 0x82, 0x5b, 0xdc, 0x0b, 0x88, 0xb8, 0x0f, 0x0d, + 0xdc, 0x14, 0x92, 0x6b, 0x2f, 0x20, 0xe8, 0x04, 0xb6, 0x48, 0xe8, 0xa6, 0xca, 0x07, 0x42, 0xb9, + 0x49, 0x42, 0x37, 0x51, 0x19, 0xaf, 0xe1, 0x54, 0x99, 0x85, 0xac, 0xed, 0xa7, 0xb0, 0x33, 0xd7, + 0xbe, 0x74, 0x24, 0x9b, 0x78, 0x7b, 0xd6, 0x3f, 0x96, 0x2c, 0x87, 0x37, 0xc4, 0xf1, 0xdc, 0x8a, + 0x3a, 0x2e, 0x59, 0x0e, 0x1a, 0x1c, 0x97, 0x91, 0xb2, 0x38, 0xe7, 0x70, 0xf8, 0xde, 0x8e, 0xd9, + 0xfa, 0x2e, 0x1f, 0xc3, 0x51, 0x09, 0x28, 0x3d, 0xf6, 0x92, 0x6e, 0xb3, 0x38, 0xb8, 0x57, 0x96, + 0x65, 0xa4, 0xf4, 0xf9, 0x77, 0x0d, 0xf6, 0xfa, 0x23, 0x6f, 0x9c, 0xcc, 0xe8, 0xca, 0xf3, 0x6e, + 0xc2, 0xa3, 0x64, 0xce, 0x2d, 0x4a, 0x6e, 0x08, 0x25, 0xa1, 0x43, 0x2c, 0xd1, 0xe7, 0xba, 0x30, + 0x3c, 0xe0, 0xc2, 0x95, 0xd4, 0xbc, 0x4b, 0x9a, 0xfe, 0x0a, 0x36, 0xe5, 0x32, 0x16, 0x7d, 0xdd, + 0xee, 0xb6, 0x17, 0x86, 0xbb, 0x94, 0x03, 0xce, 0x00, 0x06, 0x82, 0xfd, 0x99, 0x6e, 0xbe, 0x10, + 0x34, 0x0e, 0xd7, 0xdf, 0xe5, 0xdf, 0x88, 0x42, 0x14, 0x79, 0x60, 0xc5, 0x1a, 0x9e, 0xc0, 0xe3, + 0x05, 0xe8, 0xac, 0xd5, 0x98, 0x70, 0x3a, 0xbd, 0x4f, 0xab, 0x4b, 0x40, 0xe9, 0xf1, 0x35, 0x3c, + 0xc1, 0x84, 0xcd, 0xd6, 0xc5, 0xa5, 0xed, 0xfb, 0x03, 0xdb, 0x19, 0xad, 0xbc, 0x93, 0x8c, 0x36, + 0x3c, 0x5d, 0xe6, 0x41, 0xc6, 0xe8, 0x83, 0x76, 0x4d, 0x68, 0xe0, 0x85, 0x36, 0x5f, 0x7b, 0xa2, + 0xd0, 0x31, 0x6c, 0x50, 0x62, 0x33, 0x79, 0x8d, 0x9b, 0x58, 0x7e, 0x33, 0x4e, 0xe1, 0x44, 0xe1, + 0x54, 0x46, 0xfc, 0x00, 0xa7, 0xf9, 0xfe, 0x4b, 0xd9, 0xbf, 0x2f, 0x9e, 0x0d, 0xc9, 0x6c, 0xfa, + 0x3c, 0xd9, 0x00, 0x3c, 0xe2, 0xb6, 0x6f, 0xdd, 0x7a, 0x9c, 0x89, 0x98, 0x0d, 0xdc, 0x14, 0x92, + 0x1f, 0x3c, 0xce, 0xd2, 0x29, 0x4a, 0x0c, 0x99, 0x20, 0x55, 0xd5, 0x14, 0x95, 0xbc, 0xe3, 0x0c, + 0xd0, 0xfd, 0x67, 0x17, 0xf6, 0x72, 0x65, 0xfa, 0xb0, 0x41, 0x43, 0x68, 0x15, 0x5e, 0x04, 0xe8, + 0xd9, 0xe2, 0x54, 0x2a, 0x26, 0x45, 0xef, 0x2c, 0xe7, 0x1b, 0xf5, 0x0b, 0x23, 0x80, 0x9d, 0x79, + 0x22, 0x42, 0x5f, 0xac, 0x4a, 0x58, 0x69, 0x3c, 0x73, 0x3d, 0x7e, 0x43, 0x03, 0x38, 0x58, 0x20, + 0x1a, 0xd4, 0x5d, 0xc9, 0x49, 0x81, 0xf7, 0xf4, 0xe5, 0x14, 0x84, 0x18, 0xec, 0x16, 0x39, 0x01, + 0x55, 0x54, 0x45, 0x49, 0x51, 0xfa, 0x97, 0xab, 0x03, 0xe4, 0xc1, 0x7e, 0x4f, 0x1f, 0x62, 0xe5, + 0x45, 0x8f, 0xbe, 0xae, 0x3c, 0xdb, 0x12, 0x76, 0xd2, 0x5f, 0xae, 0x89, 0x92, 0x49, 0x30, 0xd8, + 0x2d, 0x2e, 0xfc, 0xaa, 0x93, 0x2b, 0x49, 0xa5, 0xea, 0xe4, 0x6a, 0x2e, 0x41, 0x63, 0x68, 0x15, + 0x28, 0x01, 0x55, 0xcc, 0x84, 0x8a, 0x74, 0xaa, 0x66, 0x56, 0xc9, 0x35, 0x69, 0x83, 0xe7, 0x19, + 0xa3, 0xba, 0xc1, 0x0a, 0x56, 0xaa, 0x6e, 0xb0, 0x8a, 0x8c, 0x50, 0x00, 0x87, 0xd9, 0xae, 0xff, + 0x9e, 0x46, 0x41, 0x1e, 0xfa, 0x79, 0xc5, 0x8d, 0x2b, 0xf2, 0x86, 0xfe, 0xf9, 0x2a, 0xa6, 0x32, + 0xdc, 0x10, 
0x5a, 0x05, 0x1a, 0x51, 0x2c, 0x80, 0x12, 0xcd, 0x7c, 0xb4, 0x98, 0x6a, 0x5a, 0x9a, + 0xc0, 0x5e, 0x89, 0x3a, 0x50, 0x75, 0x71, 0x54, 0x6b, 0xe7, 0xab, 0x35, 0x10, 0xb3, 0xb1, 0x29, + 0xd0, 0x4b, 0xd5, 0xd8, 0xa8, 0x08, 0xac, 0xfa, 0xa4, 0x0a, 0xde, 0x42, 0x7f, 0xd6, 0x04, 0xc1, + 0x2a, 0x68, 0x07, 0x9d, 0x57, 0xe6, 0xbf, 0x9c, 0xea, 0xf4, 0xde, 0xfa, 0x40, 0x99, 0xcd, 0x6f, + 0x70, 0xb0, 0x40, 0x46, 0x55, 0x9b, 0x70, 0x19, 0x1d, 0xea, 0x2f, 0xd6, 0xc2, 0xc8, 0xe8, 0x16, + 0x6c, 0xa4, 0xf4, 0x86, 0x9e, 0x96, 0xe1, 0xf2, 0xd7, 0xf2, 0x0a, 0xab, 0xa8, 0x8a, 0x2f, 0x6f, + 0xa0, 0x95, 0x7e, 0xbf, 0x98, 0x8a, 0x9f, 0x18, 0xff, 0x53, 0x9c, 0x0b, 0x0e, 0xba, 0x13, 0x05, + 0x66, 0x48, 0xf8, 0x8d, 0xef, 0x7d, 0x28, 0xf9, 0xb8, 0x38, 0x28, 0xf1, 0xea, 0xfb, 0xc1, 0x4f, + 0x97, 0x43, 0x8f, 0xdf, 0xc6, 0x03, 0xd3, 0x89, 0x82, 0x8e, 0x44, 0x75, 0x72, 0x54, 0xc7, 0xf1, + 0x3d, 0x12, 0xf2, 0xce, 0x30, 0x12, 0x7f, 0x1c, 0xcc, 0xe4, 0x85, 0xbf, 0x1f, 0xd8, 0x60, 0x43, + 0xdc, 0xc0, 0x17, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0x39, 0x12, 0x2a, 0x80, 0x97, 0x10, 0x00, + 0x00, +} diff --git a/client/gogrpc/conductor/model/dynamicforkjointask.pb.go b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go index 5fc8eb16ce..f7d710983a 100644 --- a/client/gogrpc/conductor/model/dynamicforkjointask.pb.go +++ b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go @@ -34,7 +34,7 @@ func (m *DynamicForkJoinTask) Reset() { *m = DynamicForkJoinTask{} } func (m *DynamicForkJoinTask) String() string { return proto.CompactTextString(m) } func (*DynamicForkJoinTask) ProtoMessage() {} func (*DynamicForkJoinTask) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointask_d18821af65a16be7, []int{0} + return fileDescriptor_dynamicforkjointask_60f4ea3626679478, []int{0} } func (m *DynamicForkJoinTask) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DynamicForkJoinTask.Unmarshal(m, b) @@ -90,35 +90,35 @@ func (m *DynamicForkJoinTask) GetType() string { } func init() { - proto.RegisterType((*DynamicForkJoinTask)(nil), "com.netflix.conductor.proto.DynamicForkJoinTask") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.DynamicForkJoinTask.InputEntry") + proto.RegisterType((*DynamicForkJoinTask)(nil), "conductor.proto.DynamicForkJoinTask") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.DynamicForkJoinTask.InputEntry") } func init() { - proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_d18821af65a16be7) + proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_60f4ea3626679478) } -var fileDescriptor_dynamicforkjointask_d18821af65a16be7 = []byte{ - // 324 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x91, 0x4f, 0x4b, 0x03, 0x31, - 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x15, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, - 0x50, 0x2f, 0x52, 0x6f, 0x45, 0x05, 0x3d, 0x48, 0xad, 0xe2, 0xc1, 0x8b, 0xec, 0xa6, 0xb3, 0x6b, - 0xdc, 0xdd, 0xcc, 0x92, 0x66, 0xad, 0xfb, 0xc9, 0xfc, 0x7a, 0x92, 0xa4, 0xad, 0x1e, 0x8a, 0xb7, - 0xd9, 0x37, 0xbf, 0x9d, 0x37, 0x6f, 0x42, 0x8e, 0x33, 0x9c, 0x41, 0xca, 0x67, 0xa5, 0x0a, 0x32, - 0x29, 0x22, 0xd4, 0xc9, 0x07, 0x4a, 0x65, 0x82, 0x79, 0xc2, 0x72, 0x8d, 0x06, 0x69, 0x4f, 0x60, - 0xc6, 0x14, 0x98, 0x28, 0x95, 0x5f, 0x4c, 0xa0, 0x9a, 0x15, 0xc2, 0xa0, 0xf6, 0xcd, 0xee, 0x51, - 0x8c, 0x18, 0xa7, 0xc0, 0xdd, 0x57, 0x58, 0x44, 0x7c, 0x6e, 0x74, 0x21, 0x8c, 0xef, 0x9e, 0x7c, - 0x57, 0xc9, 
0xfe, 0xb5, 0x1f, 0x7c, 0x8b, 0x3a, 0xb9, 0x47, 0xa9, 0x9e, 0x83, 0x79, 0x42, 0x7b, - 0xa4, 0x69, 0x0d, 0xde, 0x54, 0x90, 0x41, 0xa7, 0xd2, 0xaf, 0x0c, 0x9a, 0xd3, 0x6d, 0x2b, 0x3c, - 0x04, 0x19, 0xd0, 0x53, 0xd2, 0x5e, 0xa0, 0x4e, 0xa2, 0x14, 0x17, 0x1e, 0xa8, 0x3a, 0x60, 0x67, - 0x25, 0x3a, 0xe8, 0x8c, 0xec, 0x6a, 0x88, 0x40, 0x83, 0x12, 0xe0, 0xa9, 0x9a, 0xa3, 0xda, 0x6b, - 0xd5, 0x61, 0x8f, 0xa4, 0x21, 0x55, 0x5e, 0x98, 0x4e, 0xbd, 0x5f, 0x1b, 0xb4, 0x86, 0x57, 0xec, - 0x9f, 0x2c, 0x6c, 0xc3, 0xa6, 0xec, 0xce, 0xfe, 0x7d, 0xa3, 0x8c, 0x2e, 0xa7, 0x7e, 0x12, 0xa5, - 0xa4, 0x6e, 0xca, 0x1c, 0x3a, 0x0d, 0xe7, 0xe7, 0xea, 0xee, 0x84, 0x90, 0x5f, 0x90, 0xee, 0x91, - 0x5a, 0x02, 0xe5, 0x32, 0x97, 0x2d, 0xe9, 0x39, 0x69, 0x7c, 0x06, 0x69, 0xe1, 0xa3, 0xb4, 0x86, - 0x87, 0xcc, 0x5f, 0x8d, 0xad, 0xae, 0xc6, 0x5e, 0x6c, 0x77, 0xea, 0xa1, 0x51, 0xf5, 0xb2, 0x32, - 0x7e, 0x1a, 0x1f, 0x6c, 0x58, 0x67, 0x12, 0xbe, 0x8e, 0x62, 0x69, 0xde, 0x8b, 0xd0, 0x06, 0xe1, - 0xcb, 0x20, 0x7c, 0x1d, 0x84, 0x8b, 0x54, 0x82, 0x32, 0x3c, 0xc6, 0x58, 0xe7, 0xe2, 0x8f, 0xee, - 0xde, 0x38, 0xdc, 0x72, 0x7e, 0x17, 0x3f, 0x01, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xd6, 0x52, 0xf3, - 0xf3, 0x01, 0x00, 0x00, +var fileDescriptor_dynamicforkjointask_60f4ea3626679478 = []byte{ + // 325 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0x4f, 0x4b, 0x03, 0x31, + 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x55, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, + 0x50, 0x2f, 0xd2, 0x63, 0xb1, 0x82, 0x1e, 0xa4, 0x14, 0xf1, 0xe0, 0x45, 0x76, 0xd3, 0xd9, 0x35, + 0xee, 0x6e, 0x66, 0x49, 0x13, 0xeb, 0x7e, 0x26, 0xbf, 0xa4, 0x6c, 0xd2, 0x56, 0x29, 0xbd, 0x4d, + 0xde, 0xfc, 0xde, 0xe4, 0x65, 0x42, 0x2e, 0x32, 0x9c, 0x43, 0xca, 0xe7, 0x85, 0x0a, 0x32, 0x29, + 0x22, 0xd4, 0xc9, 0x27, 0x4a, 0x65, 0x82, 0x45, 0xc2, 0x72, 0x8d, 0x06, 0xe9, 0x91, 0x40, 0x35, + 0xb7, 0xc2, 0xa0, 0xf6, 0x42, 0xf7, 0x3c, 0x46, 0x8c, 0x53, 0xe0, 0xee, 0x14, 0xda, 0x88, 0x2f, + 0x8c, 0xb6, 0xc2, 0xf8, 0xee, 0xe5, 0x4f, 0x95, 0x9c, 0xdc, 0xfb, 0x61, 0x0f, 0xa8, 0x93, 0x27, + 0x94, 0xea, 0x25, 0x58, 0x24, 0xb4, 0x47, 0x9a, 0xe5, 0xd0, 0x77, 0x15, 0x64, 0xd0, 0xa9, 0xf4, + 0x2b, 0x83, 0xe6, 0x6c, 0xbf, 0x14, 0x9e, 0x83, 0x0c, 0xe8, 0x15, 0x69, 0x2f, 0x51, 0x27, 0x51, + 0x8a, 0x4b, 0x0f, 0x54, 0x1d, 0x70, 0xb0, 0x16, 0x1d, 0x74, 0x4d, 0x0e, 0x35, 0x44, 0xa0, 0x41, + 0x09, 0xf0, 0x54, 0xcd, 0x51, 0xed, 0x8d, 0xea, 0xb0, 0x09, 0x69, 0x48, 0x95, 0x5b, 0xd3, 0xa9, + 0xf7, 0x6b, 0x83, 0xd6, 0x90, 0xb3, 0xad, 0xfc, 0x6c, 0x47, 0x3a, 0xf6, 0x58, 0x3a, 0x26, 0xca, + 0xe8, 0x62, 0xe6, 0xdd, 0x94, 0x92, 0xba, 0x29, 0x72, 0xe8, 0x34, 0xdc, 0x1d, 0xae, 0xee, 0x4e, + 0x09, 0xf9, 0x03, 0xe9, 0x31, 0xa9, 0x25, 0x50, 0xac, 0xde, 0x52, 0x96, 0xf4, 0x86, 0x34, 0xbe, + 0x82, 0xd4, 0xfa, 0xf8, 0xad, 0xe1, 0x19, 0xf3, 0x9b, 0x62, 0xeb, 0x4d, 0xb1, 0xd7, 0xb2, 0x3b, + 0xf3, 0xd0, 0xa8, 0x7a, 0x57, 0x19, 0xe7, 0xa4, 0x27, 0x30, 0x63, 0x0a, 0x4c, 0x94, 0xca, 0xef, + 0xed, 0xa8, 0xe3, 0xd3, 0x1d, 0x59, 0xa7, 0xe1, 0xdb, 0x28, 0x96, 0xe6, 0xc3, 0x86, 0x4c, 0x60, + 0xc6, 0x57, 0x56, 0xbe, 0xb1, 0x72, 0x91, 0x4a, 0x50, 0x86, 0xc7, 0x18, 0xeb, 0x5c, 0xfc, 0xd3, + 0xdd, 0x47, 0x87, 0x7b, 0x6e, 0xf2, 0xed, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0x6d, 0xe6, 0x21, + 0x30, 0xf8, 0x01, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go index 007f73e2a7..4dac9221d2 100644 --- a/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go +++ 
b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go @@ -29,7 +29,7 @@ func (m *DynamicForkJoinTaskList) Reset() { *m = DynamicForkJoinTaskList func (m *DynamicForkJoinTaskList) String() string { return proto.CompactTextString(m) } func (*DynamicForkJoinTaskList) ProtoMessage() {} func (*DynamicForkJoinTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointasklist_67e040324fd05201, []int{0} + return fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e, []int{0} } func (m *DynamicForkJoinTaskList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DynamicForkJoinTaskList.Unmarshal(m, b) @@ -57,26 +57,26 @@ func (m *DynamicForkJoinTaskList) GetDynamicTasks() []*DynamicForkJoinTask { } func init() { - proto.RegisterType((*DynamicForkJoinTaskList)(nil), "com.netflix.conductor.proto.DynamicForkJoinTaskList") + proto.RegisterType((*DynamicForkJoinTaskList)(nil), "conductor.proto.DynamicForkJoinTaskList") } func init() { - proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_67e040324fd05201) + proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e) } -var fileDescriptor_dynamicforkjointasklist_67e040324fd05201 = []byte{ - // 195 bytes of a gzipped FileDescriptorProto +var fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e = []byte{ + // 200 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xce, 0xcd, 0x4f, 0x49, 0xcd, 0xd1, 0x4f, 0xa9, 0xcc, 0x4b, 0xcc, 0xcd, 0x4c, 0x4e, 0xcb, 0x2f, 0xca, 0xce, 0xca, 0xcf, 0xcc, 0x2b, 0x49, 0x2c, 0xce, 0xce, 0xc9, 0x2c, 0x2e, 0xd1, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, - 0x92, 0x4e, 0xce, 0xcf, 0xd5, 0xcb, 0x4b, 0x2d, 0x49, 0xcb, 0xc9, 0xac, 0xd0, 0x4b, 0xce, 0xcf, - 0x4b, 0x29, 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x48, 0x4a, 0xc9, 0xe3, 0x34, 0x01, 0xa2, 0x40, 0xa9, - 0x80, 0x4b, 0xdc, 0x05, 0x22, 0xe9, 0x96, 0x5f, 0x94, 0xed, 0x95, 0x9f, 0x99, 0x17, 0x92, 0x58, - 0x9c, 0xed, 0x93, 0x59, 0x5c, 0x22, 0x14, 0xca, 0xc5, 0x0b, 0xd5, 0x17, 0x0f, 0xd2, 0x50, 0x2c, - 0xc1, 0xa8, 0xc0, 0xac, 0xc1, 0x6d, 0x64, 0xa0, 0x87, 0xc7, 0x42, 0x3d, 0x2c, 0x86, 0x05, 0xf1, - 0x40, 0x8d, 0x01, 0x71, 0x8a, 0x9d, 0xc2, 0x9d, 0x24, 0x71, 0xd8, 0x18, 0x90, 0x14, 0x65, 0x95, - 0x9e, 0x59, 0x92, 0x51, 0x9a, 0x04, 0xb2, 0x42, 0x1f, 0x6a, 0x85, 0x3e, 0xdc, 0x0a, 0xfd, 0xe4, - 0x9c, 0xcc, 0xd4, 0xbc, 0x12, 0xfd, 0xf4, 0xfc, 0xf4, 0xa2, 0x82, 0x64, 0x24, 0x71, 0xb0, 0x27, - 0x93, 0xd8, 0xc0, 0x2e, 0x30, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x9f, 0xb2, 0x2f, 0xcc, 0x36, - 0x01, 0x00, 0x00, + 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x48, 0xc9, 0xe3, 0xd4, + 0x05, 0x51, 0xa0, 0x94, 0xc2, 0x25, 0xee, 0x02, 0x91, 0x74, 0xcb, 0x2f, 0xca, 0xf6, 0xca, 0xcf, + 0xcc, 0x0b, 0x49, 0x2c, 0xce, 0xf6, 0xc9, 0x2c, 0x2e, 0x11, 0xf2, 0xe4, 0xe2, 0x85, 0xea, 0x8b, + 0x07, 0x69, 0x28, 0x96, 0x60, 0x54, 0x60, 0xd6, 0xe0, 0x36, 0x52, 0xd1, 0x43, 0xb3, 0x44, 0x0f, + 0x8b, 0x01, 0x41, 0x3c, 0x50, 0xad, 0x20, 0x4e, 0xb1, 0x53, 0x09, 0x97, 0x74, 0x72, 0x7e, 0xae, + 0x5e, 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x05, 0xba, 0x01, 0x4e, 0x92, 0x38, 0x9c, 0x10, 0x90, + 0x14, 0x65, 0x95, 0x9e, 0x59, 0x92, 0x51, 0x9a, 0xa4, 0x97, 0x9c, 0x9f, 0xab, 0x0f, 0xd5, 0xae, + 0x0f, 0xd7, 0xae, 0x9f, 0x9c, 0x93, 0x99, 0x9a, 0x57, 0xa2, 0x9f, 0x9e, 0x9f, 0x5e, 0x54, 0x90, + 0x8c, 0x24, 0x0e, 0xf6, 0x75, 0x12, 0x1b, 0xd8, 0x74, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x76, 0xa8, 0x2e, 0xed, 0x3b, 0x01, 
0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/eventexecution.pb.go b/client/gogrpc/conductor/model/eventexecution.pb.go index 71aa03f168..a08e1b4655 100644 --- a/client/gogrpc/conductor/model/eventexecution.pb.go +++ b/client/gogrpc/conductor/model/eventexecution.pb.go @@ -45,7 +45,7 @@ func (x EventExecution_Status) String() string { return proto.EnumName(EventExecution_Status_name, int32(x)) } func (EventExecution_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_493ae02b3cb3fbdc, []int{0, 0} + return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0, 0} } type EventExecution struct { @@ -54,7 +54,8 @@ type EventExecution struct { Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` Event string `protobuf:"bytes,4,opt,name=event" json:"event,omitempty"` Created int64 `protobuf:"varint,5,opt,name=created" json:"created,omitempty"` - Status EventExecution_Status `protobuf:"varint,6,opt,name=status,enum=com.netflix.conductor.proto.EventExecution_Status" json:"status,omitempty"` + Status EventExecution_Status `protobuf:"varint,6,opt,name=status,enum=conductor.proto.EventExecution_Status" json:"status,omitempty"` + Action EventHandler_Action_Type `protobuf:"varint,7,opt,name=action,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -65,7 +66,7 @@ func (m *EventExecution) Reset() { *m = EventExecution{} } func (m *EventExecution) String() string { return proto.CompactTextString(m) } func (*EventExecution) ProtoMessage() {} func (*EventExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_493ae02b3cb3fbdc, []int{0} + return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0} } func (m *EventExecution) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventExecution.Unmarshal(m, b) @@ -127,6 +128,13 @@ func (m *EventExecution) GetStatus() EventExecution_Status { return EventExecution_IN_PROGRESS } +func (m *EventExecution) GetAction() EventHandler_Action_Type { + if m != nil { + return m.Action + } + return EventHandler_Action_START_WORKFLOW +} + func (m *EventExecution) GetOutput() map[string]*_struct.Value { if m != nil { return m.Output @@ -135,40 +143,43 @@ func (m *EventExecution) GetOutput() map[string]*_struct.Value { } func init() { - proto.RegisterType((*EventExecution)(nil), "com.netflix.conductor.proto.EventExecution") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.EventExecution.OutputEntry") - proto.RegisterEnum("com.netflix.conductor.proto.EventExecution_Status", EventExecution_Status_name, EventExecution_Status_value) + proto.RegisterType((*EventExecution)(nil), "conductor.proto.EventExecution") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventExecution.OutputEntry") + proto.RegisterEnum("conductor.proto.EventExecution_Status", EventExecution_Status_name, EventExecution_Status_value) } func init() { - proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_493ae02b3cb3fbdc) -} - -var fileDescriptor_eventexecution_493ae02b3cb3fbdc = []byte{ - // 393 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x52, 0x41, 0x8f, 0x94, 0x30, - 0x18, 0x15, 0xd8, 0x61, 0x9d, 0x8f, 0x38, 
0x92, 0xc6, 0x98, 0x66, 0xd4, 0x84, 0xec, 0x89, 0x83, - 0x29, 0x09, 0x1e, 0x34, 0x7b, 0xdb, 0x75, 0xaa, 0x41, 0xd7, 0x05, 0xc1, 0x78, 0xf0, 0xb2, 0x81, - 0xd2, 0x45, 0x22, 0xd0, 0x09, 0xb4, 0x9b, 0x99, 0x3f, 0xe6, 0xef, 0x33, 0x14, 0xc6, 0xcc, 0x5c, - 0x4c, 0xbc, 0x7d, 0xdf, 0x7b, 0xfd, 0x5e, 0xde, 0x7b, 0x29, 0xac, 0x5b, 0x51, 0xf2, 0x26, 0xe0, - 0x0f, 0xbc, 0x93, 0x7c, 0xc7, 0x99, 0x92, 0xb5, 0xe8, 0xc8, 0xb6, 0x17, 0x52, 0xa0, 0x17, 0x4c, - 0xb4, 0xa4, 0xe3, 0xf2, 0xbe, 0xa9, 0x77, 0x84, 0x89, 0xae, 0x54, 0x4c, 0x8a, 0x7e, 0x22, 0xd7, - 0x2f, 0x2b, 0x21, 0xaa, 0x86, 0x07, 0x7a, 0x2b, 0xd4, 0x7d, 0x30, 0xc8, 0x5e, 0x31, 0x39, 0xb1, - 0x17, 0xbf, 0x2d, 0x58, 0xd1, 0x51, 0x93, 0x1e, 0x34, 0xd1, 0x0a, 0xcc, 0xba, 0xc4, 0x86, 0x67, - 0xf8, 0xcb, 0xd4, 0xac, 0x4b, 0xf4, 0x0a, 0xa0, 0xe5, 0xc3, 0x90, 0x57, 0xfc, 0xae, 0x2e, 0xb1, - 0xa9, 0xf1, 0xe5, 0x8c, 0x44, 0x25, 0x42, 0x70, 0xd6, 0xe5, 0x2d, 0xc7, 0x96, 0x26, 0xf4, 0x8c, - 0x9e, 0xc1, 0x42, 0x1b, 0xc5, 0x67, 0x1a, 0x9c, 0x16, 0x84, 0xe1, 0x9c, 0xf5, 0x3c, 0x97, 0xbc, - 0xc4, 0x0b, 0xcf, 0xf0, 0xad, 0xf4, 0xb0, 0xa2, 0x4f, 0x60, 0x0f, 0x32, 0x97, 0x6a, 0xc0, 0xb6, - 0x67, 0xf8, 0xab, 0x30, 0x24, 0xff, 0x48, 0x44, 0x4e, 0xfd, 0x92, 0x4c, 0x5f, 0xa6, 0xb3, 0x02, - 0x8a, 0xc1, 0x16, 0x4a, 0x6e, 0x95, 0xc4, 0x8f, 0x3d, 0xcb, 0x77, 0xc2, 0xb7, 0xff, 0xa3, 0x15, - 0xeb, 0x4b, 0xda, 0xc9, 0x7e, 0x9f, 0xce, 0x32, 0xeb, 0xaf, 0xe0, 0x1c, 0xc1, 0xc8, 0x05, 0xeb, - 0x17, 0xdf, 0xcf, 0xfd, 0x8c, 0x23, 0x7a, 0x0d, 0x8b, 0x87, 0xbc, 0x51, 0x5c, 0x77, 0xe3, 0x84, - 0xcf, 0xc9, 0xd4, 0x38, 0x39, 0x34, 0x4e, 0xbe, 0x8f, 0x6c, 0x3a, 0x3d, 0xba, 0x34, 0xdf, 0x19, - 0x17, 0x57, 0x60, 0x4f, 0xae, 0xd1, 0x53, 0x70, 0xa2, 0xdb, 0xbb, 0x24, 0x8d, 0x3f, 0xa6, 0x34, - 0xcb, 0xdc, 0x47, 0xe8, 0x09, 0x2c, 0xdf, 0xc7, 0x5f, 0x92, 0x1b, 0xfa, 0x8d, 0x6e, 0x5c, 0x03, - 0x01, 0xd8, 0x1f, 0xae, 0xa2, 0x1b, 0xba, 0x71, 0x4d, 0xe4, 0xc0, 0x79, 0xf6, 0x39, 0x4a, 0x12, - 0xba, 0x71, 0xad, 0xeb, 0xdb, 0x6b, 0xf7, 0xd4, 0x7b, 0x52, 0xfc, 0xb8, 0xac, 0x6a, 0xf9, 0x53, - 0x15, 0x63, 0xe0, 0x60, 0x0e, 0x1c, 0xfc, 0x0d, 0x1c, 0xb0, 0xa6, 0xe6, 0x9d, 0x0c, 0x2a, 0x51, - 0xf5, 0x5b, 0x76, 0x84, 0xeb, 0x8f, 0x55, 0xd8, 0xda, 0xed, 0x9b, 0x3f, 0x01, 0x00, 0x00, 0xff, - 0xff, 0xa4, 0x08, 0x46, 0x66, 0x68, 0x02, 0x00, 0x00, + proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_680c67ac3fada8e2) +} + +var fileDescriptor_eventexecution_680c67ac3fada8e2 = []byte{ + // 434 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8b, 0xd3, 0x40, + 0x14, 0xc6, 0x4d, 0xb2, 0x4d, 0xed, 0x0b, 0x76, 0xc3, 0x20, 0x32, 0x54, 0x85, 0xb2, 0x07, 0xa9, + 0x28, 0x13, 0xa8, 0x17, 0xd9, 0x83, 0xd0, 0xdd, 0x46, 0x2d, 0xae, 0x36, 0xa6, 0x8b, 0x07, 0x2f, + 0x4b, 0x3a, 0x79, 0x9b, 0x0d, 0x9b, 0xcc, 0x94, 0x64, 0x66, 0xd9, 0xfe, 0xb9, 0xfe, 0x27, 0xd2, + 0x49, 0x22, 0xdd, 0x22, 0xec, 0x6d, 0xde, 0xf7, 0x7d, 0xbf, 0xe4, 0xbd, 0x37, 0x03, 0xa3, 0x52, + 0xa6, 0x58, 0x04, 0x78, 0x87, 0x42, 0xe1, 0x3d, 0x72, 0xad, 0x72, 0x29, 0xd8, 0xa6, 0x92, 0x4a, + 0x92, 0x63, 0x2e, 0x45, 0xaa, 0xb9, 0x92, 0x55, 0x23, 0x8c, 0xe8, 0x5e, 0xf8, 0x26, 0x11, 0x69, + 0x81, 0x9d, 0xf3, 0x2a, 0x93, 0x32, 0x2b, 0x30, 0x30, 0xd5, 0x5a, 0x5f, 0x07, 0xb5, 0xaa, 0x34, + 0x57, 0x8d, 0x7b, 0xf2, 0xc7, 0x81, 0x61, 0xb8, 0x83, 0xc2, 0xee, 0x0f, 0x64, 0x08, 0x76, 0x9e, + 0x52, 0x6b, 0x6c, 0x4d, 0x06, 0xb1, 0x9d, 0xa7, 0xe4, 0x35, 0x40, 0x89, 0x75, 0x9d, 0x64, 0x78, + 0x95, 0xa7, 0xd4, 0x36, 0xfa, 0xa0, 0x55, 0x16, 0x29, 0x21, 0x70, 0x24, 0x92, 0x12, 0xa9, 
0x63, + 0x0c, 0x73, 0x26, 0xcf, 0xa1, 0x67, 0x3a, 0xa1, 0x47, 0x46, 0x6c, 0x0a, 0x42, 0xa1, 0xcf, 0x2b, + 0x4c, 0x14, 0xa6, 0xb4, 0x37, 0xb6, 0x26, 0x4e, 0xdc, 0x95, 0xe4, 0x13, 0xb8, 0xb5, 0x4a, 0x94, + 0xae, 0xa9, 0x3b, 0xb6, 0x26, 0xc3, 0xe9, 0x1b, 0x76, 0x30, 0x1f, 0x7b, 0xd8, 0x23, 0x5b, 0x99, + 0x74, 0xdc, 0x52, 0x64, 0x06, 0x6e, 0xc2, 0x77, 0x06, 0xed, 0x1b, 0xfe, 0xed, 0xff, 0xf9, 0xaf, + 0xed, 0x62, 0x66, 0x26, 0xcb, 0x2e, 0xb7, 0x1b, 0x8c, 0x5b, 0x90, 0x9c, 0x83, 0x2b, 0xb5, 0xda, + 0x68, 0x45, 0x9f, 0x8e, 0x9d, 0x89, 0x37, 0x7d, 0xf7, 0x58, 0x0b, 0x4b, 0x93, 0x0e, 0x85, 0xaa, + 0xb6, 0x71, 0x8b, 0x8e, 0x7e, 0x82, 0xb7, 0x27, 0x13, 0x1f, 0x9c, 0x5b, 0xdc, 0xb6, 0xab, 0xdc, + 0x1d, 0xc9, 0x7b, 0xe8, 0xdd, 0x25, 0x85, 0x46, 0xb3, 0x46, 0x6f, 0xfa, 0x82, 0x35, 0x97, 0xc3, + 0xba, 0xcb, 0x61, 0xbf, 0x76, 0x6e, 0xdc, 0x84, 0x4e, 0xed, 0x8f, 0xd6, 0xc9, 0x0c, 0xdc, 0x66, + 0x58, 0x72, 0x0c, 0xde, 0xe2, 0xc7, 0x55, 0x14, 0x2f, 0xbf, 0xc4, 0xe1, 0x6a, 0xe5, 0x3f, 0x21, + 0xcf, 0x60, 0x70, 0xbe, 0xfc, 0x1e, 0x5d, 0x84, 0x97, 0xe1, 0xdc, 0xb7, 0x08, 0x80, 0xfb, 0x79, + 0xb6, 0xb8, 0x08, 0xe7, 0xbe, 0x4d, 0x3c, 0xe8, 0xaf, 0xbe, 0x2d, 0xa2, 0x28, 0x9c, 0xfb, 0xce, + 0xd9, 0x2d, 0xbc, 0xe4, 0xb2, 0x64, 0x02, 0xd5, 0x75, 0x91, 0xdf, 0x1f, 0xce, 0x75, 0xe6, 0x3f, + 0x1c, 0x2c, 0x5a, 0xff, 0x3e, 0xcd, 0x72, 0x75, 0xa3, 0xd7, 0x8c, 0xcb, 0x32, 0x68, 0xa9, 0xe0, + 0x1f, 0x15, 0xf0, 0x22, 0x47, 0xa1, 0x82, 0x4c, 0x66, 0xd5, 0x86, 0xef, 0xe9, 0xe6, 0x05, 0xae, + 0x5d, 0xf3, 0xd1, 0x0f, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, 0x30, 0x90, 0x3d, 0xc6, 0xbe, 0x02, + 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/eventhandler.pb.go b/client/gogrpc/conductor/model/eventhandler.pb.go new file mode 100644 index 0000000000..f64569a388 --- /dev/null +++ b/client/gogrpc/conductor/model/eventhandler.pb.go @@ -0,0 +1,379 @@ +// Code generated by protoc-gen-go. DO NOT EDIT. +// source: model/eventhandler.proto + +package model // import "github.com/netflix/conductor/client/gogrpc/conductor/model" + +import proto "github.com/golang/protobuf/proto" +import fmt "fmt" +import math "math" +import any "github.com/golang/protobuf/ptypes/any" +import _struct "github.com/golang/protobuf/ptypes/struct" + +// Reference imports to suppress errors if they are not otherwise used. +var _ = proto.Marshal +var _ = fmt.Errorf +var _ = math.Inf + +// This is a compile-time assertion to ensure that this generated file +// is compatible with the proto package it is being compiled against. +// A compilation error at this line likely means your copy of the +// proto package needs to be updated. 
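For orientation, the EventHandler message generated below (with its nested Action, StartWorkflow, and TaskDetails types) is consumed from Go roughly as follows. This is a minimal sketch, not part of the patch: it assumes the generated package is imported as `model`, and every literal value in it is illustrative.

    handler := &model.EventHandler{
        Name:   "hourly_sync_handler", // illustrative handler name
        Event:  "sqs:sync_queue",      // illustrative event reference
        Active: true,
        Actions: []*model.EventHandler_Action{{
            // Action and StartWorkflow use the types defined in this file.
            Action: model.EventHandler_Action_START_WORKFLOW,
            StartWorkflow: &model.EventHandler_StartWorkflow{
                Name:    "sync_workflow", // illustrative workflow name
                Version: 1,
            },
        }},
    }
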
+const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package + +type EventHandler_Action_Type int32 + +const ( + EventHandler_Action_START_WORKFLOW EventHandler_Action_Type = 0 + EventHandler_Action_COMPLETE_TASK EventHandler_Action_Type = 1 + EventHandler_Action_FAIL_TASK EventHandler_Action_Type = 2 +) + +var EventHandler_Action_Type_name = map[int32]string{ + 0: "START_WORKFLOW", + 1: "COMPLETE_TASK", + 2: "FAIL_TASK", +} +var EventHandler_Action_Type_value = map[string]int32{ + "START_WORKFLOW": 0, + "COMPLETE_TASK": 1, + "FAIL_TASK": 2, +} + +func (x EventHandler_Action_Type) String() string { + return proto.EnumName(EventHandler_Action_Type_name, int32(x)) +} +func (EventHandler_Action_Type) EnumDescriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2, 0} +} + +type EventHandler struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Event string `protobuf:"bytes,2,opt,name=event" json:"event,omitempty"` + Condition string `protobuf:"bytes,3,opt,name=condition" json:"condition,omitempty"` + Actions []*EventHandler_Action `protobuf:"bytes,4,rep,name=actions" json:"actions,omitempty"` + Active bool `protobuf:"varint,5,opt,name=active" json:"active,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler) Reset() { *m = EventHandler{} } +func (m *EventHandler) String() string { return proto.CompactTextString(m) } +func (*EventHandler) ProtoMessage() {} +func (*EventHandler) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0} +} +func (m *EventHandler) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler.Unmarshal(m, b) +} +func (m *EventHandler) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler.Marshal(b, m, deterministic) +} +func (dst *EventHandler) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler.Merge(dst, src) +} +func (m *EventHandler) XXX_Size() int { + return xxx_messageInfo_EventHandler.Size(m) +} +func (m *EventHandler) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler proto.InternalMessageInfo + +func (m *EventHandler) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventHandler) GetEvent() string { + if m != nil { + return m.Event + } + return "" +} + +func (m *EventHandler) GetCondition() string { + if m != nil { + return m.Condition + } + return "" +} + +func (m *EventHandler) GetActions() []*EventHandler_Action { + if m != nil { + return m.Actions + } + return nil +} + +func (m *EventHandler) GetActive() bool { + if m != nil { + return m.Active + } + return false +} + +type EventHandler_StartWorkflow struct { + Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + InputMessage *any.Any `protobuf:"bytes,5,opt,name=input_message,json=inputMessage" json:"input_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m 
*EventHandler_StartWorkflow) Reset() { *m = EventHandler_StartWorkflow{} } +func (m *EventHandler_StartWorkflow) String() string { return proto.CompactTextString(m) } +func (*EventHandler_StartWorkflow) ProtoMessage() {} +func (*EventHandler_StartWorkflow) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 0} +} +func (m *EventHandler_StartWorkflow) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_StartWorkflow.Unmarshal(m, b) +} +func (m *EventHandler_StartWorkflow) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_StartWorkflow.Marshal(b, m, deterministic) +} +func (dst *EventHandler_StartWorkflow) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler_StartWorkflow.Merge(dst, src) +} +func (m *EventHandler_StartWorkflow) XXX_Size() int { + return xxx_messageInfo_EventHandler_StartWorkflow.Size(m) +} +func (m *EventHandler_StartWorkflow) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_StartWorkflow.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_StartWorkflow proto.InternalMessageInfo + +func (m *EventHandler_StartWorkflow) GetName() string { + if m != nil { + return m.Name + } + return "" +} + +func (m *EventHandler_StartWorkflow) GetVersion() int32 { + if m != nil { + return m.Version + } + return 0 +} + +func (m *EventHandler_StartWorkflow) GetCorrelationId() string { + if m != nil { + return m.CorrelationId + } + return "" +} + +func (m *EventHandler_StartWorkflow) GetInput() map[string]*_struct.Value { + if m != nil { + return m.Input + } + return nil +} + +func (m *EventHandler_StartWorkflow) GetInputMessage() *any.Any { + if m != nil { + return m.InputMessage + } + return nil +} + +type EventHandler_TaskDetails struct { + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,3,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + OutputMessage *any.Any `protobuf:"bytes,4,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler_TaskDetails) Reset() { *m = EventHandler_TaskDetails{} } +func (m *EventHandler_TaskDetails) String() string { return proto.CompactTextString(m) } +func (*EventHandler_TaskDetails) ProtoMessage() {} +func (*EventHandler_TaskDetails) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 1} +} +func (m *EventHandler_TaskDetails) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_TaskDetails.Unmarshal(m, b) +} +func (m *EventHandler_TaskDetails) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_TaskDetails.Marshal(b, m, deterministic) +} +func (dst *EventHandler_TaskDetails) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler_TaskDetails.Merge(dst, src) +} +func (m *EventHandler_TaskDetails) XXX_Size() int { + return xxx_messageInfo_EventHandler_TaskDetails.Size(m) +} +func (m *EventHandler_TaskDetails) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_TaskDetails.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_TaskDetails proto.InternalMessageInfo + +func (m 
*EventHandler_TaskDetails) GetWorkflowId() string { + if m != nil { + return m.WorkflowId + } + return "" +} + +func (m *EventHandler_TaskDetails) GetTaskRefName() string { + if m != nil { + return m.TaskRefName + } + return "" +} + +func (m *EventHandler_TaskDetails) GetOutput() map[string]*_struct.Value { + if m != nil { + return m.Output + } + return nil +} + +func (m *EventHandler_TaskDetails) GetOutputMessage() *any.Any { + if m != nil { + return m.OutputMessage + } + return nil +} + +type EventHandler_Action struct { + Action EventHandler_Action_Type `protobuf:"varint,1,opt,name=action,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` + StartWorkflow *EventHandler_StartWorkflow `protobuf:"bytes,2,opt,name=start_workflow,json=startWorkflow" json:"start_workflow,omitempty"` + CompleteTask *EventHandler_TaskDetails `protobuf:"bytes,3,opt,name=complete_task,json=completeTask" json:"complete_task,omitempty"` + FailTask *EventHandler_TaskDetails `protobuf:"bytes,4,opt,name=fail_task,json=failTask" json:"fail_task,omitempty"` + ExpandInlineJson bool `protobuf:"varint,5,opt,name=expand_inline_json,json=expandInlineJson" json:"expand_inline_json,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *EventHandler_Action) Reset() { *m = EventHandler_Action{} } +func (m *EventHandler_Action) String() string { return proto.CompactTextString(m) } +func (*EventHandler_Action) ProtoMessage() {} +func (*EventHandler_Action) Descriptor() ([]byte, []int) { + return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2} +} +func (m *EventHandler_Action) XXX_Unmarshal(b []byte) error { + return xxx_messageInfo_EventHandler_Action.Unmarshal(m, b) +} +func (m *EventHandler_Action) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + return xxx_messageInfo_EventHandler_Action.Marshal(b, m, deterministic) +} +func (dst *EventHandler_Action) XXX_Merge(src proto.Message) { + xxx_messageInfo_EventHandler_Action.Merge(dst, src) +} +func (m *EventHandler_Action) XXX_Size() int { + return xxx_messageInfo_EventHandler_Action.Size(m) +} +func (m *EventHandler_Action) XXX_DiscardUnknown() { + xxx_messageInfo_EventHandler_Action.DiscardUnknown(m) +} + +var xxx_messageInfo_EventHandler_Action proto.InternalMessageInfo + +func (m *EventHandler_Action) GetAction() EventHandler_Action_Type { + if m != nil { + return m.Action + } + return EventHandler_Action_START_WORKFLOW +} + +func (m *EventHandler_Action) GetStartWorkflow() *EventHandler_StartWorkflow { + if m != nil { + return m.StartWorkflow + } + return nil +} + +func (m *EventHandler_Action) GetCompleteTask() *EventHandler_TaskDetails { + if m != nil { + return m.CompleteTask + } + return nil +} + +func (m *EventHandler_Action) GetFailTask() *EventHandler_TaskDetails { + if m != nil { + return m.FailTask + } + return nil +} + +func (m *EventHandler_Action) GetExpandInlineJson() bool { + if m != nil { + return m.ExpandInlineJson + } + return false +} + +func init() { + proto.RegisterType((*EventHandler)(nil), "conductor.proto.EventHandler") + proto.RegisterType((*EventHandler_StartWorkflow)(nil), "conductor.proto.EventHandler.StartWorkflow") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.EventHandler.StartWorkflow.InputEntry") + proto.RegisterType((*EventHandler_TaskDetails)(nil), "conductor.proto.EventHandler.TaskDetails") + proto.RegisterMapType((map[string]*_struct.Value)(nil), 
"conductor.proto.EventHandler.TaskDetails.OutputEntry") + proto.RegisterType((*EventHandler_Action)(nil), "conductor.proto.EventHandler.Action") + proto.RegisterEnum("conductor.proto.EventHandler_Action_Type", EventHandler_Action_Type_name, EventHandler_Action_Type_value) +} + +func init() { + proto.RegisterFile("model/eventhandler.proto", fileDescriptor_eventhandler_d75293086a3c9db8) +} + +var fileDescriptor_eventhandler_d75293086a3c9db8 = []byte{ + // 665 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, + 0x18, 0x77, 0x7f, 0x61, 0x4f, 0xe9, 0x9c, 0x17, 0x42, 0xea, 0x24, 0x91, 0x10, 0x4d, 0x30, 0x92, + 0x36, 0x99, 0xd1, 0x28, 0x1a, 0x93, 0xa1, 0x23, 0x4e, 0x06, 0xc3, 0x63, 0x91, 0xc4, 0x37, 0xcd, + 0xad, 0xbd, 0x8d, 0xba, 0xee, 0x6e, 0x69, 0xaf, 0x83, 0x7d, 0x1e, 0x3f, 0x81, 0x9f, 0xc0, 0xf7, + 0x7e, 0x2a, 0x73, 0x77, 0x2d, 0x14, 0x34, 0x28, 0x89, 0xef, 0x9e, 0xbf, 0xbf, 0xe7, 0x77, 0xbf, + 0xe7, 0x69, 0xc1, 0x9a, 0x72, 0x9f, 0x86, 0x0e, 0x9d, 0x53, 0x26, 0x4e, 0x09, 0xf3, 0x43, 0x1a, + 0xd9, 0xb3, 0x88, 0x0b, 0x8e, 0xee, 0x7a, 0x9c, 0xf9, 0x89, 0x27, 0x78, 0x1a, 0x68, 0xae, 0x8f, + 0x39, 0x1f, 0x87, 0xd4, 0x51, 0xde, 0x30, 0x19, 0x39, 0xb1, 0x88, 0x12, 0x4f, 0xa4, 0xd9, 0xfb, + 0xd7, 0xb3, 0x84, 0x2d, 0x74, 0x6a, 0xf3, 0x67, 0x0d, 0x56, 0x3a, 0x72, 0xc0, 0x07, 0x3d, 0x00, + 0x21, 0x28, 0x33, 0x32, 0xa5, 0x56, 0x61, 0xa3, 0xb0, 0x55, 0xc3, 0xca, 0x46, 0xab, 0x50, 0x51, + 0x24, 0xac, 0xa2, 0x0a, 0x6a, 0x07, 0xad, 0x43, 0x4d, 0xd2, 0x08, 0x44, 0xc0, 0x99, 0x55, 0x52, + 0x99, 0xcb, 0x00, 0x7a, 0x0b, 0x4b, 0xc4, 0x93, 0x56, 0x6c, 0x95, 0x37, 0x4a, 0x5b, 0x46, 0xeb, + 0x91, 0x7d, 0x8d, 0xb4, 0x9d, 0x9f, 0x6b, 0xb7, 0x55, 0x31, 0xce, 0x9a, 0xd0, 0x1a, 0x54, 0xa5, + 0x39, 0xa7, 0x56, 0x65, 0xa3, 0xb0, 0xb5, 0x8c, 0x53, 0xaf, 0xf9, 0xa3, 0x08, 0xe6, 0xb1, 0x20, + 0x91, 0x38, 0xe1, 0xd1, 0x64, 0x14, 0xf2, 0xb3, 0x3f, 0x32, 0xb6, 0x60, 0x69, 0x4e, 0xa3, 0x58, + 0x32, 0x93, 0x9c, 0x2b, 0x38, 0x73, 0xd1, 0x63, 0xa8, 0x7b, 0x3c, 0x8a, 0x68, 0x48, 0xe4, 0x1c, + 0x37, 0xf0, 0x53, 0xea, 0x66, 0x2e, 0xda, 0xf5, 0x51, 0x0f, 0x2a, 0x01, 0x9b, 0x25, 0x22, 0x25, + 0xff, 0xe2, 0x66, 0xf2, 0x57, 0x08, 0xd9, 0x5d, 0xd9, 0xd8, 0x61, 0x22, 0x5a, 0x60, 0x0d, 0x82, + 0x5e, 0x81, 0xa9, 0x0c, 0x77, 0x4a, 0xe3, 0x98, 0x8c, 0xf5, 0x9b, 0x8c, 0xd6, 0xaa, 0xad, 0x17, + 0x63, 0x67, 0x8b, 0xb1, 0xdb, 0x6c, 0x81, 0x57, 0x54, 0xe9, 0x81, 0xae, 0x6c, 0x1e, 0x01, 0x5c, + 0xe2, 0xa1, 0x06, 0x94, 0x26, 0x74, 0x91, 0x3e, 0x55, 0x9a, 0x68, 0x1b, 0x2a, 0x73, 0x12, 0x26, + 0x54, 0xbd, 0xd3, 0x68, 0xad, 0xfd, 0x06, 0xf9, 0x59, 0x66, 0xb1, 0x2e, 0xda, 0x29, 0xbe, 0x2c, + 0x34, 0xbf, 0x17, 0xc1, 0x18, 0x90, 0x78, 0xf2, 0x9e, 0x0a, 0x12, 0x84, 0x31, 0x7a, 0x08, 0xc6, + 0x59, 0x4a, 0x5d, 0xca, 0xa1, 0xb1, 0x21, 0x0b, 0x75, 0x7d, 0xb4, 0x09, 0xa6, 0x20, 0xf1, 0xc4, + 0x8d, 0xe8, 0xc8, 0x55, 0x4a, 0xeb, 0x33, 0x30, 0x64, 0x10, 0xd3, 0xd1, 0xa1, 0x14, 0xfc, 0x00, + 0xaa, 0x3c, 0x11, 0x52, 0xb0, 0x92, 0x12, 0xec, 0xf9, 0xcd, 0x82, 0xe5, 0xe6, 0xdb, 0x7d, 0xd5, + 0xa7, 0xf5, 0x4a, 0x41, 0xd0, 0x6b, 0xa8, 0x6b, 0xeb, 0x42, 0xb1, 0xf2, 0x0d, 0x8a, 0x99, 0xba, + 0x36, 0x93, 0xec, 0x13, 0x18, 0x39, 0xcc, 0xff, 0xa2, 0xd9, 0xb7, 0x12, 0x54, 0xf5, 0x85, 0xa2, + 0xb6, 0x3e, 0x4c, 0xce, 0x14, 0x62, 0xbd, 0xf5, 0xe4, 0x5f, 0xee, 0xda, 0x1e, 0x2c, 0x66, 0x14, + 0xa7, 0x8d, 0x08, 0x43, 0x3d, 0x96, 0x17, 0xe3, 0x66, 0x22, 0xa7, 0x44, 0x9e, 0xde, 0xe2, 0xca, + 0xb0, 0x19, 0x5f, 0xf9, 0x0a, 0x0e, 0xc1, 0xf4, 0xf8, 0x74, 0x16, 0x52, 0x41, 0x5d, 0xb9, 0x18, + 0x75, 0xd6, 
0xc6, 0xdf, 0xd8, 0xe5, 0xf6, 0x80, 0x57, 0xb2, 0x7e, 0x19, 0x44, 0x7b, 0x50, 0x1b, + 0x91, 0x20, 0xd4, 0x58, 0xe5, 0xdb, 0x62, 0x2d, 0xcb, 0x5e, 0x85, 0xb3, 0x0d, 0x88, 0x9e, 0xcf, + 0x08, 0xf3, 0xdd, 0x80, 0x85, 0x01, 0xa3, 0xee, 0xd7, 0x98, 0xb3, 0xf4, 0x9b, 0x6e, 0xe8, 0x4c, + 0x57, 0x25, 0x3e, 0xc6, 0x9c, 0x6d, 0xbe, 0x81, 0xb2, 0x54, 0x0a, 0x21, 0xa8, 0x1f, 0x0f, 0xda, + 0x78, 0xe0, 0x9e, 0xf4, 0xf1, 0xfe, 0x5e, 0xaf, 0x7f, 0xd2, 0xb8, 0x83, 0xee, 0x81, 0xf9, 0xae, + 0x7f, 0x70, 0xd4, 0xeb, 0x0c, 0x3a, 0xee, 0xa0, 0x7d, 0xbc, 0xdf, 0x28, 0x20, 0x13, 0x6a, 0x7b, + 0xed, 0x6e, 0x4f, 0xbb, 0xc5, 0xdd, 0x00, 0x1e, 0x78, 0x7c, 0x6a, 0x33, 0x2a, 0x46, 0x61, 0x70, + 0x7e, 0x9d, 0xed, 0x6e, 0x3d, 0x4f, 0xf7, 0x68, 0xf8, 0x65, 0x67, 0x1c, 0x88, 0xd3, 0x64, 0x68, + 0x7b, 0x7c, 0xea, 0xa4, 0x3d, 0xce, 0x45, 0x8f, 0xe3, 0x85, 0x01, 0x65, 0xc2, 0x19, 0xf3, 0x71, + 0x34, 0xf3, 0x72, 0x71, 0xf5, 0x53, 0x1e, 0x56, 0x15, 0xe4, 0xb3, 0x5f, 0x01, 0x00, 0x00, 0xff, + 0xff, 0x15, 0xb8, 0xa4, 0xd6, 0xa4, 0x05, 0x00, 0x00, +} diff --git a/client/gogrpc/conductor/model/polldata.pb.go b/client/gogrpc/conductor/model/polldata.pb.go index 69f1cb74e3..53207b224e 100644 --- a/client/gogrpc/conductor/model/polldata.pb.go +++ b/client/gogrpc/conductor/model/polldata.pb.go @@ -32,7 +32,7 @@ func (m *PollData) Reset() { *m = PollData{} } func (m *PollData) String() string { return proto.CompactTextString(m) } func (*PollData) ProtoMessage() {} func (*PollData) Descriptor() ([]byte, []int) { - return fileDescriptor_polldata_a39ed67c311a83df, []int{0} + return fileDescriptor_polldata_17cab9e308fb8d52, []int{0} } func (m *PollData) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PollData.Unmarshal(m, b) @@ -81,25 +81,26 @@ func (m *PollData) GetLastPollTime() int64 { } func init() { - proto.RegisterType((*PollData)(nil), "com.netflix.conductor.proto.PollData") + proto.RegisterType((*PollData)(nil), "conductor.proto.PollData") } -func init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_a39ed67c311a83df) } +func init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_17cab9e308fb8d52) } -var fileDescriptor_polldata_a39ed67c311a83df = []byte{ - // 224 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0x31, 0x4b, 0x03, 0x41, - 0x10, 0x46, 0x39, 0x23, 0x21, 0xb7, 0x88, 0xc5, 0x22, 0x72, 0x10, 0x84, 0x20, 0x16, 0xa9, 0x6e, - 0x0b, 0x3b, 0xcb, 0x60, 0xa1, 0x8d, 0x84, 0x60, 0x65, 0x73, 0xec, 0xed, 0x8e, 0xe7, 0xe2, 0xcc, - 0xce, 0xb9, 0xce, 0xa2, 0x3f, 0xc0, 0x1f, 0x2e, 0xb7, 0x89, 0x92, 0x72, 0xde, 0x2b, 0xe6, 0x7d, - 0xea, 0x82, 0xd8, 0x03, 0x9a, 0x91, 0x11, 0xbd, 0x15, 0xdb, 0x8e, 0x89, 0x85, 0xf5, 0xd2, 0x31, - 0xb5, 0x11, 0xe4, 0x15, 0xc3, 0x77, 0xeb, 0x38, 0xfa, 0xec, 0x84, 0xd3, 0x5e, 0x5e, 0xff, 0x54, - 0x6a, 0xb1, 0x65, 0xc4, 0x7b, 0x2b, 0x56, 0x5f, 0x29, 0xf5, 0x91, 0x21, 0x43, 0x17, 0x2d, 0x41, - 0x53, 0xad, 0xaa, 0x75, 0xbd, 0xab, 0x0b, 0x79, 0xb2, 0x04, 0xfa, 0x52, 0xcd, 0x3d, 0x93, 0x0d, - 0xb1, 0x39, 0x29, 0xea, 0x70, 0xe9, 0xa5, 0xaa, 0xbf, 0x38, 0xbd, 0x43, 0xea, 0x82, 0x6f, 0x66, - 0x45, 0x2d, 0xf6, 0xe0, 0xd1, 0xeb, 0x1b, 0x75, 0x8e, 0xf6, 0x53, 0xba, 0x29, 0xaa, 0x93, 0x40, - 0xd0, 0x9c, 0xae, 0xaa, 0xf5, 0x6c, 0x77, 0x36, 0xd1, 0xe9, 0xf3, 0x73, 0x20, 0xd8, 0x3c, 0x6c, - 0xd4, 0x5f, 0xc5, 0xb6, 0x7f, 0xb9, 0x1b, 0x82, 0xbc, 0xe5, 0xbe, 0x75, 0x4c, 0xe6, 0x10, 0x6e, - 0xfe, 0xc3, 0x8d, 0xc3, 0x00, 0x51, 0xcc, 0xc0, 0x43, 0x1a, 0xdd, 0x11, 0x2f, 0xe3, 0xfb, 0x79, - 0xd9, 0x75, 0xfb, 0x1b, 0x00, 
0x00, 0xff, 0xff, 0x47, 0x36, 0x49, 0x42, 0x0c, 0x01, 0x00, 0x00, +var fileDescriptor_polldata_17cab9e308fb8d52 = []byte{ + // 229 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x90, 0xc1, 0x4a, 0x03, 0x31, + 0x10, 0x86, 0x59, 0x2b, 0xa5, 0x3b, 0x88, 0x42, 0x10, 0x59, 0x28, 0x42, 0x11, 0x0f, 0x3d, 0x6d, + 0x0e, 0xde, 0x3c, 0x16, 0x2f, 0x5e, 0xa4, 0x14, 0x4f, 0x5e, 0x96, 0x6c, 0x32, 0xae, 0xc1, 0x4c, + 0x66, 0x8d, 0x13, 0xf4, 0x01, 0x7c, 0x70, 0xd9, 0xb4, 0x8a, 0x78, 0x9c, 0xef, 0xff, 0x99, 0xf9, + 0x18, 0x38, 0x27, 0x76, 0x18, 0xf4, 0xc8, 0x21, 0x38, 0x23, 0xa6, 0x1d, 0x13, 0x0b, 0xab, 0x33, + 0xcb, 0xd1, 0x65, 0x2b, 0x9c, 0xf6, 0xe0, 0xea, 0xab, 0x82, 0xc5, 0x96, 0x43, 0xb8, 0x33, 0x62, + 0xd4, 0x25, 0xc0, 0x5b, 0xc6, 0x8c, 0x5d, 0x34, 0x84, 0x4d, 0xb5, 0xaa, 0xd6, 0xf5, 0xae, 0x2e, + 0xe4, 0xc1, 0x10, 0xaa, 0x0b, 0x98, 0x3b, 0x26, 0xe3, 0x63, 0x73, 0x54, 0xa2, 0xc3, 0xa4, 0x96, + 0x50, 0x7f, 0x70, 0x7a, 0xc5, 0xd4, 0x79, 0xd7, 0xcc, 0x4a, 0xb4, 0xd8, 0x83, 0x7b, 0xa7, 0xae, + 0xe1, 0x34, 0x98, 0x77, 0xe9, 0x26, 0x91, 0x4e, 0x3c, 0x61, 0x73, 0xbc, 0xaa, 0xd6, 0xb3, 0xdd, + 0xc9, 0x44, 0xa7, 0xcb, 0x8f, 0x9e, 0x70, 0x83, 0xb0, 0xb4, 0x4c, 0x6d, 0x44, 0x79, 0x0e, 0xfe, + 0xb3, 0xfd, 0x67, 0xb9, 0x81, 0x1f, 0xc5, 0x6d, 0xff, 0x74, 0x3b, 0x78, 0x79, 0xc9, 0x7d, 0x6b, + 0x99, 0xf4, 0xa1, 0xaf, 0x7f, 0xfb, 0xda, 0x06, 0x8f, 0x51, 0xf4, 0xc0, 0x43, 0x1a, 0xed, 0x1f, + 0x5e, 0xbe, 0xd1, 0xcf, 0xcb, 0xba, 0x9b, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x37, 0x71, + 0xb0, 0x1d, 0x01, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go index cfc15ce8e9..2652f2ab07 100644 --- a/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go +++ b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go @@ -34,7 +34,7 @@ func (m *RerunWorkflowRequest) Reset() { *m = RerunWorkflowRequest{} } func (m *RerunWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*RerunWorkflowRequest) ProtoMessage() {} func (*RerunWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d, []int{0} + return fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8, []int{0} } func (m *RerunWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RerunWorkflowRequest.Unmarshal(m, b) @@ -90,39 +90,39 @@ func (m *RerunWorkflowRequest) GetCorrelationId() string { } func init() { - proto.RegisterType((*RerunWorkflowRequest)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest.TaskInputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.RerunWorkflowRequest.WorkflowInputEntry") + proto.RegisterType((*RerunWorkflowRequest)(nil), "conductor.proto.RerunWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.RerunWorkflowRequest.WorkflowInputEntry") } func init() { - proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d) + proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8) } -var fileDescriptor_rerunworkflowrequest_ecd5e08640f59d8d = []byte{ +var 
fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8 = []byte{ // 369 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4b, 0x6b, 0xdb, 0x40, - 0x10, 0xc7, 0x91, 0x1f, 0x05, 0xaf, 0xb1, 0x5b, 0xd6, 0xc5, 0x35, 0x6e, 0x0f, 0xa2, 0x50, 0xf0, - 0xc1, 0xac, 0xc0, 0xed, 0xa1, 0xf8, 0x14, 0x4c, 0x12, 0xf0, 0x2d, 0x08, 0x93, 0x84, 0x5c, 0x84, - 0x1e, 0x2b, 0x45, 0x48, 0xda, 0x71, 0x46, 0xbb, 0x71, 0xfc, 0x69, 0xf2, 0x55, 0x83, 0x56, 0x92, - 0xad, 0x10, 0x13, 0x08, 0xe4, 0xa6, 0x79, 0xfd, 0xe7, 0xa7, 0xff, 0x2c, 0x31, 0x33, 0x08, 0x78, - 0x6a, 0x21, 0x47, 0x25, 0x76, 0x80, 0x49, 0x98, 0xc2, 0x0e, 0xf9, 0x83, 0xe2, 0xb9, 0x64, 0x5b, - 0x04, 0x09, 0xf4, 0xa7, 0x0f, 0x19, 0x13, 0x5c, 0x86, 0x69, 0xfc, 0xc4, 0x7c, 0x10, 0x81, 0xf2, - 0x25, 0x60, 0x59, 0x9c, 0xfe, 0x8a, 0x00, 0xa2, 0x94, 0x5b, 0x3a, 0xf2, 0x54, 0x68, 0xe5, 0x12, - 0x95, 0x5f, 0x8d, 0xfe, 0x7e, 0xee, 0x90, 0xef, 0x76, 0xa1, 0x7c, 0x53, 0x29, 0xdb, 0xa5, 0x32, - 0xfd, 0x47, 0x7e, 0x20, 0x77, 0x50, 0x09, 0x27, 0x44, 0xc8, 0x9c, 0x7a, 0xb1, 0x13, 0x07, 0x13, - 0xc3, 0x34, 0x66, 0x3d, 0x7b, 0x84, 0xdc, 0x56, 0xe2, 0x12, 0x21, 0xab, 0x47, 0xd7, 0x01, 0x4d, - 0xc8, 0xf0, 0xd8, 0x29, 0xb6, 0x4a, 0x4e, 0x5a, 0x66, 0x7b, 0xd6, 0x5f, 0x9c, 0xb3, 0x77, 0x10, - 0xd9, 0x29, 0x00, 0x76, 0x50, 0x2d, 0x64, 0x2e, 0x84, 0xc4, 0xbd, 0x3d, 0xd8, 0x35, 0x73, 0x74, - 0x4e, 0x46, 0x4d, 0x44, 0xe9, 0xe6, 0x49, 0x81, 0xd7, 0xd6, 0x78, 0x5f, 0x0f, 0x78, 0x1b, 0x37, - 0x4f, 0xd6, 0x01, 0x75, 0x08, 0x29, 0x3b, 0x34, 0x56, 0x47, 0x63, 0x9d, 0x7d, 0x1c, 0x4b, 0xab, - 0x1d, 0x91, 0x7a, 0xb2, 0x8e, 0xe9, 0x1f, 0x32, 0xf4, 0x01, 0x91, 0xa7, 0xae, 0x8c, 0x41, 0x14, - 0x24, 0x5d, 0x4d, 0x32, 0x68, 0x64, 0xd7, 0xc1, 0xf4, 0x96, 0xd0, 0xb7, 0xbf, 0x46, 0xbf, 0x91, - 0x76, 0xc2, 0xf7, 0x95, 0xb5, 0xc5, 0x27, 0x9d, 0x93, 0xee, 0xa3, 0x9b, 0x2a, 0x3e, 0x69, 0x99, - 0xc6, 0xac, 0xbf, 0x18, 0xb3, 0xf2, 0x8e, 0xac, 0xbe, 0x23, 0xbb, 0x2e, 0xaa, 0x76, 0xd9, 0xb4, - 0x6c, 0xfd, 0x37, 0xa6, 0x1b, 0x32, 0x7c, 0x4d, 0xf7, 0x19, 0xaa, 0xab, 0xcd, 0x6a, 0x7c, 0xca, - 0x88, 0x2b, 0xef, 0x6e, 0x19, 0xc5, 0xf2, 0x5e, 0x79, 0x85, 0x87, 0x56, 0xe5, 0xa1, 0x75, 0xf0, - 0xd0, 0xf2, 0xd3, 0x98, 0x0b, 0x69, 0x45, 0x10, 0xe1, 0xd6, 0x6f, 0xe4, 0xf5, 0x6b, 0xf6, 0xbe, - 0xe8, 0x85, 0x7f, 0x5f, 0x02, 0x00, 0x00, 0xff, 0xff, 0x61, 0xde, 0x4e, 0x72, 0xdd, 0x02, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4f, 0xef, 0xd2, 0x30, + 0x18, 0xc7, 0x33, 0xf8, 0x61, 0x42, 0x09, 0x60, 0x8a, 0x41, 0x82, 0x1e, 0x16, 0x13, 0x13, 0x0e, + 0xa4, 0x4b, 0x90, 0x03, 0xe1, 0x48, 0xa2, 0x09, 0x37, 0x33, 0x89, 0x1a, 0x2f, 0xcb, 0xd6, 0x75, + 0x73, 0xd9, 0xd6, 0x07, 0x9e, 0xb5, 0x22, 0xaf, 0xc0, 0xb7, 0x6d, 0xd6, 0x6d, 0x30, 0x91, 0x83, + 0x07, 0x6f, 0xeb, 0xb3, 0xef, 0x9f, 0xcf, 0x9e, 0x8e, 0xd8, 0x39, 0x84, 0x22, 0x73, 0x50, 0xa0, + 0x96, 0x67, 0xc0, 0x34, 0xca, 0xe0, 0x8c, 0xe2, 0xa4, 0x45, 0xa1, 0xd8, 0x11, 0x41, 0x01, 0x1d, + 0x73, 0x90, 0xa1, 0xe6, 0x0a, 0xb0, 0x1a, 0xcc, 0x5f, 0xc7, 0x00, 0x71, 0x26, 0x1c, 0x73, 0x0a, + 0x74, 0xe4, 0x14, 0x0a, 0x35, 0xaf, 0xe5, 0x6f, 0x7e, 0x3d, 0x91, 0x17, 0x6e, 0x99, 0xf6, 0xa5, + 0x4e, 0x73, 0xab, 0x34, 0xba, 0x26, 0x2f, 0x51, 0x78, 0xa8, 0xa5, 0x17, 0x21, 0xe4, 0x5e, 0x53, + 0xe6, 0x25, 0xe1, 0xcc, 0xb2, 0xad, 0x45, 0xdf, 0x9d, 0xa0, 0x70, 0xb5, 0xfc, 0x80, 0x90, 0x37, + 0xd6, 0x7d, 0x48, 0x3d, 0x32, 0xba, 0x29, 0xe5, 0x51, 0xab, 0x59, 0xc7, 0xee, 0x2e, 0x06, 0xab, + 0x0d, 0xbb, 0xc3, 0x62, 0x8f, 0x4a, 0xd9, 0x35, 0xa9, 0xb4, 0xbe, 0x97, 0x0a, 0x2f, 0xee, 0xf0, + 0xdc, 0x9e, 
0xd1, 0x25, 0x99, 0xb4, 0xb1, 0x94, 0x5f, 0xa4, 0x25, 0x52, 0xd7, 0x20, 0x8d, 0xaf, + 0x48, 0x07, 0xbf, 0x48, 0xf7, 0x21, 0xfd, 0x44, 0x48, 0xa5, 0x30, 0x28, 0x4f, 0x06, 0x65, 0xfd, + 0x6f, 0x28, 0x26, 0xe1, 0x86, 0xd1, 0x57, 0xcd, 0x99, 0xbe, 0x25, 0x23, 0x0e, 0x88, 0x22, 0xf3, + 0x55, 0x02, 0xb2, 0x6c, 0xef, 0x99, 0xf6, 0x61, 0x6b, 0xba, 0x0f, 0xe7, 0x5f, 0x09, 0xfd, 0xfb, + 0x73, 0xe8, 0x73, 0xd2, 0x4d, 0xc5, 0xa5, 0x5e, 0x61, 0xf9, 0x48, 0x97, 0xa4, 0xf7, 0xc3, 0xcf, + 0xb4, 0x98, 0x75, 0x6c, 0x6b, 0x31, 0x58, 0x4d, 0x59, 0x75, 0x5f, 0xac, 0xb9, 0x2f, 0xf6, 0xb9, + 0x7c, 0xeb, 0x56, 0xa2, 0x6d, 0x67, 0x63, 0xcd, 0x0f, 0x64, 0xf4, 0x27, 0xdd, 0xff, 0x48, 0xdd, + 0x9d, 0xc8, 0x2b, 0x0e, 0x39, 0x93, 0x42, 0x45, 0x59, 0xf2, 0xf3, 0x7e, 0x49, 0xbb, 0xe9, 0xa3, + 0x2d, 0x7d, 0x0c, 0xbe, 0x6d, 0xe3, 0x44, 0x7d, 0xd7, 0x01, 0xe3, 0x90, 0x3b, 0xb5, 0xd7, 0xb9, + 0x7a, 0x1d, 0x9e, 0x25, 0x42, 0x2a, 0x27, 0x86, 0x18, 0x8f, 0xbc, 0x35, 0x37, 0xbf, 0x71, 0xf0, + 0xcc, 0x44, 0xbf, 0xfb, 0x1d, 0x00, 0x00, 0xff, 0xff, 0xce, 0x3a, 0x9b, 0x51, 0xd6, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/skiptaskrequest.pb.go b/client/gogrpc/conductor/model/skiptaskrequest.pb.go index 047399ec96..6a937fa148 100644 --- a/client/gogrpc/conductor/model/skiptaskrequest.pb.go +++ b/client/gogrpc/conductor/model/skiptaskrequest.pb.go @@ -6,6 +6,7 @@ package model // import "github.com/netflix/conductor/client/gogrpc/conductor/mo import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" +import any "github.com/golang/protobuf/ptypes/any" import _struct "github.com/golang/protobuf/ptypes/struct" // Reference imports to suppress errors if they are not otherwise used. @@ -22,6 +23,8 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type SkipTaskRequest struct { TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + TaskInputMessage *any.Any `protobuf:"bytes,3,opt,name=task_input_message,json=taskInputMessage" json:"task_input_message,omitempty"` + TaskOutputMessage *any.Any `protobuf:"bytes,4,opt,name=task_output_message,json=taskOutputMessage" json:"task_output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -31,7 +34,7 @@ func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } func (*SkipTaskRequest) ProtoMessage() {} func (*SkipTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_skiptaskrequest_877c4b9b26d6de32, []int{0} + return fileDescriptor_skiptaskrequest_fb745ec89a45d156, []int{0} } func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) @@ -65,35 +68,52 @@ func (m *SkipTaskRequest) GetTaskOutput() map[string]*_struct.Value { return nil } +func (m *SkipTaskRequest) GetTaskInputMessage() *any.Any { + if m != nil { + return m.TaskInputMessage + } + return nil +} + +func (m *SkipTaskRequest) GetTaskOutputMessage() *any.Any { + if m != nil { + return m.TaskOutputMessage + } + return nil +} + func init() { - proto.RegisterType((*SkipTaskRequest)(nil), 
"com.netflix.conductor.proto.SkipTaskRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.SkipTaskRequest.TaskInputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.SkipTaskRequest.TaskOutputEntry") + proto.RegisterType((*SkipTaskRequest)(nil), "conductor.proto.SkipTaskRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskInputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.SkipTaskRequest.TaskOutputEntry") } func init() { - proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_877c4b9b26d6de32) + proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_fb745ec89a45d156) } -var fileDescriptor_skiptaskrequest_877c4b9b26d6de32 = []byte{ - // 296 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xcf, 0x4b, 0xfb, 0x30, - 0x18, 0xc6, 0x69, 0xc7, 0xf7, 0x0b, 0x4b, 0xc1, 0x69, 0x0f, 0x52, 0x3a, 0x0f, 0xc5, 0x53, 0x0f, - 0x92, 0xc0, 0xbc, 0xc8, 0xf4, 0x34, 0xf0, 0xe0, 0x69, 0x52, 0xa7, 0x87, 0x81, 0x48, 0x9b, 0x65, - 0x35, 0xb4, 0xeb, 0x5b, 0xd3, 0x37, 0xe2, 0xfe, 0x74, 0x6f, 0x92, 0x64, 0x13, 0xed, 0x41, 0x10, - 0xbc, 0xbd, 0x3f, 0xf2, 0x7c, 0x9e, 0xe4, 0x21, 0x64, 0xbc, 0x81, 0x95, 0xa8, 0x59, 0x57, 0xc9, - 0x16, 0xf3, 0xae, 0x52, 0xe2, 0x45, 0x8b, 0x0e, 0x69, 0xab, 0x00, 0x21, 0x1c, 0x73, 0xd8, 0xd0, - 0x46, 0xe0, 0xba, 0x96, 0x6f, 0x94, 0x43, 0xb3, 0xd2, 0x1c, 0x41, 0xb9, 0x65, 0x7c, 0x52, 0x02, - 0x94, 0xb5, 0x60, 0xb6, 0x2b, 0xf4, 0x9a, 0x75, 0xa8, 0x34, 0xdf, 0x49, 0x4f, 0xdf, 0x7d, 0x32, - 0xba, 0xab, 0x64, 0xbb, 0xc8, 0xbb, 0x2a, 0x73, 0xd0, 0x70, 0x49, 0x88, 0xf1, 0x78, 0x92, 0x4d, - 0xab, 0x31, 0xf2, 0x92, 0x41, 0x1a, 0x4c, 0x2e, 0xe9, 0x0f, 0x1e, 0xb4, 0x47, 0xa0, 0xa6, 0xbe, - 0x31, 0xea, 0xeb, 0x06, 0xd5, 0x36, 0x1b, 0xe2, 0xbe, 0x0f, 0x1f, 0x49, 0x60, 0xd9, 0xa0, 0xd1, - 0xc0, 0x7d, 0x0b, 0xbf, 0xfa, 0x35, 0x7c, 0x6e, 0xe5, 0x8e, 0x6e, 0x2f, 0xeb, 0x06, 0xf1, 0x82, - 0x1c, 0x7c, 0xf7, 0x0e, 0x0f, 0xc9, 0xa0, 0x12, 0xdb, 0xc8, 0x4b, 0xbc, 0x74, 0x98, 0x99, 0x32, - 0x3c, 0x23, 0xff, 0x5e, 0xf3, 0x5a, 0x8b, 0xc8, 0x4f, 0xbc, 0x34, 0x98, 0x1c, 0x53, 0x17, 0x10, - 0xdd, 0x07, 0x44, 0x1f, 0xcc, 0x36, 0x73, 0x87, 0xa6, 0xfe, 0x85, 0x17, 0xdf, 0x93, 0x51, 0xcf, - 0xf4, 0x2f, 0xb0, 0xb3, 0xf9, 0xec, 0xa8, 0xf7, 0xb6, 0xdb, 0x62, 0x39, 0x2d, 0x25, 0x3e, 0xeb, - 0xc2, 0x24, 0xc2, 0x76, 0x89, 0xb0, 0xcf, 0x44, 0x18, 0xaf, 0xa5, 0x68, 0x90, 0x95, 0x50, 0xaa, - 0x96, 0x7f, 0x99, 0xdb, 0xdf, 0x51, 0xfc, 0xb7, 0x5e, 0xe7, 0x1f, 0x01, 0x00, 0x00, 0xff, 0xff, - 0x6f, 0xf9, 0xb7, 0xd0, 0x2d, 0x02, 0x00, 0x00, +var fileDescriptor_skiptaskrequest_fb745ec89a45d156 = []byte{ + // 348 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xbd, 0x4e, 0xc3, 0x30, + 0x14, 0x85, 0x95, 0x06, 0x90, 0xea, 0x4a, 0xb4, 0x35, 0x08, 0x85, 0x94, 0xa1, 0x62, 0xea, 0x80, + 0x6c, 0x54, 0x16, 0xd4, 0x8d, 0x0a, 0x06, 0x06, 0xfe, 0x42, 0x61, 0x60, 0xa9, 0x12, 0xd7, 0x0d, + 0x51, 0x12, 0x3b, 0xc4, 0x36, 0x22, 0xcf, 0xcc, 0x4b, 0xa0, 0xd8, 0xfd, 0xc3, 0x48, 0x88, 0x81, + 0xcd, 0xbe, 0xf7, 0x9e, 0xef, 0x9e, 0x23, 0x1b, 0xf4, 0x72, 0x3e, 0xa3, 0x19, 0x16, 0x69, 0x52, + 0xc8, 0x50, 0xa4, 0x25, 0x7d, 0x53, 0x54, 0x48, 0x54, 0x94, 0x5c, 0x72, 0xd8, 0x26, 0x9c, 0xcd, + 0x14, 0x91, 0xbc, 0x34, 0x05, 0xff, 0x28, 0xe6, 0x3c, 0xce, 0x28, 0xd6, 0xb7, 0x48, 0xcd, 0xb1, + 0x90, 0xa5, 0x22, 0x8b, 
0x71, 0xff, 0xd0, 0xee, 0x86, 0xac, 0x32, 0xad, 0xe3, 0x4f, 0x17, 0xb4, + 0x1f, 0xd3, 0xa4, 0x98, 0x84, 0x22, 0x0d, 0xcc, 0x0e, 0x78, 0x0b, 0x40, 0xbd, 0x72, 0x9a, 0xb0, + 0x42, 0x49, 0xcf, 0xe9, 0xbb, 0x83, 0xd6, 0x10, 0x23, 0x6b, 0x25, 0xb2, 0x54, 0xa8, 0x3e, 0x5f, + 0xd7, 0x8a, 0x2b, 0x26, 0xcb, 0x2a, 0x68, 0xca, 0xe5, 0x1d, 0x3e, 0x80, 0x96, 0xe6, 0x71, 0x25, + 0x6b, 0x60, 0x43, 0x03, 0x4f, 0xff, 0x04, 0xbc, 0xd3, 0x12, 0x43, 0xd4, 0xa6, 0x4c, 0x01, 0x8e, + 0x01, 0x5c, 0x5b, 0x9c, 0xe6, 0x54, 0x88, 0x30, 0xa6, 0x9e, 0xdb, 0x77, 0x06, 0xad, 0xe1, 0x3e, + 0x32, 0x71, 0xd1, 0x32, 0x2e, 0xba, 0x60, 0x55, 0xd0, 0x59, 0xf9, 0xb9, 0x31, 0xd3, 0xf0, 0x12, + 0xec, 0x6d, 0xd8, 0x5a, 0x41, 0xb6, 0x7e, 0x81, 0x74, 0xd7, 0x16, 0x16, 0x14, 0x7f, 0x02, 0x76, + 0xbf, 0x27, 0x87, 0x1d, 0xe0, 0xa6, 0xb4, 0xf2, 0x9c, 0xbe, 0x33, 0x68, 0x06, 0xf5, 0x11, 0x9e, + 0x80, 0xed, 0xf7, 0x30, 0x53, 0xd4, 0x6b, 0x68, 0xf6, 0xc1, 0x0f, 0xf6, 0x73, 0xdd, 0x0d, 0xcc, + 0xd0, 0xa8, 0x71, 0xee, 0xf8, 0x4f, 0xa0, 0x6d, 0xc5, 0xff, 0x0f, 0xec, 0x38, 0x03, 0x3d, 0xc2, + 0x73, 0xc4, 0xa8, 0x9c, 0x67, 0xc9, 0x87, 0xfd, 0x02, 0xe3, 0xae, 0xf5, 0x04, 0xf7, 0xd1, 0xcb, + 0x28, 0x4e, 0xe4, 0xab, 0x8a, 0x10, 0xe1, 0x39, 0x5e, 0xc8, 0xf0, 0x4a, 0x86, 0x49, 0x96, 0x50, + 0x26, 0x71, 0xcc, 0xe3, 0xb2, 0x20, 0x1b, 0x75, 0xfd, 0x77, 0xa3, 0x1d, 0x4d, 0x3d, 0xfb, 0x0a, + 0x00, 0x00, 0xff, 0xff, 0xcb, 0xb6, 0xee, 0xfd, 0xcb, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/startworkflowrequest.pb.go b/client/gogrpc/conductor/model/startworkflowrequest.pb.go index ac928d8fe5..dbc07d7d64 100644 --- a/client/gogrpc/conductor/model/startworkflowrequest.pb.go +++ b/client/gogrpc/conductor/model/startworkflowrequest.pb.go @@ -34,7 +34,7 @@ func (m *StartWorkflowRequest) Reset() { *m = StartWorkflowRequest{} } func (m *StartWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*StartWorkflowRequest) ProtoMessage() {} func (*StartWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_startworkflowrequest_0f3c58ff07a6063b, []int{0} + return fileDescriptor_startworkflowrequest_76bb252c87e811af, []int{0} } func (m *StartWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StartWorkflowRequest.Unmarshal(m, b) @@ -90,38 +90,38 @@ func (m *StartWorkflowRequest) GetTaskToDomain() map[string]string { } func init() { - proto.RegisterType((*StartWorkflowRequest)(nil), "com.netflix.conductor.proto.StartWorkflowRequest") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.StartWorkflowRequest.InputEntry") - proto.RegisterMapType((map[string]string)(nil), "com.netflix.conductor.proto.StartWorkflowRequest.TaskToDomainEntry") + proto.RegisterType((*StartWorkflowRequest)(nil), "conductor.proto.StartWorkflowRequest") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.StartWorkflowRequest.InputEntry") + proto.RegisterMapType((map[string]string)(nil), "conductor.proto.StartWorkflowRequest.TaskToDomainEntry") } func init() { - proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_0f3c58ff07a6063b) + proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_76bb252c87e811af) } -var fileDescriptor_startworkflowrequest_0f3c58ff07a6063b = []byte{ - // 353 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x92, 0x4f, 0x4b, 0xeb, 0x40, - 0x10, 0xc0, 0x49, 0xd3, 0xbc, 0x47, 0xb7, 0xef, 0x15, 0x5d, 0x4a, 0x09, 0xd5, 
0x43, 0x10, 0x84, - 0x1e, 0x64, 0x03, 0xf5, 0x22, 0x45, 0x10, 0xaa, 0x1e, 0x7a, 0x2b, 0xb1, 0x28, 0x78, 0x29, 0xf9, - 0xb3, 0x8d, 0x4b, 0x92, 0x9d, 0xba, 0x99, 0xb4, 0xf6, 0xc3, 0xf9, 0xdd, 0x24, 0x9b, 0x54, 0x03, - 0x16, 0xc1, 0xdb, 0xfc, 0xc9, 0xfc, 0x66, 0xf2, 0x63, 0x89, 0x93, 0x41, 0xc4, 0x53, 0x37, 0x47, - 0x5f, 0xe1, 0x16, 0x54, 0xb2, 0x4a, 0x61, 0xab, 0xf8, 0x6b, 0xc1, 0x73, 0x64, 0x6b, 0x05, 0x08, - 0xf4, 0x24, 0x84, 0x8c, 0x49, 0x8e, 0xab, 0x54, 0xbc, 0xb1, 0x10, 0x64, 0x54, 0x84, 0x08, 0xaa, - 0x6a, 0x0e, 0x4f, 0x63, 0x80, 0x38, 0xe5, 0xae, 0xce, 0x82, 0x62, 0xe5, 0xe6, 0xa8, 0x8a, 0xb0, - 0x1e, 0x3d, 0x7b, 0x37, 0x49, 0xff, 0xa1, 0x24, 0x3f, 0xd5, 0x64, 0xaf, 0x22, 0x53, 0x4a, 0xda, - 0xd2, 0xcf, 0xb8, 0x6d, 0x38, 0xc6, 0xa8, 0xe3, 0xe9, 0x98, 0xda, 0xe4, 0xef, 0x86, 0xab, 0x5c, - 0x80, 0xb4, 0x5b, 0x8e, 0x31, 0xb2, 0xbc, 0x7d, 0x4a, 0xcf, 0x49, 0x2f, 0x04, 0xa5, 0x78, 0xea, - 0xa3, 0x00, 0xb9, 0x14, 0x91, 0x6d, 0xea, 0xb9, 0xff, 0x8d, 0xea, 0x2c, 0xa2, 0x1e, 0xb1, 0x84, - 0x5c, 0x17, 0x68, 0xb7, 0x1d, 0x73, 0xd4, 0x1d, 0x5f, 0xb3, 0x1f, 0x0e, 0x67, 0x87, 0xce, 0x62, - 0xb3, 0x72, 0xfc, 0x5e, 0xa2, 0xda, 0x79, 0x15, 0x8a, 0x0a, 0xd2, 0x43, 0x3f, 0x4f, 0x96, 0x08, - 0xcb, 0x08, 0x32, 0x5f, 0x48, 0xdb, 0xd2, 0xf0, 0xdb, 0xdf, 0xc3, 0x17, 0x7e, 0x9e, 0x2c, 0xe0, - 0x4e, 0x53, 0xaa, 0x1d, 0xff, 0xb0, 0x51, 0x1a, 0xce, 0x09, 0xf9, 0xda, 0x4f, 0x8f, 0x88, 0x99, - 0xf0, 0x5d, 0x2d, 0xa8, 0x0c, 0xe9, 0x05, 0xb1, 0x36, 0x7e, 0x5a, 0x70, 0x6d, 0xa7, 0x3b, 0x1e, - 0xb0, 0x4a, 0x3d, 0xdb, 0xab, 0x67, 0x8f, 0x65, 0xd7, 0xab, 0x3e, 0x9a, 0xb4, 0xae, 0x8c, 0xe1, - 0x0d, 0x39, 0xfe, 0xb6, 0xf4, 0x00, 0xb8, 0xdf, 0x04, 0x77, 0x1a, 0x80, 0xe9, 0x62, 0x3a, 0x38, - 0xf4, 0x2b, 0xf3, 0xe0, 0x79, 0x12, 0x0b, 0x7c, 0x29, 0x82, 0xd2, 0x82, 0x5b, 0x5b, 0x70, 0x3f, - 0x2d, 0xb8, 0x61, 0x2a, 0xb8, 0x44, 0x37, 0x86, 0x58, 0xad, 0xc3, 0x46, 0x5d, 0xbf, 0xb5, 0xe0, - 0x8f, 0xbe, 0xf8, 0xf2, 0x23, 0x00, 0x00, 0xff, 0xff, 0x53, 0xd0, 0xc7, 0x25, 0x7b, 0x02, 0x00, - 0x00, +var fileDescriptor_startworkflowrequest_76bb252c87e811af = []byte{ + // 360 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x4b, 0xe3, 0x40, + 0x14, 0xc6, 0x49, 0xd3, 0xec, 0xd2, 0xe9, 0x6e, 0x77, 0x1d, 0x4a, 0x09, 0xd5, 0x43, 0x10, 0x84, + 0x1e, 0x64, 0x22, 0xf5, 0xa0, 0xf4, 0x22, 0x14, 0x15, 0x7a, 0x2b, 0xb1, 0x28, 0x08, 0x52, 0x92, + 0xc9, 0x34, 0x0e, 0x49, 0xe6, 0xb5, 0x93, 0x49, 0x6b, 0xff, 0x20, 0xff, 0x4f, 0xc9, 0x24, 0xd5, + 0x50, 0x7b, 0xf0, 0x36, 0xef, 0xcb, 0xfb, 0x7e, 0xef, 0xbd, 0x8f, 0x20, 0x27, 0x85, 0x90, 0x25, + 0x6e, 0xa6, 0x7c, 0xa9, 0x36, 0x20, 0xe3, 0x45, 0x02, 0x1b, 0xc9, 0x56, 0x39, 0xcb, 0x14, 0x59, + 0x4a, 0x50, 0x80, 0xff, 0x51, 0x10, 0x61, 0x4e, 0x15, 0xc8, 0x52, 0xe8, 0x9f, 0x44, 0x00, 0x51, + 0xc2, 0x5c, 0x5d, 0x05, 0xf9, 0xc2, 0xcd, 0x94, 0xcc, 0x69, 0xd5, 0x7e, 0xfa, 0x6e, 0xa2, 0xee, + 0x43, 0x41, 0x7b, 0xaa, 0x68, 0x5e, 0x49, 0xc3, 0x18, 0x35, 0x85, 0x9f, 0x32, 0xdb, 0x70, 0x8c, + 0x41, 0xcb, 0xd3, 0x6f, 0x6c, 0xa3, 0xdf, 0x6b, 0x26, 0x33, 0x0e, 0xc2, 0x6e, 0x38, 0xc6, 0xc0, + 0xf2, 0x76, 0x25, 0x3e, 0x43, 0x1d, 0x0a, 0x52, 0xb2, 0xc4, 0x57, 0x1c, 0xc4, 0x9c, 0x87, 0xb6, + 0xa9, 0x7d, 0x7f, 0x6b, 0xea, 0x24, 0xc4, 0xf7, 0xc8, 0xe2, 0x62, 0x99, 0x2b, 0xbb, 0xe9, 0x98, + 0x83, 0xf6, 0xf0, 0x82, 0xec, 0x2d, 0x4b, 0x0e, 0xad, 0x42, 0x26, 0x85, 0xe5, 0x4e, 0x28, 0xb9, + 0xf5, 0x4a, 0x3b, 0x7e, 0x41, 0x1d, 0xe5, 0x67, 0xf1, 0x5c, 0xc1, 0x3c, 0x84, 0xd4, 0xe7, 0xc2, + 0xb6, 0x34, 0xf0, 0xea, 0x67, 0xc0, 0x99, 0x9f, 0xc5, 0x33, 0xb8, 0xd5, 0xce, 
0x92, 0xfb, 0x47, + 0xd5, 0xa4, 0xfe, 0x14, 0xa1, 0xaf, 0x99, 0xf8, 0x3f, 0x32, 0x63, 0xb6, 0xad, 0x82, 0x28, 0x9e, + 0xf8, 0x1c, 0x59, 0x6b, 0x3f, 0xc9, 0x99, 0x4e, 0xa1, 0x3d, 0xec, 0x91, 0x32, 0x62, 0xb2, 0x8b, + 0x98, 0x3c, 0x16, 0x5f, 0xbd, 0xb2, 0x69, 0xd4, 0xb8, 0x36, 0xfa, 0x37, 0xe8, 0xe8, 0xdb, 0xd0, + 0x03, 0xe0, 0x6e, 0x1d, 0xdc, 0xaa, 0x01, 0xc6, 0x2b, 0x74, 0x4c, 0x21, 0x25, 0x82, 0xa9, 0x45, + 0xc2, 0xdf, 0xf6, 0xcf, 0x1c, 0xf7, 0x0e, 0xdd, 0x39, 0x0d, 0x9e, 0x47, 0x11, 0x57, 0xaf, 0x79, + 0x40, 0x28, 0xa4, 0x6e, 0xe5, 0x75, 0x3f, 0xbd, 0x2e, 0x4d, 0x38, 0x13, 0xca, 0x8d, 0x20, 0x92, + 0x4b, 0x5a, 0xd3, 0xf5, 0x4f, 0x16, 0xfc, 0xd2, 0xe8, 0xcb, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, + 0x48, 0x61, 0x1b, 0x82, 0x74, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/subworkflowparams.pb.go b/client/gogrpc/conductor/model/subworkflowparams.pb.go index 61f09fd26d..cac96fa19f 100644 --- a/client/gogrpc/conductor/model/subworkflowparams.pb.go +++ b/client/gogrpc/conductor/model/subworkflowparams.pb.go @@ -31,7 +31,7 @@ func (m *SubWorkflowParams) Reset() { *m = SubWorkflowParams{} } func (m *SubWorkflowParams) String() string { return proto.CompactTextString(m) } func (*SubWorkflowParams) ProtoMessage() {} func (*SubWorkflowParams) Descriptor() ([]byte, []int) { - return fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd, []int{0} + return fileDescriptor_subworkflowparams_182a77e44709d20f, []int{0} } func (m *SubWorkflowParams) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_SubWorkflowParams.Unmarshal(m, b) @@ -66,27 +66,27 @@ func (m *SubWorkflowParams) GetVersion() *_struct.Value { } func init() { - proto.RegisterType((*SubWorkflowParams)(nil), "com.netflix.conductor.proto.SubWorkflowParams") + proto.RegisterType((*SubWorkflowParams)(nil), "conductor.proto.SubWorkflowParams") } func init() { - proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd) + proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_182a77e44709d20f) } -var fileDescriptor_subworkflowparams_c15f17d5a5a4cfdd = []byte{ - // 213 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x4d, 0x4b, 0xc4, 0x30, - 0x10, 0x86, 0xa9, 0x88, 0x62, 0x3c, 0x19, 0x41, 0x8a, 0x1f, 0x50, 0x3c, 0xf5, 0x94, 0x88, 0xde, - 0x3c, 0xf6, 0x17, 0xd4, 0x0a, 0x8a, 0xde, 0x92, 0x34, 0xcd, 0x86, 0x4d, 0x32, 0x25, 0x1f, 0xdb, - 0xfd, 0xf9, 0x0b, 0x69, 0xbb, 0x2c, 0xec, 0x6d, 0x66, 0xde, 0xe1, 0x99, 0x87, 0x41, 0x2f, 0x16, - 0x7a, 0x69, 0x68, 0x48, 0x7c, 0x02, 0xbf, 0x1d, 0x0c, 0x4c, 0x23, 0xf3, 0xcc, 0x06, 0x32, 0x7a, - 0x88, 0x80, 0x9f, 0x04, 0x58, 0xe2, 0x64, 0x1c, 0x8c, 0xde, 0x13, 0x01, 0xae, 0x4f, 0x22, 0x82, - 0x9f, 0xc3, 0xc7, 0x67, 0x05, 0xa0, 0x8c, 0xa4, 0xb9, 0xe3, 0x69, 0xa0, 0x21, 0xfa, 0x24, 0xe2, - 0x9c, 0xbe, 0xfe, 0xa1, 0xbb, 0xef, 0xc4, 0x7f, 0x17, 0x6a, 0x9b, 0xa9, 0x18, 0xa3, 0x4b, 0xc7, - 0xac, 0x2c, 0x8b, 0xaa, 0xa8, 0x6f, 0xba, 0x5c, 0xe3, 0x37, 0x74, 0xbd, 0x93, 0x3e, 0x68, 0x70, - 0xe5, 0x45, 0x55, 0xd4, 0xb7, 0xef, 0x0f, 0x64, 0x06, 0x93, 0x15, 0x4c, 0x7e, 0x98, 0x49, 0xb2, - 0x5b, 0xd7, 0x9a, 0xaf, 0xe6, 0xfe, 0x0c, 0xdd, 0xf2, 0xff, 0x4f, 0xa5, 0xe3, 0x26, 0x71, 0x22, - 0xc0, 0xd2, 0xc5, 0x99, 0x1e, 0x9d, 0xa9, 0x30, 0x5a, 0xba, 0x48, 0x15, 0x28, 0x3f, 0x8a, 0x93, - 0x79, 0x7e, 0x00, 0xbf, 0xca, 0xb7, 0x3e, 0x0e, 0x01, 0x00, 0x00, 0xff, 0xff, 0x9f, 0x5e, 0xa1, - 0x60, 0x10, 0x01, 0x00, 0x00, +var fileDescriptor_subworkflowparams_182a77e44709d20f = []byte{ + // 217 
bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x4f, 0x4b, 0xc4, 0x30, + 0x10, 0xc5, 0xa9, 0x88, 0x62, 0x3c, 0x88, 0x11, 0xa4, 0xf8, 0x07, 0x8a, 0xa7, 0x9e, 0x12, 0xd1, + 0x9b, 0xc7, 0x7e, 0x82, 0x52, 0x41, 0xd1, 0x5b, 0x92, 0xa6, 0x31, 0x98, 0x64, 0x4a, 0xfe, 0x6c, + 0xf7, 0xe3, 0x2f, 0xa4, 0xed, 0xb2, 0xec, 0xde, 0x66, 0xde, 0xcc, 0xfb, 0xcd, 0xf0, 0xd0, 0xb3, + 0x85, 0x5e, 0x1a, 0x1a, 0x12, 0x9f, 0xc0, 0xff, 0x0f, 0x06, 0xa6, 0x91, 0x79, 0x66, 0x03, 0x19, + 0x3d, 0x44, 0xc0, 0x37, 0x02, 0x5c, 0x9f, 0x44, 0x04, 0x3f, 0x0b, 0x0f, 0x4f, 0x0a, 0x40, 0x19, + 0x49, 0x73, 0xc7, 0xd3, 0x40, 0x43, 0xf4, 0x49, 0xc4, 0x79, 0xfa, 0xf2, 0x83, 0x6e, 0x3f, 0x13, + 0xff, 0x5e, 0x48, 0x6d, 0x26, 0x61, 0x8c, 0xce, 0x1d, 0xb3, 0xb2, 0x2c, 0xaa, 0xa2, 0xbe, 0xea, + 0x72, 0x8d, 0x5f, 0xd1, 0xe5, 0x46, 0xfa, 0xa0, 0xc1, 0x95, 0x67, 0x55, 0x51, 0x5f, 0xbf, 0xdd, + 0x93, 0x19, 0x4c, 0x56, 0x30, 0xf9, 0x62, 0x26, 0xc9, 0x6e, 0x5d, 0x6b, 0x1c, 0x7a, 0x14, 0x60, + 0x89, 0x93, 0x71, 0x30, 0x7a, 0x4b, 0x8e, 0xfe, 0x6a, 0xee, 0x4e, 0xee, 0xb6, 0xfc, 0xf7, 0x43, + 0xe9, 0xf8, 0x97, 0x38, 0x11, 0x60, 0xe9, 0x62, 0xa4, 0x7b, 0x23, 0x15, 0x46, 0x4b, 0x17, 0xa9, + 0x02, 0xe5, 0x47, 0x71, 0xa0, 0xe7, 0x44, 0xf8, 0x45, 0xe6, 0xbe, 0xef, 0x02, 0x00, 0x00, 0xff, + 0xff, 0xab, 0x91, 0x1f, 0xb2, 0x21, 0x01, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/task.pb.go b/client/gogrpc/conductor/model/task.pb.go index 617631eddb..156bdab5d1 100644 --- a/client/gogrpc/conductor/model/task.pb.go +++ b/client/gogrpc/conductor/model/task.pb.go @@ -6,6 +6,7 @@ package model // import "github.com/netflix/conductor/client/gogrpc/conductor/mo import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" +import any "github.com/golang/protobuf/ptypes/any" import _struct "github.com/golang/protobuf/ptypes/struct" // Reference imports to suppress errors if they are not otherwise used. 
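The new `any` import above backs the InputMessage and OutputMessage fields this patch adds to Task further down. As a rough usage sketch (again not part of the patch; the payload message and all values are illustrative), such a field would typically be populated by packing an arbitrary proto.Message with ptypes.MarshalAny:

    import (
        "github.com/golang/protobuf/ptypes"

        "github.com/netflix/conductor/client/gogrpc/conductor/model"
    )

    func buildTask() (*model.Task, error) {
        // Pack a proto.Message into an any.Any for the new InputMessage field.
        payload := &model.StartWorkflowRequest{Name: "example_workflow"}
        inputMsg, err := ptypes.MarshalAny(payload)
        if err != nil {
            return nil, err
        }
        return &model.Task{TaskId: "task_1", InputMessage: inputMsg}, nil
    }
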
@@ -22,50 +23,53 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type Task_Status int32 const ( - Task_IN_PROGRESS Task_Status = 0 - Task_CANCELED Task_Status = 1 - Task_FAILED Task_Status = 2 - Task_COMPLETED Task_Status = 3 - Task_COMPLETED_WITH_ERRORS Task_Status = 4 - Task_SCHEDULED Task_Status = 5 - Task_TIMED_OUT Task_Status = 6 - Task_READY_FOR_RERUN Task_Status = 7 - Task_SKIPPED Task_Status = 8 + Task_IN_PROGRESS Task_Status = 0 + Task_CANCELED Task_Status = 1 + Task_FAILED Task_Status = 2 + Task_FAILED_WITH_TERMINAL_ERROR Task_Status = 3 + Task_COMPLETED Task_Status = 4 + Task_COMPLETED_WITH_ERRORS Task_Status = 5 + Task_SCHEDULED Task_Status = 6 + Task_TIMED_OUT Task_Status = 7 + Task_READY_FOR_RERUN Task_Status = 8 + Task_SKIPPED Task_Status = 9 ) var Task_Status_name = map[int32]string{ 0: "IN_PROGRESS", 1: "CANCELED", 2: "FAILED", - 3: "COMPLETED", - 4: "COMPLETED_WITH_ERRORS", - 5: "SCHEDULED", - 6: "TIMED_OUT", - 7: "READY_FOR_RERUN", - 8: "SKIPPED", + 3: "FAILED_WITH_TERMINAL_ERROR", + 4: "COMPLETED", + 5: "COMPLETED_WITH_ERRORS", + 6: "SCHEDULED", + 7: "TIMED_OUT", + 8: "READY_FOR_RERUN", + 9: "SKIPPED", } var Task_Status_value = map[string]int32{ - "IN_PROGRESS": 0, - "CANCELED": 1, - "FAILED": 2, - "COMPLETED": 3, - "COMPLETED_WITH_ERRORS": 4, - "SCHEDULED": 5, - "TIMED_OUT": 6, - "READY_FOR_RERUN": 7, - "SKIPPED": 8, + "IN_PROGRESS": 0, + "CANCELED": 1, + "FAILED": 2, + "FAILED_WITH_TERMINAL_ERROR": 3, + "COMPLETED": 4, + "COMPLETED_WITH_ERRORS": 5, + "SCHEDULED": 6, + "TIMED_OUT": 7, + "READY_FOR_RERUN": 8, + "SKIPPED": 9, } func (x Task_Status) String() string { return proto.EnumName(Task_Status_name, int32(x)) } func (Task_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_task_64f7e1feb6072ed5, []int{0, 0} + return fileDescriptor_task_7843d3d2fd8c2dc8, []int{0, 0} } type Task struct { TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - Status Task_Status `protobuf:"varint,2,opt,name=status,enum=com.netflix.conductor.proto.Task_Status" json:"status,omitempty"` + Status Task_Status `protobuf:"varint,2,opt,name=status,enum=conductor.proto.Task_Status" json:"status,omitempty"` InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName" json:"reference_task_name,omitempty"` RetryCount int32 `protobuf:"varint,5,opt,name=retry_count,json=retryCount" json:"retry_count,omitempty"` @@ -80,17 +84,20 @@ type Task struct { StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds" json:"start_delay_in_seconds,omitempty"` RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId" json:"retried_task_id,omitempty"` Retried bool `protobuf:"varint,16,opt,name=retried" json:"retried,omitempty"` - CallbackFromWorker bool `protobuf:"varint,17,opt,name=callback_from_worker,json=callbackFromWorker" json:"callback_from_worker,omitempty"` - ResponseTimeoutSeconds int32 `protobuf:"varint,18,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` - WorkflowInstanceId string `protobuf:"bytes,19,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` - WorkflowType string 
`protobuf:"bytes,20,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` - TaskId string `protobuf:"bytes,21,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,22,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - CallbackAfterSeconds int64 `protobuf:"varint,23,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` - WorkerId string `protobuf:"bytes,24,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - OutputData map[string]*_struct.Value `protobuf:"bytes,25,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - WorkflowTask *WorkflowTask `protobuf:"bytes,26,opt,name=workflow_task,json=workflowTask" json:"workflow_task,omitempty"` - Domain string `protobuf:"bytes,27,opt,name=domain" json:"domain,omitempty"` + Executed bool `protobuf:"varint,17,opt,name=executed" json:"executed,omitempty"` + CallbackFromWorker bool `protobuf:"varint,18,opt,name=callback_from_worker,json=callbackFromWorker" json:"callback_from_worker,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,19,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` + WorkflowInstanceId string `protobuf:"bytes,20,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` + WorkflowType string `protobuf:"bytes,21,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` + TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,23,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,24,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,25,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,26,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + WorkflowTask *WorkflowTask `protobuf:"bytes,27,opt,name=workflow_task,json=workflowTask" json:"workflow_task,omitempty"` + Domain string `protobuf:"bytes,28,opt,name=domain" json:"domain,omitempty"` + InputMessage *any.Any `protobuf:"bytes,29,opt,name=input_message,json=inputMessage" json:"input_message,omitempty"` + OutputMessage *any.Any `protobuf:"bytes,30,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -100,7 +107,7 @@ func (m *Task) Reset() { *m = Task{} } func (m *Task) String() string { return proto.CompactTextString(m) } func (*Task) ProtoMessage() {} func (*Task) Descriptor() ([]byte, []int) { - return fileDescriptor_task_64f7e1feb6072ed5, []int{0} + return fileDescriptor_task_7843d3d2fd8c2dc8, []int{0} } func (m *Task) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Task.Unmarshal(m, b) @@ -232,6 +239,13 @@ func (m *Task) GetRetried() bool { return false } +func (m *Task) GetExecuted() bool { + if m != nil { + return m.Executed + } + return false +} + func (m *Task) GetCallbackFromWorker() bool { if m != nil { return m.CallbackFromWorker @@ -309,72 
+323,91 @@ func (m *Task) GetDomain() string { return "" } +func (m *Task) GetInputMessage() *any.Any { + if m != nil { + return m.InputMessage + } + return nil +} + +func (m *Task) GetOutputMessage() *any.Any { + if m != nil { + return m.OutputMessage + } + return nil +} + func init() { - proto.RegisterType((*Task)(nil), "com.netflix.conductor.proto.Task") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Task.InputDataEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Task.OutputDataEntry") - proto.RegisterEnum("com.netflix.conductor.proto.Task_Status", Task_Status_name, Task_Status_value) + proto.RegisterType((*Task)(nil), "conductor.proto.Task") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.InputDataEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.OutputDataEntry") + proto.RegisterEnum("conductor.proto.Task_Status", Task_Status_name, Task_Status_value) } -func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_64f7e1feb6072ed5) } +func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_7843d3d2fd8c2dc8) } -var fileDescriptor_task_64f7e1feb6072ed5 = []byte{ - // 899 bytes of a gzipped FileDescriptorProto +var fileDescriptor_task_7843d3d2fd8c2dc8 = []byte{ + // 980 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x6f, 0xdb, 0x36, - 0x14, 0x9d, 0xf2, 0xe1, 0x8f, 0xab, 0xd8, 0x56, 0x99, 0xc4, 0x61, 0x9c, 0x0d, 0x35, 0x32, 0x74, - 0xf0, 0x80, 0x41, 0xce, 0xd2, 0x61, 0x28, 0xfa, 0xb4, 0xd4, 0x52, 0x56, 0x61, 0xad, 0x6d, 0xc8, - 0xce, 0x82, 0xed, 0x45, 0xa0, 0x25, 0xda, 0x15, 0x2c, 0x89, 0x1e, 0x45, 0xad, 0xf3, 0x8f, 0xd9, - 0x3f, 0xdc, 0x8f, 0x18, 0x48, 0x5a, 0xaa, 0xb7, 0x87, 0xf6, 0xa5, 0x6f, 0xe2, 0x39, 0xf7, 0x5e, - 0x9f, 0x7b, 0xc8, 0x7b, 0x0d, 0x56, 0xca, 0x22, 0x9a, 0x0c, 0x05, 0xc9, 0xd7, 0xf6, 0x86, 0x33, - 0xc1, 0xd0, 0x55, 0xc8, 0x52, 0x3b, 0xa3, 0x62, 0x99, 0xc4, 0x7f, 0xd9, 0x21, 0xcb, 0xa2, 0x22, - 0x14, 0x8c, 0x6b, 0xb2, 0x87, 0x75, 0xf8, 0x7b, 0xc6, 0xd7, 0xcb, 0x84, 0xbd, 0xff, 0x90, 0xd6, - 0xfb, 0x72, 0xc5, 0xd8, 0x2a, 0xa1, 0x43, 0x75, 0x5a, 0x14, 0xcb, 0x61, 0x2e, 0x78, 0x11, 0x0a, - 0xcd, 0x5e, 0xff, 0x63, 0xc2, 0xd1, 0x9c, 0xe4, 0x6b, 0x74, 0x05, 0x4d, 0x99, 0x14, 0x88, 0xed, - 0x86, 0x62, 0xa3, 0x6f, 0x0c, 0x9a, 0x7e, 0x43, 0x02, 0xf3, 0xed, 0x86, 0xa2, 0x9f, 0xa0, 0x96, - 0x0b, 0x22, 0x8a, 0x1c, 0x1f, 0xf4, 0x8d, 0x41, 0xfb, 0x76, 0x60, 0x7f, 0x44, 0x8b, 0x2d, 0xeb, - 0xd9, 0x33, 0x15, 0xef, 0xef, 0xf2, 0xd0, 0x04, 0x20, 0xce, 0x36, 0x85, 0x08, 0x22, 0x22, 0x08, - 0x3e, 0xec, 0x1f, 0x0e, 0xcc, 0xdb, 0x9b, 0x4f, 0x57, 0xf1, 0x64, 0x8e, 0x43, 0x04, 0x71, 0x33, - 0xc1, 0xb7, 0x7e, 0x33, 0x2e, 0xcf, 0xc8, 0x86, 0x53, 0x4e, 0x97, 0x94, 0xd3, 0x2c, 0xa4, 0x81, - 0x52, 0x9e, 0x91, 0x94, 0xe2, 0x23, 0xa5, 0xfc, 0x49, 0x45, 0xc9, 0x2a, 0x63, 0x92, 0x52, 0xf4, - 0x14, 0x4c, 0x4e, 0x05, 0xdf, 0x06, 0x21, 0x2b, 0x32, 0x81, 0x8f, 0xfb, 0xc6, 0xe0, 0xd8, 0x07, - 0x05, 0x8d, 0x24, 0x82, 0x2c, 0x38, 0xcc, 0xe9, 0x1f, 0xb8, 0xa6, 0x08, 0xf9, 0x89, 0x9e, 0x41, - 0x3b, 0x64, 0x9c, 0xd3, 0x84, 0x88, 0x98, 0x65, 0x41, 0x1c, 0xe1, 0xba, 0xaa, 0xde, 0xda, 0x43, - 0xbd, 0x08, 0x7d, 0x05, 0xb0, 0x61, 0x49, 0xb2, 0x2b, 0xdc, 0x50, 0xf9, 0x4d, 0x89, 0xe8, 0xba, - 0xd7, 0xd0, 0x52, 0xf2, 0x22, 0xba, 0xd4, 0x12, 0x9b, 0xaa, 0x88, 0x29, 0x41, 0x87, 0x2e, 0x95, - 0xb8, 0x67, 0xd0, 0xce, 0xc3, 0x77, 0x34, 0x2a, 0x12, 0x1a, 0x05, 0x22, 0x4e, 0x29, 
0x86, 0xbe, - 0x31, 0x38, 0xf4, 0x5b, 0x15, 0x3a, 0x8f, 0x53, 0x2a, 0x7f, 0x29, 0x17, 0x84, 0x0b, 0x1d, 0x62, - 0xaa, 0x90, 0xa6, 0x42, 0x14, 0x7d, 0x09, 0x0d, 0x9a, 0xed, 0xf2, 0x4f, 0x14, 0x59, 0xa7, 0x99, - 0xce, 0x7c, 0x0a, 0x66, 0xb1, 0x89, 0x88, 0xa0, 0x9a, 0x6d, 0x29, 0x16, 0x34, 0xa4, 0x02, 0x9e, - 0x43, 0x57, 0x97, 0x8e, 0x68, 0x42, 0xb6, 0x41, 0x9c, 0x05, 0x39, 0x95, 0x37, 0x92, 0xe3, 0xb6, - 0x6a, 0xe8, 0x54, 0xb1, 0x8e, 0x24, 0xbd, 0x6c, 0xa6, 0x29, 0xf4, 0x0d, 0x74, 0xa4, 0x81, 0xb1, - 0x14, 0x2d, 0x5b, 0x8c, 0x23, 0xdc, 0xd1, 0x0e, 0xed, 0x60, 0xe9, 0xbe, 0x17, 0x21, 0x0c, 0xf5, - 0x1d, 0x80, 0xad, 0xbe, 0x31, 0x68, 0xf8, 0xe5, 0x11, 0xdd, 0xc0, 0x59, 0x48, 0x92, 0x64, 0x41, - 0xc2, 0x75, 0xb0, 0xe4, 0x2c, 0x0d, 0xe4, 0x03, 0xa6, 0x1c, 0x3f, 0x51, 0x61, 0xa8, 0xe4, 0xee, - 0x39, 0x4b, 0x1f, 0x15, 0x83, 0x5e, 0x00, 0xe6, 0x34, 0xdf, 0xb0, 0x2c, 0xd7, 0xbd, 0xb0, 0x42, - 0x54, 0x52, 0x91, 0x92, 0xda, 0x2d, 0xf9, 0xb9, 0xa6, 0x4b, 0xb5, 0x37, 0x70, 0x56, 0x8e, 0x47, - 0x10, 0x67, 0xb9, 0x20, 0xf2, 0xe5, 0xc4, 0x11, 0x3e, 0x55, 0x92, 0x51, 0xc9, 0x79, 0x3b, 0xca, - 0x8b, 0xd0, 0xd7, 0xd0, 0xaa, 0x32, 0xd4, 0x5c, 0x9c, 0xa9, 0xd0, 0x93, 0x12, 0x54, 0xb3, 0x71, - 0x01, 0xf5, 0xb2, 0xf9, 0x73, 0x45, 0xd7, 0x84, 0xee, 0xfa, 0x47, 0xb8, 0xe0, 0x94, 0xe4, 0x2c, - 0x0b, 0x96, 0x8c, 0x07, 0x71, 0x16, 0xb2, 0x74, 0x93, 0x50, 0xf9, 0x68, 0x70, 0x57, 0x05, 0x9e, - 0x6b, 0xfa, 0x9e, 0x71, 0x6f, 0x8f, 0x44, 0x3f, 0x40, 0xb7, 0xf2, 0x84, 0x2c, 0x05, 0xe5, 0x55, - 0x7f, 0x17, 0xea, 0xda, 0x2a, 0xc7, 0xee, 0x24, 0x59, 0x76, 0x77, 0x05, 0x4d, 0xed, 0x9d, 0x14, - 0x82, 0xf5, 0xfc, 0x6a, 0xc0, 0x8b, 0x90, 0x0f, 0x26, 0x2b, 0x44, 0x35, 0x7e, 0x97, 0x6a, 0xfc, - 0xbe, 0xff, 0xf4, 0xf8, 0x4d, 0x54, 0xd2, 0x87, 0xf9, 0x03, 0x56, 0x01, 0x68, 0xbc, 0x6f, 0x0e, - 0xc9, 0xd7, 0xb8, 0xd7, 0x37, 0x06, 0xe6, 0xed, 0xb7, 0x1f, 0xad, 0xfa, 0x58, 0x3a, 0x47, 0xf2, - 0xf5, 0x9e, 0x8f, 0x72, 0x01, 0x75, 0xa1, 0x16, 0xb1, 0x94, 0xc4, 0x19, 0xbe, 0xd2, 0x36, 0xea, - 0x53, 0x6f, 0x0e, 0xed, 0xff, 0x6e, 0x01, 0x39, 0xa9, 0x6b, 0xba, 0xdd, 0x2d, 0x29, 0xf9, 0x89, - 0xbe, 0x83, 0xe3, 0x3f, 0x49, 0x52, 0x50, 0xb5, 0x9e, 0xcc, 0xdb, 0xae, 0xad, 0x77, 0x9e, 0x5d, - 0xee, 0x3c, 0xfb, 0x57, 0xc9, 0xfa, 0x3a, 0xe8, 0xe5, 0xc1, 0x0b, 0xa3, 0xf7, 0x00, 0x9d, 0xff, - 0x35, 0xf7, 0x39, 0xca, 0x5e, 0xff, 0x6d, 0x40, 0x4d, 0x6f, 0x3e, 0xd4, 0x01, 0xd3, 0x1b, 0x07, - 0x53, 0x7f, 0xf2, 0xb3, 0xef, 0xce, 0x66, 0xd6, 0x17, 0xe8, 0x04, 0x1a, 0xa3, 0xbb, 0xf1, 0xc8, - 0x7d, 0xe3, 0x3a, 0x96, 0x81, 0x00, 0x6a, 0xf7, 0x77, 0x9e, 0xfc, 0x3e, 0x40, 0x2d, 0x68, 0x8e, - 0x26, 0x6f, 0xa7, 0x6f, 0xdc, 0xb9, 0xeb, 0x58, 0x87, 0xe8, 0x12, 0xce, 0xab, 0x63, 0xf0, 0xe8, - 0xcd, 0x5f, 0x07, 0xae, 0xef, 0x4f, 0xfc, 0x99, 0x75, 0x24, 0x23, 0x67, 0xa3, 0xd7, 0xae, 0xf3, - 0x20, 0x13, 0x8f, 0xe5, 0x71, 0xee, 0xbd, 0x75, 0x9d, 0x60, 0xf2, 0x30, 0xb7, 0x6a, 0xe8, 0x14, - 0x3a, 0xbe, 0x7b, 0xe7, 0xfc, 0x16, 0xdc, 0x4f, 0xfc, 0xc0, 0x77, 0xfd, 0x87, 0xb1, 0x55, 0x47, - 0x26, 0xd4, 0x67, 0xbf, 0x78, 0xd3, 0xa9, 0xeb, 0x58, 0x8d, 0x57, 0xce, 0xab, 0x9a, 0x34, 0x7b, - 0xba, 0xf8, 0xfd, 0xe5, 0x2a, 0x16, 0xef, 0x8a, 0x85, 0xbc, 0xad, 0xe1, 0xee, 0xb6, 0x86, 0xd5, - 0x6d, 0x0d, 0xc3, 0x24, 0xa6, 0x99, 0x18, 0xae, 0xd8, 0x8a, 0x6f, 0xc2, 0x3d, 0x5c, 0xfd, 0xcb, - 0x2c, 0x6a, 0xca, 0x80, 0xe7, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xf7, 0xe0, 0x94, 0x89, 0xa4, - 0x06, 0x00, 0x00, + 0x14, 0x9d, 0xf3, 0xe1, 0x8f, 0xeb, 0xd8, 0x56, 0x99, 0xc4, 0x61, 0x9c, 0xa4, 0x35, 0xb2, 0x65, + 0xf0, 0xc3, 0x60, 0x17, 0x69, 0x31, 0x74, 0xdd, 0x93, 0x63, 0x2b, 0xab, 0xb0, 0x24, 0x0e, 
0x64, + 0x67, 0xc1, 0xf6, 0x22, 0x30, 0x12, 0xed, 0x0a, 0x96, 0x48, 0x8f, 0xa2, 0xd6, 0xfa, 0xc7, 0xed, + 0x3f, 0xed, 0x27, 0x0c, 0x24, 0x25, 0x35, 0x4b, 0x8b, 0x3d, 0xf5, 0x8d, 0x3c, 0xe7, 0xdc, 0xe3, + 0x7b, 0xaf, 0x78, 0xaf, 0xc1, 0x8a, 0x79, 0x40, 0xa3, 0x81, 0x24, 0xc9, 0xb2, 0xbf, 0x12, 0x5c, + 0x72, 0xd4, 0xf2, 0x39, 0x0b, 0x52, 0x5f, 0x72, 0x61, 0x80, 0x0e, 0x36, 0x92, 0x0f, 0x5c, 0x2c, + 0xe7, 0x11, 0xff, 0xf0, 0x49, 0xda, 0x39, 0x5e, 0x70, 0xbe, 0x88, 0xe8, 0x40, 0xdf, 0x1e, 0xd2, + 0xf9, 0x20, 0x91, 0x22, 0xf5, 0x65, 0xc6, 0x1e, 0x3e, 0x65, 0x09, 0x5b, 0x1b, 0xea, 0xf4, 0x9f, + 0x1d, 0xd8, 0x9a, 0x91, 0x64, 0x89, 0x8e, 0xa0, 0xa6, 0xfc, 0x3c, 0xb9, 0x5e, 0x51, 0x5c, 0xea, + 0x96, 0x7a, 0x35, 0xb7, 0xaa, 0x80, 0xd9, 0x7a, 0x45, 0xd1, 0x6b, 0x28, 0x27, 0x92, 0xc8, 0x34, + 0xc1, 0x1b, 0xdd, 0x52, 0xaf, 0x79, 0x7e, 0xdc, 0x7f, 0x92, 0x5a, 0x5f, 0x79, 0xf4, 0xa7, 0x5a, + 0xe3, 0x66, 0x5a, 0x34, 0x02, 0x08, 0xd9, 0x2a, 0x95, 0x5e, 0x40, 0x24, 0xc1, 0x9b, 0xdd, 0xcd, + 0x5e, 0xfd, 0xfc, 0xbb, 0x2f, 0x47, 0x3a, 0x4a, 0x37, 0x26, 0x92, 0xd8, 0x4c, 0x8a, 0xb5, 0x5b, + 0x0b, 0xf3, 0x3b, 0xea, 0xc3, 0xae, 0xa0, 0x73, 0x2a, 0x28, 0xf3, 0xa9, 0xa7, 0x33, 0x64, 0x24, + 0xa6, 0x78, 0x4b, 0x67, 0xf8, 0xac, 0xa0, 0x94, 0xcb, 0x0d, 0x89, 0x29, 0x7a, 0x01, 0x75, 0x41, + 0xa5, 0x58, 0x7b, 0x3e, 0x4f, 0x99, 0xc4, 0xdb, 0xdd, 0x52, 0x6f, 0xdb, 0x05, 0x0d, 0x8d, 0x14, + 0x82, 0x2c, 0xd8, 0x4c, 0xe8, 0x9f, 0xb8, 0xac, 0x09, 0x75, 0x44, 0x67, 0xd0, 0xf4, 0xb9, 0x10, + 0x34, 0x22, 0x32, 0xe4, 0xcc, 0x0b, 0x03, 0x5c, 0xd1, 0xee, 0x8d, 0x47, 0xa8, 0x13, 0xa0, 0x13, + 0x80, 0x15, 0x8f, 0xa2, 0xcc, 0xb8, 0xaa, 0xe3, 0x6b, 0x0a, 0x31, 0xbe, 0xa7, 0xd0, 0xd0, 0xe9, + 0x05, 0x74, 0x6e, 0x52, 0xac, 0x69, 0x93, 0xba, 0x02, 0xc7, 0x74, 0xae, 0x93, 0x3b, 0x83, 0x66, + 0xe2, 0xbf, 0xa7, 0x41, 0x1a, 0xd1, 0xc0, 0x93, 0x61, 0x4c, 0x31, 0x74, 0x4b, 0xbd, 0x4d, 0xb7, + 0x51, 0xa0, 0xb3, 0x30, 0xa6, 0xea, 0x97, 0x12, 0x49, 0x84, 0x34, 0x92, 0xba, 0x96, 0xd4, 0x34, + 0xa2, 0xe9, 0x43, 0xa8, 0x52, 0x96, 0xc5, 0xef, 0x68, 0xb2, 0x42, 0x99, 0x89, 0x7c, 0x01, 0xf5, + 0x74, 0x15, 0x10, 0x49, 0x0d, 0xdb, 0xd0, 0x2c, 0x18, 0x48, 0x0b, 0x5e, 0x41, 0xdb, 0x58, 0x07, + 0x34, 0x22, 0x6b, 0x2f, 0x64, 0x5e, 0x42, 0xd5, 0x17, 0x49, 0x70, 0x53, 0x17, 0xb4, 0xab, 0xd9, + 0xb1, 0x22, 0x1d, 0x36, 0x35, 0x14, 0xfa, 0x1e, 0x5a, 0xaa, 0x81, 0xa1, 0x4a, 0x5a, 0x95, 0x18, + 0x06, 0xb8, 0x65, 0x3a, 0x94, 0xc1, 0xaa, 0xfb, 0x4e, 0x80, 0x30, 0x54, 0x32, 0x00, 0x5b, 0xdd, + 0x52, 0xaf, 0xea, 0xe6, 0x57, 0xd4, 0x81, 0x2a, 0xfd, 0x48, 0xfd, 0x54, 0xd2, 0x00, 0x3f, 0xd3, + 0x54, 0x71, 0x47, 0x2f, 0x61, 0xcf, 0x27, 0x51, 0xf4, 0x40, 0xfc, 0xa5, 0x37, 0x17, 0x3c, 0xf6, + 0xd4, 0xfb, 0xa6, 0x02, 0x23, 0xad, 0x43, 0x39, 0x77, 0x29, 0x78, 0x7c, 0xaf, 0x19, 0xf4, 0x06, + 0xb0, 0xa0, 0xc9, 0x8a, 0xb3, 0xc4, 0xd4, 0xc9, 0x53, 0x59, 0x94, 0xb1, 0xab, 0xcb, 0x68, 0xe7, + 0xfc, 0xcc, 0xd0, 0x79, 0x25, 0x2f, 0x61, 0x2f, 0x9f, 0x1e, 0x2f, 0x64, 0x89, 0x24, 0xea, 0x55, + 0x85, 0x01, 0xde, 0xd3, 0xe5, 0xa0, 0x9c, 0x73, 0x32, 0xca, 0x09, 0xd0, 0xb7, 0xd0, 0x28, 0x22, + 0xf4, 0x6c, 0xec, 0x6b, 0xe9, 0x4e, 0x0e, 0xea, 0xf9, 0x38, 0x80, 0x4a, 0xde, 0x98, 0xb6, 0xa6, + 0xcb, 0xd2, 0x74, 0xe4, 0x47, 0x38, 0x10, 0x94, 0x24, 0x9c, 0x79, 0x73, 0x2e, 0xbc, 0x90, 0xf9, + 0x3c, 0x5e, 0x45, 0x54, 0x3d, 0x28, 0x7c, 0xa0, 0x85, 0xfb, 0x86, 0xbe, 0xe4, 0xc2, 0x79, 0x44, + 0xa2, 0xd7, 0xd0, 0x2e, 0x7a, 0x42, 0xe6, 0x92, 0x8a, 0xa2, 0x3e, 0xac, 0x3f, 0x69, 0xd1, 0xb1, + 0xa1, 0x22, 0xf3, 0xea, 0x8e, 0xa0, 0x66, 0x7a, 0xa7, 0x12, 0x39, 0x34, 0x33, 0x6c, 0x00, 0x27, + 0x40, 0x97, 0x50, 
0xe7, 0xa9, 0x2c, 0xc6, 0xb1, 0xa3, 0xc7, 0xf1, 0xec, 0xcb, 0xe3, 0x38, 0xd1, + 0xc2, 0x4f, 0xf3, 0x08, 0xbc, 0x00, 0xd0, 0xc5, 0xe3, 0x86, 0x90, 0x64, 0x89, 0x8f, 0xba, 0xa5, + 0x5e, 0xfd, 0xfc, 0xe4, 0x33, 0xa7, 0xfb, 0xbc, 0x43, 0x24, 0x59, 0x3e, 0xea, 0x97, 0x5a, 0x36, + 0x6d, 0x28, 0x07, 0x3c, 0x26, 0x21, 0xc3, 0xc7, 0xa6, 0x5d, 0xe6, 0x86, 0x7e, 0x82, 0x86, 0xd9, + 0x18, 0x31, 0x4d, 0x12, 0xb2, 0xa0, 0xf8, 0x44, 0x7b, 0xef, 0xf5, 0xcd, 0x02, 0xeb, 0xe7, 0x0b, + 0xac, 0x3f, 0x64, 0x6b, 0x77, 0x47, 0x4b, 0xaf, 0x8d, 0x12, 0xfd, 0x0c, 0xcd, 0xac, 0xbc, 0x3c, + 0xf6, 0xf9, 0xff, 0xc4, 0x36, 0x8c, 0x36, 0x0b, 0xee, 0xcc, 0xa0, 0xf9, 0xdf, 0x0d, 0xa4, 0xb6, + 0xc4, 0x92, 0xae, 0xb3, 0x45, 0xa8, 0x8e, 0xe8, 0x07, 0xd8, 0xfe, 0x8b, 0x44, 0x29, 0xd5, 0x2b, + 0xb0, 0x7e, 0xde, 0xfe, 0xcc, 0xf7, 0x37, 0xc5, 0xba, 0x46, 0xf4, 0x76, 0xe3, 0x4d, 0xa9, 0x73, + 0x07, 0xad, 0x27, 0x8d, 0xfc, 0x1a, 0xb6, 0xa7, 0x7f, 0x97, 0xa0, 0x6c, 0x36, 0x2d, 0x6a, 0x41, + 0xdd, 0xb9, 0xf1, 0x6e, 0xdd, 0xc9, 0x2f, 0xae, 0x3d, 0x9d, 0x5a, 0xdf, 0xa0, 0x1d, 0xa8, 0x8e, + 0x86, 0x37, 0x23, 0xfb, 0xca, 0x1e, 0x5b, 0x25, 0x04, 0x50, 0xbe, 0x1c, 0x3a, 0xea, 0xbc, 0x81, + 0x9e, 0x43, 0xc7, 0x9c, 0xbd, 0x7b, 0x67, 0xf6, 0xce, 0x9b, 0xd9, 0xee, 0xb5, 0x73, 0x33, 0xbc, + 0xf2, 0x6c, 0xd7, 0x9d, 0xb8, 0xd6, 0x26, 0x6a, 0x40, 0x6d, 0x34, 0xb9, 0xbe, 0xbd, 0xb2, 0x67, + 0xf6, 0xd8, 0xda, 0x42, 0x87, 0xb0, 0x5f, 0x5c, 0x4d, 0x84, 0x16, 0x4e, 0xad, 0x6d, 0xa5, 0x9c, + 0x8e, 0xde, 0xd9, 0xe3, 0x3b, 0x65, 0x5c, 0x56, 0xd7, 0x99, 0x73, 0x6d, 0x8f, 0xbd, 0xc9, 0xdd, + 0xcc, 0xaa, 0xa0, 0x5d, 0x68, 0xb9, 0xf6, 0x70, 0xfc, 0xbb, 0x77, 0x39, 0x71, 0x3d, 0xd7, 0x76, + 0xef, 0x6e, 0xac, 0x2a, 0xaa, 0x43, 0x65, 0xfa, 0xab, 0x73, 0x7b, 0x6b, 0x8f, 0xad, 0xda, 0x05, + 0x81, 0x23, 0x9f, 0xc7, 0x7d, 0x46, 0xe5, 0x3c, 0x0a, 0x3f, 0x3e, 0x7d, 0x36, 0x17, 0x65, 0xf5, + 0x42, 0x6e, 0x1f, 0xfe, 0x78, 0xbb, 0x08, 0xe5, 0xfb, 0xf4, 0xa1, 0xef, 0xf3, 0x78, 0x90, 0x69, + 0x07, 0x85, 0x76, 0xe0, 0x47, 0x21, 0x65, 0x72, 0xb0, 0xe0, 0x0b, 0xb1, 0xf2, 0x1f, 0xe1, 0xfa, + 0x1f, 0xf2, 0xa1, 0xac, 0xad, 0x5e, 0xfd, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x54, 0xf9, 0x20, 0xc1, + 0x54, 0x07, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/taskdef.pb.go b/client/gogrpc/conductor/model/taskdef.pb.go index 073a3e7a16..8b07c0eb1e 100644 --- a/client/gogrpc/conductor/model/taskdef.pb.go +++ b/client/gogrpc/conductor/model/taskdef.pb.go @@ -39,7 +39,7 @@ func (x TaskDef_RetryLogic) String() string { return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) } func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0, 0} + return fileDescriptor_taskdef_34514f3248f44dc2, []int{0, 0} } type TaskDef_TimeoutPolicy int32 @@ -65,7 +65,7 @@ func (x TaskDef_TimeoutPolicy) String() string { return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) } func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0, 1} + return fileDescriptor_taskdef_34514f3248f44dc2, []int{0, 1} } type TaskDef struct { @@ -75,8 +75,8 @@ type TaskDef struct { TimeoutSeconds int64 `protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds" json:"timeout_seconds,omitempty"` InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys" json:"input_keys,omitempty"` OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys" json:"output_keys,omitempty"` - TimeoutPolicy TaskDef_TimeoutPolicy 
`protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,enum=com.netflix.conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` - RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,enum=com.netflix.conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` + TimeoutPolicy TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,enum=conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` + RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,enum=conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds" json:"retry_delay_seconds,omitempty"` ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit" json:"concurrent_exec_limit,omitempty"` @@ -90,7 +90,7 @@ func (m *TaskDef) Reset() { *m = TaskDef{} } func (m *TaskDef) String() string { return proto.CompactTextString(m) } func (*TaskDef) ProtoMessage() {} func (*TaskDef) Descriptor() ([]byte, []int) { - return fileDescriptor_taskdef_9dd365e0d8e63269, []int{0} + return fileDescriptor_taskdef_34514f3248f44dc2, []int{0} } func (m *TaskDef) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskDef.Unmarshal(m, b) @@ -195,50 +195,50 @@ func (m *TaskDef) GetInputTemplate() map[string]*_struct.Value { } func init() { - proto.RegisterType((*TaskDef)(nil), "com.netflix.conductor.proto.TaskDef") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.TaskDef.InputTemplateEntry") - proto.RegisterEnum("com.netflix.conductor.proto.TaskDef_RetryLogic", TaskDef_RetryLogic_name, TaskDef_RetryLogic_value) - proto.RegisterEnum("com.netflix.conductor.proto.TaskDef_TimeoutPolicy", TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) -} - -func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_9dd365e0d8e63269) } - -var fileDescriptor_taskdef_9dd365e0d8e63269 = []byte{ - // 566 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x93, 0x4f, 0x6f, 0xd3, 0x40, - 0x10, 0xc5, 0xeb, 0xa6, 0x69, 0xc9, 0x84, 0xa6, 0x66, 0x23, 0x8a, 0x55, 0x40, 0x58, 0xbd, 0xe0, - 0x03, 0xb2, 0x51, 0x38, 0x50, 0x95, 0x53, 0xff, 0x38, 0x28, 0x6a, 0x68, 0x22, 0x63, 0xa0, 0xe5, - 0x80, 0xe5, 0x6c, 0x26, 0x66, 0x15, 0xdb, 0x6b, 0xd9, 0x6b, 0x54, 0x7f, 0x38, 0xbe, 0x1b, 0xda, - 0xb5, 0xd3, 0xa6, 0x20, 0xa1, 0xde, 0x76, 0xdf, 0x9b, 0x37, 0xc9, 0xfc, 0x76, 0x0c, 0xfd, 0x84, - 0xcf, 0x31, 0x76, 0x44, 0x58, 0x2c, 0xe7, 0xb8, 0xb0, 0xb3, 0x9c, 0x0b, 0x4e, 0x9e, 0x53, 0x9e, - 0xd8, 0x29, 0x8a, 0x45, 0xcc, 0x6e, 0x6c, 0xca, 0xd3, 0x79, 0x49, 0x05, 0xcf, 0x6b, 0xf3, 0xe0, - 0x45, 0xc4, 0x79, 0x14, 0xa3, 0xa3, 0x6e, 0xb3, 0x72, 0xe1, 0x14, 0x22, 0x2f, 0xa9, 0xa8, 0xdd, - 0xc3, 0xdf, 0xdb, 0xb0, 0xe3, 0x87, 0xc5, 0xf2, 0x1c, 0x17, 0x84, 0xc0, 0x56, 0x1a, 0x26, 0x68, - 0x68, 0xa6, 0x66, 0x75, 0x3c, 0x75, 0x26, 0x26, 0x74, 0xe7, 0x58, 0xd0, 0x9c, 0x65, 0x82, 0xf1, - 0xd4, 0xd8, 0x54, 0xd6, 0xba, 0x44, 0x5e, 0x41, 0x37, 0x47, 0x91, 0x57, 0x01, 0xe5, 0x65, 0x2a, - 0x8c, 0x96, 0xa9, 0x59, 0x6d, 0x0f, 0x94, 0x74, 0x26, 0x15, 0xf2, 0x1a, 0xf6, 0x04, 0x4b, 0x90, - 0x97, 0x22, 0x28, 0x50, 0xfe, 0xbb, 0xc2, 0xd8, 0x32, 0x35, 0xab, 0xe5, 0xf5, 0x1a, 
0xf9, 0x73, - 0xad, 0x92, 0x97, 0x00, 0x2c, 0xcd, 0x4a, 0x11, 0x2c, 0xb1, 0x2a, 0x8c, 0xb6, 0xd9, 0xb2, 0x3a, - 0x5e, 0x47, 0x29, 0x17, 0x58, 0x15, 0xf2, 0x87, 0x78, 0x29, 0x6e, 0xfd, 0x6d, 0xe5, 0x43, 0x2d, - 0xa9, 0x82, 0x6b, 0x58, 0x75, 0x0c, 0x32, 0x1e, 0x33, 0x5a, 0x19, 0x3b, 0xa6, 0x66, 0xf5, 0x06, - 0x03, 0xfb, 0x3f, 0x7c, 0xec, 0x66, 0x7a, 0xdb, 0xaf, 0xa3, 0x53, 0x95, 0xf4, 0x76, 0xc5, 0xfa, - 0x95, 0x4c, 0x57, 0x43, 0xc6, 0x3c, 0x62, 0xd4, 0x78, 0xa4, 0xfa, 0x3a, 0x0f, 0xea, 0xeb, 0xc9, - 0xdc, 0x58, 0xc6, 0x1a, 0x2a, 0xea, 0x4c, 0x6c, 0xe8, 0xd7, 0x1d, 0xe7, 0x18, 0x87, 0xd5, 0x2d, - 0x99, 0x8e, 0xc2, 0xf7, 0x44, 0x59, 0xe7, 0xd2, 0x59, 0xc1, 0x39, 0x02, 0x23, 0xc7, 0x22, 0xe3, - 0x69, 0x81, 0xc1, 0xdf, 0x38, 0x41, 0x85, 0xf6, 0x57, 0xbe, 0x7f, 0x1f, 0xeb, 0x00, 0x9e, 0x52, - 0x9e, 0xd2, 0x32, 0xcf, 0x31, 0x15, 0x01, 0xde, 0x20, 0x0d, 0x62, 0x96, 0x30, 0x61, 0x74, 0x55, - 0xac, 0x7f, 0x67, 0xba, 0x37, 0x48, 0xc7, 0xd2, 0x22, 0x3f, 0xa0, 0x57, 0x3f, 0x85, 0xc0, 0x24, - 0x8b, 0x43, 0x81, 0xc6, 0x63, 0xb3, 0x65, 0x75, 0x07, 0xef, 0x1f, 0x34, 0xf2, 0x48, 0x46, 0xfd, - 0x26, 0xe9, 0xa6, 0x22, 0xaf, 0xbc, 0x5d, 0xb6, 0xae, 0x1d, 0x5c, 0x01, 0xf9, 0xb7, 0x88, 0xe8, - 0xd0, 0x5a, 0x62, 0xd5, 0xec, 0x9f, 0x3c, 0x92, 0x37, 0xd0, 0xfe, 0x15, 0xc6, 0x25, 0xaa, 0xc5, - 0xeb, 0x0e, 0xf6, 0xed, 0x7a, 0x99, 0xed, 0xd5, 0x32, 0xdb, 0x5f, 0xa5, 0xeb, 0xd5, 0x45, 0xc7, - 0x9b, 0x47, 0xda, 0xe1, 0x5b, 0x80, 0x3b, 0xe2, 0xa4, 0x03, 0xed, 0xe1, 0xe8, 0xca, 0x3d, 0xd7, - 0x37, 0xc8, 0x33, 0xe8, 0xbb, 0x57, 0xd3, 0xc9, 0xa5, 0x7b, 0xe9, 0x8f, 0x4e, 0xc6, 0xc1, 0xe9, - 0xc9, 0xd9, 0xc5, 0x64, 0x38, 0xd4, 0xb5, 0xc3, 0x0f, 0xb0, 0x7b, 0xef, 0xed, 0x65, 0xc8, 0x73, - 0x7d, 0xef, 0x5a, 0xdf, 0x20, 0x7b, 0xd0, 0xf5, 0x47, 0x9f, 0xdc, 0x60, 0xf2, 0xc5, 0x0f, 0xbe, - 0x0d, 0x75, 0x8d, 0xf4, 0x00, 0x4e, 0xc6, 0xae, 0xe7, 0x07, 0x93, 0xcb, 0xf1, 0xb5, 0xbe, 0x79, - 0xfa, 0xf1, 0xb4, 0xd3, 0x4c, 0x3d, 0x9d, 0x7d, 0x3f, 0x8e, 0x98, 0xf8, 0x59, 0xce, 0x24, 0x23, - 0xa7, 0x61, 0xe4, 0xdc, 0x32, 0x72, 0x68, 0xcc, 0x30, 0x15, 0x4e, 0xc4, 0xa3, 0x3c, 0xa3, 0x6b, - 0xba, 0xfa, 0xa2, 0x67, 0xdb, 0x6a, 0xa4, 0x77, 0x7f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xe5, 0xb9, - 0x8e, 0x83, 0xe1, 0x03, 0x00, 0x00, + proto.RegisterType((*TaskDef)(nil), "conductor.proto.TaskDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskDef.InputTemplateEntry") + proto.RegisterEnum("conductor.proto.TaskDef_RetryLogic", TaskDef_RetryLogic_name, TaskDef_RetryLogic_value) + proto.RegisterEnum("conductor.proto.TaskDef_TimeoutPolicy", TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) +} + +func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_34514f3248f44dc2) } + +var fileDescriptor_taskdef_34514f3248f44dc2 = []byte{ + // 568 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x93, 0x61, 0x4f, 0x9b, 0x5e, + 0x14, 0xc6, 0xc5, 0x5a, 0xfd, 0xf7, 0xf4, 0x6f, 0xed, 0x6e, 0x33, 0x47, 0xdc, 0x96, 0x11, 0x97, + 0x6c, 0x24, 0x5b, 0x60, 0xe9, 0xde, 0x18, 0xf7, 0x4a, 0x2d, 0x4d, 0x1a, 0xab, 0x35, 0x77, 0x6c, + 0xd3, 0xbd, 0x21, 0xf4, 0x72, 0xca, 0x6e, 0x0a, 0x5c, 0x02, 0x97, 0x45, 0x3e, 0xca, 0xbe, 0xed, + 0xc2, 0x85, 0x6a, 0x75, 0xf1, 0xdd, 0xbd, 0xcf, 0xf3, 0x3b, 0x07, 0xce, 0xc3, 0x01, 0x06, 0xb1, + 0x08, 0x30, 0xb2, 0xa5, 0x9f, 0x2f, 0x03, 0x5c, 0x58, 0x69, 0x26, 0xa4, 0x20, 0x7b, 0x4c, 0x24, + 0x41, 0xc1, 0xa4, 0xc8, 0x6a, 0xe1, 0xe0, 0x55, 0x28, 0x44, 0x18, 0xa1, 0xad, 0x6e, 0xf3, 0x62, + 0x61, 0xe7, 0x32, 0x2b, 0x98, 0xac, 0xdd, 0xc3, 
0x3f, 0xdb, 0xb0, 0xe3, 0xfa, 0xf9, 0x72, 0x84, + 0x0b, 0x42, 0x60, 0x2b, 0xf1, 0x63, 0xd4, 0x35, 0x43, 0x33, 0x3b, 0x54, 0x9d, 0x89, 0x01, 0xdd, + 0x00, 0x73, 0x96, 0xf1, 0x54, 0x72, 0x91, 0xe8, 0x9b, 0xca, 0x5a, 0x97, 0xc8, 0x1b, 0xe8, 0x66, + 0x28, 0xb3, 0xd2, 0x63, 0xa2, 0x48, 0xa4, 0xde, 0x32, 0x34, 0xb3, 0x4d, 0x41, 0x49, 0x67, 0x95, + 0x42, 0xde, 0xc3, 0x9e, 0xe4, 0x31, 0x8a, 0x42, 0x7a, 0x39, 0x56, 0x6f, 0x97, 0xeb, 0x5b, 0x86, + 0x66, 0xb6, 0x68, 0xaf, 0x91, 0xbf, 0xd6, 0x2a, 0x79, 0x0d, 0xc0, 0x93, 0xb4, 0x90, 0xde, 0x12, + 0xcb, 0x5c, 0x6f, 0x1b, 0x2d, 0xb3, 0x43, 0x3b, 0x4a, 0x39, 0xc7, 0x32, 0xaf, 0x1e, 0x24, 0x0a, + 0x79, 0xe7, 0x6f, 0x2b, 0x1f, 0x6a, 0x49, 0x01, 0x17, 0xb0, 0xea, 0xe8, 0xa5, 0x22, 0xe2, 0xac, + 0xd4, 0x77, 0x0c, 0xcd, 0xec, 0x0d, 0xdf, 0x59, 0x8f, 0x32, 0xb1, 0x9a, 0x89, 0x2d, 0xb7, 0xc6, + 0xaf, 0x14, 0x4d, 0x77, 0xe5, 0xfa, 0x95, 0x8c, 0x56, 0x83, 0x45, 0x22, 0xe4, 0x4c, 0xff, 0x4f, + 0xf5, 0x7a, 0xfb, 0x64, 0x2f, 0x5a, 0xb1, 0xd3, 0x0a, 0x6d, 0xa6, 0x57, 0x67, 0x62, 0xc1, 0xa0, + 0xee, 0x12, 0x60, 0xe4, 0x97, 0x77, 0x09, 0x74, 0x54, 0x4c, 0xcf, 0x94, 0x35, 0xaa, 0x9c, 0x55, + 0x08, 0x47, 0xa0, 0x67, 0x98, 0xa7, 0x22, 0xc9, 0xd1, 0x7b, 0x1c, 0x1b, 0xa8, 0xa2, 0xfd, 0x95, + 0xef, 0x3e, 0x8c, 0x6f, 0x08, 0xcf, 0x99, 0x48, 0x58, 0x91, 0x65, 0x98, 0x48, 0x0f, 0x6f, 0x91, + 0x79, 0x11, 0x8f, 0xb9, 0xd4, 0xbb, 0xaa, 0x6c, 0x70, 0x6f, 0x3a, 0xb7, 0xc8, 0xa6, 0x95, 0x45, + 0x28, 0xf4, 0xea, 0xc8, 0x25, 0xc6, 0x69, 0xe4, 0x4b, 0xd4, 0xff, 0x37, 0x5a, 0x66, 0x77, 0xf8, + 0xe1, 0xc9, 0x31, 0x27, 0x15, 0xee, 0x36, 0xb4, 0x93, 0xc8, 0xac, 0xa4, 0xbb, 0x7c, 0x5d, 0x3b, + 0xb8, 0x06, 0xf2, 0x2f, 0x44, 0xfa, 0xd0, 0x5a, 0x62, 0xd9, 0xec, 0x56, 0x75, 0x24, 0x1f, 0xa1, + 0xfd, 0xdb, 0x8f, 0x0a, 0x54, 0x4b, 0xd5, 0x1d, 0xee, 0x5b, 0xf5, 0xa2, 0x5a, 0xab, 0x45, 0xb5, + 0xbe, 0x57, 0x2e, 0xad, 0xa1, 0xe3, 0xcd, 0x23, 0xed, 0xf0, 0x13, 0xc0, 0x7d, 0xca, 0xa4, 0x03, + 0xed, 0xf1, 0xe4, 0xda, 0x19, 0xf5, 0x37, 0xc8, 0x0b, 0x18, 0x38, 0xd7, 0x57, 0xb3, 0x4b, 0xe7, + 0xd2, 0x9d, 0x9c, 0x4c, 0xbd, 0xd3, 0x93, 0xb3, 0xf3, 0xd9, 0x78, 0xdc, 0xd7, 0x0e, 0xbf, 0xc0, + 0xee, 0x83, 0x6f, 0x5c, 0x15, 0x51, 0xc7, 0xa5, 0x37, 0xfd, 0x0d, 0xb2, 0x07, 0x5d, 0x77, 0x72, + 0xe1, 0x78, 0xb3, 0x6f, 0xae, 0xf7, 0x63, 0xdc, 0xd7, 0x48, 0x0f, 0xe0, 0x64, 0xea, 0x50, 0xd7, + 0x9b, 0x5d, 0x4e, 0x6f, 0xfa, 0x9b, 0xa7, 0x01, 0xbc, 0x64, 0x22, 0xb6, 0x12, 0x94, 0x8b, 0x88, + 0xdf, 0x3e, 0x4e, 0xe4, 0xb4, 0xd3, 0x44, 0x72, 0x35, 0xff, 0x79, 0x1c, 0x72, 0xf9, 0xab, 0x98, + 0x5b, 0x4c, 0xc4, 0x76, 0x83, 0xdb, 0x77, 0xb8, 0xcd, 0x22, 0x8e, 0x89, 0xb4, 0x43, 0x11, 0x66, + 0x29, 0x5b, 0xd3, 0xd5, 0xef, 0x3b, 0xdf, 0x56, 0xdd, 0x3e, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, + 0xb1, 0xb3, 0xea, 0x13, 0xce, 0x03, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/taskexeclog.pb.go b/client/gogrpc/conductor/model/taskexeclog.pb.go index 2761ddd081..d62a8be72b 100644 --- a/client/gogrpc/conductor/model/taskexeclog.pb.go +++ b/client/gogrpc/conductor/model/taskexeclog.pb.go @@ -31,7 +31,7 @@ func (m *TaskExecLog) Reset() { *m = TaskExecLog{} } func (m *TaskExecLog) String() string { return proto.CompactTextString(m) } func (*TaskExecLog) ProtoMessage() {} func (*TaskExecLog) Descriptor() ([]byte, []int) { - return fileDescriptor_taskexeclog_fcbee8d85ec58fef, []int{0} + return fileDescriptor_taskexeclog_e9c8274b44d54689, []int{0} } func (m *TaskExecLog) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskExecLog.Unmarshal(m, b) @@ -73,26 +73,26 @@ func (m *TaskExecLog) GetCreatedTime() int64 { } func init() { - 
proto.RegisterType((*TaskExecLog)(nil), "com.netflix.conductor.proto.TaskExecLog") + proto.RegisterType((*TaskExecLog)(nil), "conductor.proto.TaskExecLog") } func init() { - proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_fcbee8d85ec58fef) + proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_e9c8274b44d54689) } -var fileDescriptor_taskexeclog_fcbee8d85ec58fef = []byte{ - // 201 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x4c, 0x8f, 0x31, 0x4b, 0xc5, 0x30, - 0x10, 0x80, 0xa9, 0x85, 0x27, 0xe6, 0x29, 0x48, 0x96, 0x57, 0x70, 0x79, 0x3a, 0x75, 0x4a, 0x06, - 0x37, 0xc7, 0x82, 0x83, 0xe8, 0x20, 0xa5, 0x93, 0x0e, 0xa5, 0xbd, 0x9c, 0x31, 0x34, 0xe9, 0x95, - 0xf4, 0x0a, 0xfd, 0xf9, 0xd2, 0x5a, 0x1e, 0xdd, 0xee, 0xbe, 0x83, 0x8f, 0xef, 0xc4, 0x29, 0x90, - 0x41, 0xaf, 0xb9, 0x19, 0x3b, 0x9c, 0x11, 0x3c, 0x59, 0x35, 0x44, 0x62, 0x92, 0x0f, 0x40, 0x41, - 0xf5, 0xc8, 0x3f, 0xde, 0xcd, 0x0a, 0xa8, 0x37, 0x13, 0x30, 0xc5, 0xff, 0xe3, 0xd3, 0xb7, 0x38, - 0x56, 0xcd, 0xd8, 0xbd, 0xce, 0x08, 0x1f, 0x64, 0xe5, 0xbd, 0x48, 0x3d, 0xd9, 0x2c, 0x39, 0x27, - 0xf9, 0x4d, 0xb9, 0x8c, 0xf2, 0x24, 0xae, 0x17, 0x65, 0xed, 0x4c, 0x76, 0xb5, 0xd2, 0xc3, 0xb2, - 0xbe, 0x19, 0xf9, 0x28, 0x6e, 0x21, 0x62, 0xc3, 0x68, 0x6a, 0x76, 0x01, 0xb3, 0xf4, 0x9c, 0xe4, - 0x69, 0x79, 0xdc, 0x58, 0xe5, 0x02, 0x16, 0xef, 0xc5, 0xdd, 0x4e, 0xfe, 0xd9, 0x7e, 0xbd, 0x58, - 0xc7, 0xbf, 0x53, 0xab, 0x80, 0x82, 0xde, 0x8a, 0xf4, 0xa5, 0x48, 0x83, 0x77, 0xd8, 0xb3, 0xb6, - 0x64, 0xe3, 0x00, 0x3b, 0xbe, 0x3e, 0xd6, 0x1e, 0xd6, 0xe0, 0xe7, 0xbf, 0x00, 0x00, 0x00, 0xff, - 0xff, 0xf0, 0xe3, 0x45, 0x74, 0xe8, 0x00, 0x00, 0x00, +var fileDescriptor_taskexeclog_e9c8274b44d54689 = []byte{ + // 205 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0x31, 0x4b, 0xc4, 0x40, + 0x10, 0x85, 0x89, 0x81, 0x13, 0xf7, 0x14, 0x65, 0x9b, 0x0b, 0xd8, 0x9c, 0x56, 0x57, 0xed, 0x16, + 0x76, 0x96, 0x07, 0x16, 0x82, 0x85, 0x84, 0x54, 0x5a, 0x84, 0x64, 0x76, 0xdc, 0x2c, 0xd9, 0xcd, + 0x84, 0xcd, 0x04, 0xf2, 0xf3, 0x25, 0x31, 0x48, 0xb8, 0x6e, 0xe6, 0x83, 0xf7, 0x3e, 0x9e, 0x38, + 0x04, 0x32, 0xe8, 0x35, 0x57, 0x43, 0x8b, 0x13, 0x82, 0x27, 0xab, 0xfa, 0x48, 0x4c, 0xf2, 0x1e, + 0xa8, 0x33, 0x23, 0x30, 0xc5, 0x3f, 0xf0, 0xfc, 0x2d, 0xf6, 0x45, 0x35, 0xb4, 0x6f, 0x13, 0xc2, + 0x07, 0x59, 0xf9, 0x20, 0x52, 0x4f, 0x36, 0x4b, 0x8e, 0xc9, 0xe9, 0x26, 0x9f, 0x4f, 0x79, 0x10, + 0xd7, 0x73, 0x4d, 0xe9, 0x4c, 0x76, 0xb5, 0xd0, 0xdd, 0xfc, 0xbe, 0x1b, 0xf9, 0x24, 0x6e, 0x21, + 0x62, 0xc5, 0x68, 0x4a, 0x76, 0x01, 0xb3, 0xf4, 0x98, 0x9c, 0xd2, 0x7c, 0xbf, 0xb2, 0xc2, 0x05, + 0x3c, 0x37, 0xe2, 0x11, 0x28, 0xa8, 0x0e, 0xf9, 0xc7, 0xbb, 0x49, 0x5d, 0xb8, 0xcf, 0x77, 0x1b, + 0xf3, 0x67, 0xfd, 0xf5, 0x6a, 0x1d, 0x37, 0x63, 0xad, 0x80, 0x82, 0x5e, 0x23, 0xfa, 0x3f, 0xa2, + 0xc1, 0x3b, 0xec, 0x58, 0x5b, 0xb2, 0xb1, 0x87, 0x0d, 0x5f, 0x96, 0xd6, 0xbb, 0xa5, 0xf1, 0xe5, + 0x37, 0x00, 0x00, 0xff, 0xff, 0x78, 0x61, 0x87, 0x8e, 0xf9, 0x00, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/taskresult.pb.go b/client/gogrpc/conductor/model/taskresult.pb.go index c4f12c1682..25846d54fa 100644 --- a/client/gogrpc/conductor/model/taskresult.pb.go +++ b/client/gogrpc/conductor/model/taskresult.pb.go @@ -6,6 +6,7 @@ package model // import "github.com/netflix/conductor/client/gogrpc/conductor/mo import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" +import any "github.com/golang/protobuf/ptypes/any" import 
_struct "github.com/golang/protobuf/ptypes/struct" // Reference imports to suppress errors if they are not otherwise used. @@ -22,30 +23,33 @@ const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type TaskResult_Status int32 const ( - TaskResult_IN_PROGRESS TaskResult_Status = 0 - TaskResult_FAILED TaskResult_Status = 1 - TaskResult_COMPLETED TaskResult_Status = 2 - TaskResult_SCHEDULED TaskResult_Status = 3 + TaskResult_IN_PROGRESS TaskResult_Status = 0 + TaskResult_FAILED TaskResult_Status = 1 + TaskResult_FAILED_WITH_TERMINAL_ERROR TaskResult_Status = 2 + TaskResult_COMPLETED TaskResult_Status = 3 + TaskResult_SCHEDULED TaskResult_Status = 4 ) var TaskResult_Status_name = map[int32]string{ 0: "IN_PROGRESS", 1: "FAILED", - 2: "COMPLETED", - 3: "SCHEDULED", + 2: "FAILED_WITH_TERMINAL_ERROR", + 3: "COMPLETED", + 4: "SCHEDULED", } var TaskResult_Status_value = map[string]int32{ - "IN_PROGRESS": 0, - "FAILED": 1, - "COMPLETED": 2, - "SCHEDULED": 3, + "IN_PROGRESS": 0, + "FAILED": 1, + "FAILED_WITH_TERMINAL_ERROR": 2, + "COMPLETED": 3, + "SCHEDULED": 4, } func (x TaskResult_Status) String() string { return proto.EnumName(TaskResult_Status_name, int32(x)) } func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskresult_544d5e3612411ce1, []int{0, 0} + return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0, 0} } type TaskResult struct { @@ -54,8 +58,9 @@ type TaskResult struct { ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - Status TaskResult_Status `protobuf:"varint,6,opt,name=status,enum=com.netflix.conductor.proto.TaskResult_Status" json:"status,omitempty"` + Status TaskResult_Status `protobuf:"varint,6,opt,name=status,enum=conductor.proto.TaskResult_Status" json:"status,omitempty"` OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + OutputMessage *any.Any `protobuf:"bytes,8,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -65,7 +70,7 @@ func (m *TaskResult) Reset() { *m = TaskResult{} } func (m *TaskResult) String() string { return proto.CompactTextString(m) } func (*TaskResult) ProtoMessage() {} func (*TaskResult) Descriptor() ([]byte, []int) { - return fileDescriptor_taskresult_544d5e3612411ce1, []int{0} + return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0} } func (m *TaskResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskResult.Unmarshal(m, b) @@ -134,43 +139,54 @@ func (m *TaskResult) GetOutputData() map[string]*_struct.Value { return nil } +func (m *TaskResult) GetOutputMessage() *any.Any { + if m != nil { + return m.OutputMessage + } + return nil +} + func init() { - proto.RegisterType((*TaskResult)(nil), "com.netflix.conductor.proto.TaskResult") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.TaskResult.OutputDataEntry") - proto.RegisterEnum("com.netflix.conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) -} - 
-func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_544d5e3612411ce1) } - -var fileDescriptor_taskresult_544d5e3612411ce1 = []byte{ - // 455 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x6f, 0xd3, 0x30, - 0x14, 0xc6, 0xc9, 0xba, 0x65, 0xf4, 0x15, 0x58, 0x65, 0x8d, 0x2e, 0xda, 0x38, 0x54, 0x3b, 0xf5, - 0x80, 0x1c, 0x54, 0x10, 0xa0, 0xdd, 0xb6, 0x36, 0x85, 0xa0, 0x41, 0xab, 0x64, 0x43, 0x88, 0x4b, - 0xe4, 0x38, 0x4e, 0x88, 0xe2, 0xc6, 0x95, 0xed, 0x30, 0xf6, 0xdf, 0xf0, 0xa7, 0x22, 0x3b, 0xe9, - 0x36, 0x71, 0x40, 0xdc, 0xf2, 0xde, 0xef, 0xbd, 0xa7, 0xef, 0xfb, 0x1c, 0x18, 0xad, 0x45, 0xc6, - 0xb8, 0xaf, 0x89, 0xaa, 0x24, 0x53, 0x0d, 0xd7, 0x78, 0x23, 0x85, 0x16, 0xe8, 0x84, 0x8a, 0x35, - 0xae, 0x99, 0xce, 0x79, 0xf9, 0x0b, 0x53, 0x51, 0x67, 0x0d, 0xd5, 0x42, 0xb6, 0xf0, 0xf8, 0x45, - 0x21, 0x44, 0xc1, 0x99, 0x6f, 0xab, 0xb4, 0xc9, 0x7d, 0xa5, 0x65, 0x43, 0xbb, 0xd5, 0xd3, 0xdf, - 0xbb, 0x00, 0x57, 0x44, 0x55, 0x91, 0xbd, 0x87, 0x5e, 0xc1, 0xe1, 0x8d, 0x90, 0x55, 0xce, 0xc5, - 0x4d, 0x52, 0xd6, 0x4a, 0x93, 0x9a, 0xb2, 0xa4, 0xcc, 0x3c, 0x67, 0xec, 0x4c, 0xfa, 0x11, 0xda, - 0xb2, 0xb0, 0x43, 0x61, 0x86, 0x8e, 0x60, 0xdf, 0xe8, 0x31, 0x43, 0x3b, 0x76, 0xc8, 0x35, 0x65, - 0x98, 0xa1, 0xb7, 0x70, 0x24, 0x19, 0x51, 0xa2, 0x4e, 0x72, 0x21, 0x93, 0xb2, 0xa6, 0x62, 0xbd, - 0xe1, 0x4c, 0x97, 0xa2, 0xf6, 0x7a, 0x76, 0xf0, 0x79, 0x8b, 0x17, 0x42, 0x86, 0x0f, 0x20, 0x7a, - 0x03, 0x23, 0x4a, 0x38, 0x4f, 0x09, 0xad, 0x12, 0x92, 0x6b, 0x26, 0x13, 0xc5, 0x8c, 0x27, 0xe5, - 0xed, 0x8e, 0x9d, 0x49, 0x2f, 0x3a, 0xdc, 0xd2, 0x73, 0x03, 0xe3, 0x96, 0xa1, 0x13, 0xe8, 0x1b, - 0x71, 0x4c, 0x1a, 0x21, 0x7b, 0xf6, 0xfe, 0xe3, 0xb6, 0x11, 0x66, 0x68, 0x01, 0xae, 0xd2, 0x44, - 0x37, 0xca, 0x73, 0xc7, 0xce, 0xe4, 0xd9, 0x14, 0xe3, 0x7f, 0x04, 0x86, 0xef, 0xe3, 0xc0, 0xb1, - 0xdd, 0x8a, 0xba, 0x6d, 0xf4, 0x0d, 0x06, 0xa2, 0xd1, 0x9b, 0x46, 0x27, 0x19, 0xd1, 0xc4, 0xdb, - 0x1f, 0xf7, 0x26, 0x83, 0xe9, 0xbb, 0xff, 0x3d, 0xb6, 0xb4, 0xab, 0x73, 0xa2, 0x49, 0x50, 0x6b, - 0x79, 0x1b, 0x81, 0xb8, 0x6b, 0x1c, 0x5f, 0xc3, 0xc1, 0x5f, 0x18, 0x0d, 0xa1, 0x57, 0xb1, 0xdb, - 0x2e, 0x79, 0xf3, 0x89, 0x5e, 0xc2, 0xde, 0x4f, 0xc2, 0x1b, 0x66, 0x83, 0x1e, 0x4c, 0x47, 0xb8, - 0x7d, 0x59, 0xbc, 0x7d, 0x59, 0xfc, 0xd5, 0xd0, 0xa8, 0x1d, 0x3a, 0xdb, 0x79, 0xef, 0x9c, 0xce, - 0xc0, 0x6d, 0x2d, 0xa0, 0x03, 0x18, 0x84, 0x5f, 0x92, 0x55, 0xb4, 0xfc, 0x10, 0x05, 0x71, 0x3c, - 0x7c, 0x84, 0x00, 0xdc, 0xc5, 0x79, 0x78, 0x19, 0xcc, 0x87, 0x0e, 0x7a, 0x0a, 0xfd, 0xd9, 0xf2, - 0xf3, 0xea, 0x32, 0xb8, 0x0a, 0xe6, 0xc3, 0x1d, 0x53, 0xc6, 0xb3, 0x8f, 0xc1, 0xfc, 0xda, 0xd0, - 0xde, 0xc5, 0xa7, 0x8b, 0x27, 0xf7, 0x2e, 0x56, 0xe9, 0xf7, 0xb3, 0xa2, 0xd4, 0x3f, 0x9a, 0xd4, - 0xd8, 0xf6, 0x3b, 0xdb, 0xfe, 0x9d, 0x6d, 0x9f, 0xf2, 0x92, 0xd5, 0xda, 0x2f, 0x44, 0x21, 0x37, - 0xf4, 0x41, 0xdf, 0xfe, 0xba, 0xa9, 0x6b, 0xb5, 0xbe, 0xfe, 0x13, 0x00, 0x00, 0xff, 0xff, 0xbe, - 0xcf, 0xb8, 0x26, 0xca, 0x02, 0x00, 0x00, + proto.RegisterType((*TaskResult)(nil), "conductor.proto.TaskResult") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskResult.OutputDataEntry") + proto.RegisterEnum("conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) +} + +func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_ccaec941f8ac2f31) } + +var fileDescriptor_taskresult_ccaec941f8ac2f31 = []byte{ + // 517 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x02, 0xff, 0x7c, 0x93, 0xdf, 0x6e, 0xda, 0x30, + 0x14, 0xc6, 0x17, 0xa0, 0x69, 0x39, 0xac, 0x05, 0x59, 0x8c, 0x66, 0x74, 0x9a, 0x10, 0x57, 0x48, + 0x9b, 0x92, 0x89, 0x4d, 0xd3, 0xc4, 0xae, 0x68, 0x49, 0xd7, 0x48, 0x50, 0x90, 0xa1, 0x9b, 0xb4, + 0x9b, 0xc8, 0x38, 0x26, 0x8b, 0x08, 0x31, 0xb2, 0x9d, 0x75, 0x3c, 0xf0, 0xde, 0x63, 0x4a, 0x1c, + 0xda, 0x8a, 0x49, 0xbd, 0xf3, 0x39, 0xbf, 0xef, 0x7c, 0x3a, 0x7f, 0x64, 0x68, 0x6d, 0x78, 0xc0, + 0x62, 0x47, 0x11, 0xb9, 0x16, 0x4c, 0xa6, 0xb1, 0xb2, 0xb7, 0x82, 0x2b, 0x8e, 0xea, 0x94, 0x27, + 0x41, 0x4a, 0x15, 0x17, 0x3a, 0xd1, 0x7e, 0x13, 0x72, 0x1e, 0xc6, 0xcc, 0xc9, 0xa3, 0x65, 0xba, + 0x72, 0xa4, 0x12, 0x29, 0x2d, 0xe4, 0xed, 0xd7, 0x87, 0x94, 0x24, 0x3b, 0x8d, 0xba, 0x7f, 0x2b, + 0x00, 0x0b, 0x22, 0xd7, 0x38, 0xb7, 0x47, 0x1f, 0xa0, 0x79, 0xcf, 0xc5, 0x7a, 0x15, 0xf3, 0x7b, + 0x3f, 0x4a, 0xa4, 0x22, 0x09, 0x65, 0x7e, 0x14, 0x58, 0x46, 0xc7, 0xe8, 0x55, 0x31, 0xda, 0x33, + 0xaf, 0x40, 0x5e, 0x80, 0xce, 0xe1, 0x38, 0x6b, 0x2f, 0x13, 0x95, 0x72, 0x91, 0x99, 0x85, 0x5e, + 0x80, 0x3e, 0xc3, 0xb9, 0x60, 0x44, 0xf2, 0xc4, 0x5f, 0x71, 0xe1, 0x47, 0x09, 0xe5, 0x9b, 0x6d, + 0xcc, 0x54, 0xc4, 0x13, 0xab, 0x9c, 0x0b, 0x5f, 0x69, 0x7c, 0xcd, 0x85, 0xf7, 0x04, 0xa2, 0x4f, + 0xd0, 0xa2, 0x24, 0x8e, 0x97, 0x84, 0xae, 0x7d, 0xb2, 0x52, 0x4c, 0xf8, 0x92, 0x65, 0xe3, 0x4a, + 0xab, 0xd2, 0x31, 0x7a, 0x65, 0xdc, 0xdc, 0xd3, 0x61, 0x06, 0xe7, 0x9a, 0xa1, 0x0b, 0xa8, 0x66, + 0xcd, 0x31, 0x91, 0x35, 0x72, 0x94, 0xfb, 0x9f, 0xe8, 0x84, 0x17, 0xa0, 0x01, 0x98, 0x52, 0x11, + 0x95, 0x4a, 0xcb, 0xec, 0x18, 0xbd, 0xb3, 0x7e, 0xd7, 0x3e, 0xd8, 0x9f, 0xfd, 0xb8, 0x02, 0x7b, + 0x9e, 0x2b, 0x71, 0x51, 0x81, 0xc6, 0x50, 0xe3, 0xa9, 0xda, 0xa6, 0xca, 0x0f, 0x88, 0x22, 0xd6, + 0x71, 0xa7, 0xdc, 0xab, 0xf5, 0xdf, 0x3d, 0x67, 0x30, 0xcd, 0xe5, 0x23, 0xa2, 0x88, 0x9b, 0x28, + 0xb1, 0xc3, 0xc0, 0x1f, 0x12, 0xe8, 0x2b, 0x9c, 0x15, 0x6e, 0x1b, 0x26, 0x25, 0x09, 0x99, 0x75, + 0xd2, 0x31, 0x7a, 0xb5, 0x7e, 0xd3, 0xd6, 0x27, 0xb2, 0xf7, 0x27, 0xb2, 0x87, 0xc9, 0x0e, 0x9f, + 0x6a, 0xed, 0x44, 0x4b, 0xdb, 0x77, 0x50, 0x3f, 0xf0, 0x46, 0x0d, 0x28, 0xaf, 0xd9, 0xae, 0x38, + 0x4f, 0xf6, 0x44, 0xef, 0xe1, 0xe8, 0x37, 0x89, 0x53, 0x96, 0x5f, 0xa3, 0xd6, 0x6f, 0xfd, 0x67, + 0xfc, 0x3d, 0xa3, 0x58, 0x8b, 0x06, 0xa5, 0x2f, 0x46, 0x97, 0x82, 0xa9, 0x67, 0x46, 0x75, 0xa8, + 0x79, 0xb7, 0xfe, 0x0c, 0x4f, 0xbf, 0x61, 0x77, 0x3e, 0x6f, 0xbc, 0x40, 0x00, 0xe6, 0xf5, 0xd0, + 0x1b, 0xbb, 0xa3, 0x86, 0x81, 0xde, 0x42, 0x5b, 0xbf, 0xfd, 0x1f, 0xde, 0xe2, 0xc6, 0x5f, 0xb8, + 0x78, 0xe2, 0xdd, 0x0e, 0xc7, 0xbe, 0x8b, 0xf1, 0x14, 0x37, 0x4a, 0xe8, 0x14, 0xaa, 0x57, 0xd3, + 0xc9, 0x6c, 0xec, 0x2e, 0xdc, 0x51, 0xa3, 0x9c, 0x85, 0xf3, 0xab, 0x1b, 0x77, 0x74, 0x97, 0x55, + 0x57, 0x2e, 0x43, 0xb8, 0xa0, 0x7c, 0x63, 0x27, 0x4c, 0xad, 0xe2, 0xe8, 0xcf, 0xe1, 0xfa, 0x2e, + 0x5f, 0x3e, 0xee, 0x6f, 0xb6, 0xfc, 0x39, 0x08, 0x23, 0xf5, 0x2b, 0x5d, 0xda, 0x94, 0x6f, 0x9c, + 0xa2, 0xc2, 0x79, 0xa8, 0x70, 0x68, 0x1c, 0xb1, 0x44, 0x39, 0x21, 0x0f, 0xc5, 0x96, 0x3e, 0xc9, + 0xe7, 0x7f, 0x65, 0x69, 0xe6, 0x86, 0x1f, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xff, 0xd5, 0x82, + 0xee, 0x3b, 0x03, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/tasksummary.pb.go b/client/gogrpc/conductor/model/tasksummary.pb.go index 5bad0055bd..abdeaab07e 100644 --- a/client/gogrpc/conductor/model/tasksummary.pb.go +++ b/client/gogrpc/conductor/model/tasksummary.pb.go @@ -26,7 +26,7 @@ type TaskSummary struct { StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` UpdateTime string 
`protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - Status Task_Status `protobuf:"varint,8,opt,name=status,enum=com.netflix.conductor.proto.Task_Status" json:"status,omitempty"` + Status Task_Status `protobuf:"varint,8,opt,name=status,enum=conductor.proto.Task_Status" json:"status,omitempty"` ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` ExecutionTime int64 `protobuf:"varint,10,opt,name=execution_time,json=executionTime" json:"execution_time,omitempty"` QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime" json:"queue_wait_time,omitempty"` @@ -44,7 +44,7 @@ func (m *TaskSummary) Reset() { *m = TaskSummary{} } func (m *TaskSummary) String() string { return proto.CompactTextString(m) } func (*TaskSummary) ProtoMessage() {} func (*TaskSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_tasksummary_ab79a64782e5b735, []int{0} + return fileDescriptor_tasksummary_ab439d130c50da04, []int{0} } func (m *TaskSummary) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskSummary.Unmarshal(m, b) @@ -177,41 +177,41 @@ func (m *TaskSummary) GetTaskId() string { } func init() { - proto.RegisterType((*TaskSummary)(nil), "com.netflix.conductor.proto.TaskSummary") + proto.RegisterType((*TaskSummary)(nil), "conductor.proto.TaskSummary") } func init() { - proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab79a64782e5b735) + proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab439d130c50da04) } -var fileDescriptor_tasksummary_ab79a64782e5b735 = []byte{ +var fileDescriptor_tasksummary_ab439d130c50da04 = []byte{ // 446 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0x4f, 0x6b, 0x1b, 0x31, - 0x10, 0xc5, 0x71, 0x93, 0xf8, 0xcf, 0x38, 0xeb, 0x84, 0xa5, 0xad, 0xb7, 0x0d, 0x25, 0x26, 0xa5, - 0xc5, 0xa7, 0x5d, 0x68, 0xa1, 0x87, 0x9e, 0x4a, 0x28, 0x05, 0x53, 0x28, 0xc5, 0x31, 0x14, 0x7a, - 0x59, 0x64, 0x69, 0xd6, 0x11, 0x5e, 0x49, 0x5b, 0xad, 0x84, 0xe3, 0x0f, 0xd9, 0xef, 0x54, 0x34, - 0xda, 0xb8, 0x3e, 0xe5, 0xa8, 0xdf, 0x7b, 0x9a, 0x19, 0xbd, 0x11, 0x4c, 0x95, 0x11, 0x58, 0x17, - 0x8e, 0xb5, 0xdb, 0xd6, 0x2b, 0xc5, 0xec, 0x3e, 0x6f, 0xac, 0x71, 0x26, 0xbd, 0xe2, 0x46, 0xe5, - 0x1a, 0x5d, 0x55, 0xcb, 0x87, 0x9c, 0x1b, 0x2d, 0x3c, 0x77, 0xc6, 0x46, 0xf1, 0xf5, 0xe5, 0xff, - 0x5b, 0x91, 0xdc, 0xfc, 0x3d, 0x85, 0xf1, 0x8a, 0xb5, 0xdb, 0xbb, 0x58, 0x24, 0xbd, 0x86, 0xf1, - 0xce, 0xd8, 0x6d, 0x55, 0x9b, 0x5d, 0x29, 0x45, 0xd6, 0x9b, 0xf5, 0xe6, 0xa3, 0x25, 0x3c, 0xa2, - 0x85, 0x48, 0xdf, 0x42, 0x72, 0x30, 0xb8, 0x7d, 0x83, 0xd9, 0x33, 0xb2, 0x9c, 0x3f, 0xc2, 0xd5, - 0xbe, 0xc1, 0xf4, 0x1d, 0x4c, 0xb8, 0xb1, 0x16, 0x6b, 0xe6, 0xa4, 0xd1, 0xa1, 0xd0, 0x09, 0xb9, - 0x92, 0x23, 0xba, 0x10, 0xc1, 0xd6, 0xf2, 0x7b, 0x14, 0xbe, 0x46, 0x51, 0x3a, 0xa9, 0x30, 0x3b, - 0x8d, 0xb6, 0x03, 0x5d, 0x49, 0x85, 0xe9, 0x1b, 0x80, 0xd6, 0x31, 0xeb, 0xa2, 0xe5, 0x8c, 0x2c, - 0x23, 0x22, 0x24, 0x5f, 0xc3, 0xd8, 0x37, 0x82, 0x39, 0x8c, 0x7a, 0x3f, 0x8e, 0x1c, 0x11, 0x19, - 0x5e, 0xc1, 0x10, 0x75, 0xd7, 0x60, 0x40, 0xea, 0x00, 0x75, 0x2c, 0xfd, 0x05, 0xfa, 0xad, 0x63, - 0xce, 0xb7, 0xd9, 0x70, 0xd6, 0x9b, 0x4f, 0x3e, 0xcc, 0xf3, 0x27, 0xe2, 0xcb, 0x43, 0x50, 0xf9, - 0x1d, 0xf9, 0x97, 0xdd, 0xbd, 0xf4, 0x13, 0x4c, 0x2d, 0xb2, 0xd6, 0xe8, 0xb2, 0x32, 0xb6, 0x94, 
- 0x9a, 0x1b, 0xd5, 0xd4, 0x18, 0x1e, 0x98, 0x8d, 0xa8, 0xd7, 0x8b, 0x28, 0x7f, 0x33, 0x76, 0x71, - 0x24, 0x86, 0xb7, 0xe3, 0x03, 0x72, 0x4f, 0x01, 0xd1, 0x68, 0x30, 0xeb, 0xcd, 0x4f, 0x96, 0xc9, - 0x81, 0xd2, 0x80, 0xef, 0xe1, 0xe2, 0x8f, 0x47, 0x8f, 0xe5, 0x8e, 0xc9, 0x2e, 0x80, 0x71, 0xf4, - 0x11, 0xfe, 0xc5, 0x64, 0x0c, 0xe1, 0x06, 0x92, 0xb0, 0xd5, 0x52, 0x60, 0x55, 0x6a, 0xa6, 0x30, - 0x3b, 0xa7, 0xe6, 0xe3, 0x00, 0xbf, 0x62, 0xf5, 0x83, 0x29, 0x4c, 0xaf, 0x60, 0x44, 0x1e, 0x5a, - 0x5b, 0x42, 0xfa, 0x30, 0x00, 0x5a, 0xd9, 0x73, 0x38, 0x93, 0xba, 0xf1, 0x2e, 0x9b, 0x90, 0x10, - 0x0f, 0xe9, 0x4b, 0xe8, 0x1b, 0xef, 0x02, 0xbe, 0x20, 0xdc, 0x9d, 0xd2, 0x29, 0x0c, 0xa8, 0x94, - 0x14, 0xd9, 0x65, 0x14, 0xc2, 0x71, 0x21, 0x6e, 0xbf, 0xdf, 0x26, 0x47, 0xdf, 0xe9, 0xe7, 0xfa, - 0xf7, 0xe7, 0x8d, 0x74, 0xf7, 0x7e, 0x1d, 0x72, 0x2d, 0xba, 0x5c, 0x8b, 0x43, 0xae, 0x05, 0xaf, - 0x25, 0x6a, 0x57, 0x6c, 0xcc, 0xc6, 0x36, 0xfc, 0x88, 0xd3, 0x3f, 0x5d, 0xf7, 0x29, 0xf6, 0x8f, - 0xff, 0x02, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x7f, 0x0a, 0x96, 0xed, 0x02, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xcf, 0x8b, 0x13, 0x31, + 0x14, 0xc7, 0xa9, 0xbb, 0xdb, 0x1f, 0xaf, 0x3b, 0xed, 0x32, 0xa8, 0x1d, 0x5d, 0x65, 0xcb, 0x8a, + 0xd2, 0xd3, 0x14, 0x54, 0x3c, 0x78, 0x5c, 0x44, 0xe8, 0x45, 0xa4, 0x5b, 0x10, 0xbc, 0x0c, 0x69, + 0xf2, 0xa6, 0x0d, 0x9d, 0x24, 0x63, 0x26, 0xa1, 0xdb, 0x3f, 0xcf, 0xff, 0x4c, 0xf2, 0x32, 0x5b, + 0xcb, 0x1e, 0xf3, 0xf9, 0x7e, 0xf2, 0x92, 0xf7, 0x12, 0x98, 0x28, 0x23, 0xb0, 0x9a, 0x3b, 0xd6, + 0xec, 0x1a, 0xaf, 0x14, 0xb3, 0x87, 0xbc, 0xb6, 0xc6, 0x99, 0x74, 0xcc, 0x8d, 0x16, 0x9e, 0x3b, + 0x63, 0x23, 0x78, 0x7d, 0xf5, 0xdf, 0x8c, 0xe4, 0xf6, 0xef, 0x39, 0x0c, 0x57, 0xac, 0xd9, 0xdd, + 0xc7, 0x8d, 0xe9, 0x0d, 0x0c, 0xf7, 0xc6, 0xee, 0xca, 0xca, 0xec, 0x0b, 0x29, 0xb2, 0xce, 0xb4, + 0x33, 0x1b, 0x2c, 0xe1, 0x11, 0x2d, 0x44, 0xfa, 0x0e, 0x92, 0xa3, 0xe0, 0x0e, 0x35, 0x66, 0xcf, + 0x48, 0xb9, 0x7c, 0x84, 0xab, 0x43, 0x8d, 0xe9, 0x7b, 0x18, 0x71, 0x63, 0x2d, 0x56, 0xcc, 0x49, + 0xa3, 0x43, 0xa1, 0x33, 0xb2, 0x92, 0x13, 0xba, 0x10, 0x41, 0x6b, 0xf8, 0x16, 0x85, 0xaf, 0x50, + 0x14, 0x4e, 0x2a, 0xcc, 0xce, 0xa3, 0x76, 0xa4, 0x2b, 0xa9, 0x30, 0x7d, 0x0b, 0xd0, 0x38, 0x66, + 0x5d, 0x54, 0x2e, 0x48, 0x19, 0x10, 0xa1, 0xf8, 0x06, 0x86, 0xbe, 0x16, 0xcc, 0x61, 0xcc, 0xbb, + 0xf1, 0xca, 0x11, 0x91, 0xf0, 0x0a, 0xfa, 0xa8, 0xdb, 0x03, 0x7a, 0x94, 0xf6, 0x50, 0xc7, 0xd2, + 0x9f, 0xa1, 0xdb, 0x38, 0xe6, 0x7c, 0x93, 0xf5, 0xa7, 0x9d, 0xd9, 0xe8, 0xe3, 0x9b, 0xfc, 0xc9, + 0xc8, 0xf2, 0x30, 0x9c, 0xfc, 0x9e, 0x9c, 0x65, 0xeb, 0xa6, 0x5f, 0x60, 0x62, 0x91, 0x35, 0x46, + 0x17, 0xa5, 0xb1, 0x85, 0xd4, 0xdc, 0xa8, 0xba, 0xc2, 0xd0, 0x54, 0x36, 0xa0, 0xfa, 0x2f, 0x62, + 0xfc, 0xdd, 0xd8, 0xc5, 0x49, 0x18, 0xfa, 0xc5, 0x07, 0xe4, 0x9e, 0x86, 0x42, 0xd7, 0x81, 0x69, + 0x67, 0x76, 0xb6, 0x4c, 0x8e, 0x94, 0x2e, 0xf5, 0x01, 0xc6, 0x7f, 0x3c, 0x7a, 0x2c, 0xf6, 0x4c, + 0xb6, 0x4d, 0x0f, 0xa3, 0x47, 0xf8, 0x17, 0x93, 0xb1, 0xf1, 0x5b, 0x48, 0xc2, 0x4b, 0x16, 0x02, + 0xcb, 0x42, 0x33, 0x85, 0xd9, 0x25, 0x1d, 0x3e, 0x0c, 0xf0, 0x1b, 0x96, 0x3f, 0x98, 0xc2, 0xf4, + 0x1a, 0x06, 0xe4, 0xd0, 0x53, 0x25, 0x94, 0xf7, 0x03, 0xa0, 0x67, 0x7a, 0x0e, 0x17, 0x52, 0xd7, + 0xde, 0x65, 0x23, 0x0a, 0xe2, 0x22, 0x7d, 0x09, 0x5d, 0xe3, 0x5d, 0xc0, 0x63, 0xc2, 0xed, 0x2a, + 0x9d, 0x40, 0x8f, 0x4a, 0x49, 0x91, 0x5d, 0xc5, 0x20, 0x2c, 0x17, 0xe2, 0x6e, 0x0b, 0xd7, 0xdc, + 0xa8, 0x5c, 0xa3, 0x2b, 0x2b, 0xf9, 0xf0, 0x74, 0x82, 0x77, 0xc9, 0xc9, 0xff, 0xfa, 0xb9, 0xfe, + 0xfd, 0x75, 0x23, 0xdd, 0xd6, 0xaf, 
0x73, 0x6e, 0xd4, 0xbc, 0xdd, 0x32, 0x3f, 0x6e, 0x99, 0xf3, + 0x4a, 0xa2, 0x76, 0xf3, 0x8d, 0xd9, 0xd8, 0x9a, 0x9f, 0x70, 0xfa, 0xb8, 0xeb, 0x2e, 0x55, 0xfc, + 0xf4, 0x2f, 0x00, 0x00, 0xff, 0xff, 0x32, 0xdb, 0x34, 0x28, 0xf2, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/workflow.pb.go b/client/gogrpc/conductor/model/workflow.pb.go index c2e30ab7b2..9588282808 100644 --- a/client/gogrpc/conductor/model/workflow.pb.go +++ b/client/gogrpc/conductor/model/workflow.pb.go @@ -51,11 +51,11 @@ func (x Workflow_WorkflowStatus) String() string { return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) } func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflow_827faed39aedc44b, []int{0, 0} + return fileDescriptor_workflow_ccfaf06783966538, []int{0, 0} } type Workflow struct { - Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,enum=com.netflix.conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId" json:"parent_workflow_id,omitempty"` @@ -81,7 +81,7 @@ func (m *Workflow) Reset() { *m = Workflow{} } func (m *Workflow) String() string { return proto.CompactTextString(m) } func (*Workflow) ProtoMessage() {} func (*Workflow) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_827faed39aedc44b, []int{0} + return fileDescriptor_workflow_ccfaf06783966538, []int{0} } func (m *Workflow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Workflow.Unmarshal(m, b) @@ -221,58 +221,58 @@ func (m *Workflow) GetFailedReferenceTaskNames() []string { } func init() { - proto.RegisterType((*Workflow)(nil), "com.netflix.conductor.proto.Workflow") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Workflow.InputEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.Workflow.OutputEntry") - proto.RegisterMapType((map[string]string)(nil), "com.netflix.conductor.proto.Workflow.TaskToDomainEntry") - proto.RegisterEnum("com.netflix.conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) + proto.RegisterType((*Workflow)(nil), "conductor.proto.Workflow") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.InputEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.OutputEntry") + proto.RegisterMapType((map[string]string)(nil), "conductor.proto.Workflow.TaskToDomainEntry") + proto.RegisterEnum("conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) } -func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_827faed39aedc44b) } +func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_ccfaf06783966538) } -var fileDescriptor_workflow_827faed39aedc44b = []byte{ +var fileDescriptor_workflow_ccfaf06783966538 = []byte{ // 688 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, - 0x18, 0x77, 0x8c, 0x0d, 0xf6, 0x8c, 0xcd, 0x72, 0x4e, 0xa9, 
0x60, 0xe2, 0xc4, 0x98, 0xec, 0x05, - 0xe9, 0x14, 0x51, 0x0c, 0x89, 0x31, 0xe0, 0x36, 0x6d, 0x02, 0x63, 0x96, 0x02, 0x89, 0x89, 0x69, - 0xba, 0xf6, 0x3a, 0x9a, 0xb5, 0x77, 0xcd, 0xf5, 0x0a, 0xee, 0xd3, 0xf9, 0xd5, 0xcc, 0xdd, 0xad, - 0x50, 0xd4, 0x10, 0x5e, 0xf8, 0xae, 0xf7, 0xfc, 0xfe, 0xf4, 0xee, 0xf7, 0xdc, 0x73, 0xd0, 0x8a, - 0xa9, 0x8f, 0xa3, 0xee, 0x15, 0x65, 0xd3, 0x20, 0xa2, 0x57, 0x46, 0xc2, 0x28, 0xa7, 0x68, 0xc3, - 0xa3, 0xb1, 0x41, 0x30, 0x0f, 0xa2, 0xf0, 0xa7, 0xe1, 0x51, 0xe2, 0x67, 0x1e, 0xa7, 0x4c, 0x81, - 0xeb, 0x9a, 0x92, 0x70, 0x37, 0x9d, 0xce, 0x2b, 0xcf, 0x26, 0x94, 0x4e, 0x22, 0xdc, 0x95, 0xab, - 0x71, 0x16, 0x74, 0x53, 0xce, 0x32, 0x8f, 0x2b, 0x74, 0xf3, 0x57, 0x0d, 0x96, 0xcf, 0xe7, 0xfe, - 0xe8, 0x10, 0xaa, 0x29, 0x77, 0x79, 0x96, 0xea, 0xa5, 0x76, 0xa9, 0xd3, 0xdc, 0xde, 0x31, 0xee, - 0xf8, 0x95, 0x91, 0xcb, 0xae, 0x3f, 0x4e, 0xa4, 0xd6, 0x9a, 0x7b, 0xa0, 0xa7, 0xb0, 0x8c, 0x89, - 0xef, 0xf0, 0x30, 0xc6, 0xfa, 0x42, 0xbb, 0xd4, 0x29, 0x5b, 0x4b, 0x98, 0xf8, 0x76, 0x18, 0x63, - 0xf4, 0x1c, 0xea, 0xf9, 0xa1, 0x9c, 0xd0, 0xd7, 0xcb, 0xed, 0x52, 0xa7, 0x66, 0x41, 0x5e, 0x32, - 0x7d, 0xb4, 0x05, 0x28, 0x71, 0x19, 0x26, 0xdc, 0x29, 0xf2, 0x16, 0x25, 0x4f, 0x53, 0xc8, 0xf9, - 0x0d, 0xfb, 0x1d, 0xac, 0xfd, 0xc9, 0x16, 0x01, 0x08, 0x49, 0x45, 0x4a, 0x5a, 0xb7, 0x25, 0xb6, - 0x9b, 0x4e, 0x4d, 0x1f, 0xed, 0x42, 0x45, 0xd0, 0x52, 0xbd, 0xda, 0x2e, 0x77, 0xea, 0xdb, 0x2f, - 0xee, 0x3c, 0xad, 0xd0, 0x58, 0x8a, 0x8f, 0x06, 0x50, 0x09, 0x49, 0x92, 0x71, 0x7d, 0x59, 0x0a, - 0x5f, 0xdf, 0x2f, 0x26, 0x53, 0x48, 0xfa, 0x84, 0xb3, 0x99, 0xa5, 0xe4, 0xc8, 0x84, 0x2a, 0xcd, - 0xb8, 0x30, 0xaa, 0x49, 0xa3, 0x37, 0xf7, 0x33, 0x3a, 0x96, 0x1a, 0xe5, 0x34, 0x37, 0x40, 0x2f, - 0xa1, 0x71, 0x73, 0xf6, 0x59, 0x82, 0x75, 0x90, 0x07, 0x5f, 0xc9, 0x8b, 0xf6, 0x2c, 0xc1, 0x48, - 0x87, 0xa5, 0x4b, 0xcc, 0xd2, 0x90, 0x12, 0xbd, 0xde, 0x2e, 0x75, 0x2a, 0x56, 0xbe, 0x44, 0xaf, - 0xa0, 0xe9, 0x51, 0xc6, 0x70, 0xe4, 0xf2, 0x90, 0x12, 0x11, 0xdc, 0x8a, 0xd4, 0x37, 0x0a, 0x55, - 0xd3, 0x47, 0x3b, 0xb0, 0xc6, 0xb0, 0xc3, 0x32, 0xe2, 0x04, 0x8c, 0xc6, 0xb7, 0x7a, 0xd3, 0x90, - 0xfc, 0x47, 0x0c, 0x5b, 0x19, 0x19, 0x30, 0x1a, 0x17, 0xda, 0xf3, 0x5e, 0xa8, 0xdc, 0x94, 0x12, - 0x27, 0xa0, 0xcc, 0x09, 0x89, 0x47, 0xe3, 0x24, 0xc2, 0xc2, 0x52, 0x6f, 0x4a, 0xd5, 0x63, 0x05, - 0x0f, 0x28, 0x33, 0x0b, 0xa0, 0xd8, 0x54, 0xea, 0x5d, 0xe0, 0xd8, 0x75, 0xf2, 0x5d, 0x3f, 0x94, - 0xbb, 0x6e, 0xa8, 0xea, 0xd9, 0x7c, 0xef, 0x2d, 0xa8, 0xe0, 0x4b, 0x4c, 0xb8, 0xae, 0x49, 0x33, - 0xb5, 0x40, 0x3f, 0xa0, 0x29, 0xef, 0x00, 0xa7, 0x8e, 0x4f, 0x63, 0x37, 0x24, 0xfa, 0xaa, 0xcc, - 0x78, 0xf7, 0x7e, 0x19, 0x8b, 0x76, 0xdb, 0xb4, 0x27, 0x95, 0x2a, 0xe9, 0x15, 0x5e, 0x28, 0xa1, - 0x8f, 0xb0, 0x11, 0xb8, 0x61, 0x84, 0x7d, 0x87, 0xe1, 0x00, 0x33, 0x4c, 0x3c, 0xac, 0xee, 0x1c, - 0x71, 0x63, 0x9c, 0xea, 0xa8, 0x5d, 0xee, 0xd4, 0x2c, 0x5d, 0x51, 0xac, 0x9c, 0x21, 0x4c, 0x87, - 0x02, 0x5f, 0x1f, 0x01, 0xdc, 0x5c, 0x07, 0xa4, 0x41, 0x79, 0x8a, 0x67, 0x72, 0xe8, 0x6a, 0x96, - 0xf8, 0x44, 0x5b, 0x50, 0xb9, 0x74, 0xa3, 0x4c, 0x0d, 0x4e, 0x7d, 0xfb, 0x89, 0xa1, 0x86, 0xd8, - 0xc8, 0x87, 0xd8, 0x38, 0x13, 0xa8, 0xa5, 0x48, 0x7b, 0x0b, 0x1f, 0x4a, 0xeb, 0xdf, 0xa0, 0x5e, - 0xb8, 0x17, 0xff, 0xc5, 0xf2, 0x13, 0xac, 0xfe, 0x15, 0xc3, 0x3f, 0x8c, 0x5b, 0x45, 0xe3, 0x5a, - 0xc1, 0x60, 0xd3, 0x83, 0xe6, 0xed, 0xb7, 0x01, 0xd5, 0x61, 0xc9, 0x3a, 0x1d, 0x0e, 0xcd, 0xe1, - 0x17, 0xed, 0x01, 0x6a, 0x40, 0xed, 0xf3, 0xf1, 0xd1, 0xe8, 0xb0, 0x6f, 0xf7, 0x7b, 0x5a, 0x09, - 0x01, 0x54, 0x07, 0xfb, 0xe6, 0x61, 0xbf, 0xa7, 0x2d, 0x08, 0xc8, 0x36, 0x8f, 0xfa, 
0x3d, 0xe7, - 0xf8, 0xd4, 0xd6, 0xca, 0xa8, 0x09, 0x60, 0xf7, 0xad, 0x23, 0x73, 0xb8, 0x2f, 0xa8, 0x8b, 0x82, - 0x3a, 0xda, 0x3f, 0x3d, 0xe9, 0xf7, 0xb4, 0xca, 0xc1, 0xd7, 0x03, 0xc8, 0x7f, 0x32, 0x1a, 0x7f, - 0xdf, 0x9b, 0x84, 0xfc, 0x22, 0x1b, 0x8b, 0x26, 0x77, 0xe7, 0x4d, 0xee, 0x5e, 0x37, 0xb9, 0xeb, - 0x45, 0x21, 0x26, 0xbc, 0x3b, 0xa1, 0x13, 0x96, 0x78, 0x85, 0xba, 0x7c, 0x34, 0xc7, 0x55, 0x19, - 0xc5, 0xdb, 0xdf, 0x01, 0x00, 0x00, 0xff, 0xff, 0xcf, 0x79, 0x5d, 0x7e, 0x77, 0x05, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0xdf, 0x4f, 0xdb, 0x48, + 0x10, 0xc7, 0x2f, 0x84, 0x04, 0x32, 0x21, 0xc1, 0xec, 0x85, 0xc3, 0x07, 0x27, 0x5d, 0x44, 0x8b, + 0x14, 0x09, 0xe4, 0x48, 0xf4, 0x87, 0x2a, 0x24, 0xd4, 0x42, 0x63, 0x2a, 0x4b, 0x10, 0x82, 0x31, + 0x20, 0xf5, 0xc5, 0x72, 0xec, 0x75, 0xb0, 0x62, 0xef, 0x5a, 0xeb, 0x35, 0x34, 0xcf, 0xfd, 0xc7, + 0xab, 0xdd, 0x8d, 0xc1, 0x40, 0x79, 0xeb, 0x9b, 0x77, 0xe6, 0xf3, 0xfd, 0x6a, 0x76, 0x66, 0xc7, + 0xd0, 0x49, 0x68, 0x80, 0xe3, 0xfe, 0x3d, 0x65, 0xd3, 0x30, 0xa6, 0xf7, 0x46, 0xca, 0x28, 0xa7, + 0x68, 0xd5, 0xa7, 0x24, 0xc8, 0x7d, 0x4e, 0x99, 0x0a, 0x6c, 0x6a, 0x0a, 0xe3, 0x5e, 0x36, 0x9d, + 0x47, 0xfe, 0x9b, 0x50, 0x3a, 0x89, 0x71, 0x5f, 0x9e, 0xc6, 0x79, 0xd8, 0xcf, 0x38, 0xcb, 0x7d, + 0xae, 0xb2, 0xdb, 0x3f, 0x1b, 0xb0, 0x7c, 0x33, 0xf7, 0x44, 0x5f, 0xa0, 0x9e, 0x71, 0x8f, 0xe7, + 0x99, 0x5e, 0xe9, 0x56, 0x7a, 0xed, 0xfd, 0x9e, 0xf1, 0xcc, 0xde, 0x28, 0xd0, 0x87, 0x8f, 0x4b, + 0xc9, 0xdb, 0x73, 0x1d, 0xfa, 0x17, 0x96, 0x31, 0x09, 0x5c, 0x1e, 0x25, 0x58, 0x5f, 0xe8, 0x56, + 0x7a, 0x55, 0x7b, 0x09, 0x93, 0xc0, 0x89, 0x12, 0x8c, 0xfe, 0x87, 0x66, 0x51, 0xbc, 0x1b, 0x05, + 0x7a, 0xb5, 0x5b, 0xe9, 0x35, 0x6c, 0x28, 0x42, 0x56, 0x80, 0xf6, 0x00, 0xa5, 0x1e, 0xc3, 0x84, + 0xbb, 0x65, 0x6e, 0x51, 0x72, 0x9a, 0xca, 0xdc, 0x3c, 0xd2, 0x1f, 0x60, 0xe3, 0x39, 0x2d, 0x2e, + 0x2d, 0x24, 0x35, 0x29, 0xe9, 0x3c, 0x95, 0x38, 0x5e, 0x36, 0xb5, 0x02, 0xb4, 0x0b, 0x35, 0x81, + 0x65, 0x7a, 0xbd, 0x5b, 0xed, 0x35, 0xf7, 0xd7, 0x5f, 0xdc, 0x50, 0x70, 0xb6, 0x62, 0xd0, 0x01, + 0xd4, 0x22, 0x92, 0xe6, 0x5c, 0x5f, 0x96, 0xf0, 0xdb, 0xd7, 0xdb, 0x61, 0x09, 0xcc, 0x24, 0x9c, + 0xcd, 0x6c, 0x25, 0x41, 0x87, 0x50, 0xa7, 0x39, 0x17, 0xe2, 0x86, 0x14, 0xef, 0xbc, 0x2e, 0x3e, + 0x97, 0x9c, 0x52, 0xcf, 0x45, 0xe8, 0x0d, 0xb4, 0x1e, 0xef, 0x35, 0x4b, 0xb1, 0x0e, 0xf2, 0x52, + 0x2b, 0x45, 0xd0, 0x99, 0xa5, 0x18, 0xe9, 0xb0, 0x74, 0x87, 0x59, 0x16, 0x51, 0xa2, 0x37, 0xbb, + 0x95, 0x5e, 0xcd, 0x2e, 0x8e, 0x68, 0x07, 0xda, 0x3e, 0x65, 0x0c, 0xc7, 0x1e, 0x8f, 0x28, 0x11, + 0x4d, 0x59, 0x91, 0xfa, 0x56, 0x29, 0x6a, 0x05, 0xe8, 0x3d, 0x6c, 0x30, 0xec, 0xb2, 0x9c, 0xb8, + 0x21, 0xa3, 0xc9, 0x93, 0xbe, 0xb7, 0x24, 0xff, 0x37, 0xc3, 0x76, 0x4e, 0x4e, 0x18, 0x4d, 0x4a, + 0xad, 0xff, 0x28, 0x54, 0x5e, 0x46, 0x89, 0x1b, 0x52, 0xe6, 0x46, 0xc4, 0xa7, 0x49, 0x1a, 0x63, + 0x61, 0xa9, 0xb7, 0xa5, 0x6a, 0x5d, 0xa5, 0x4f, 0x28, 0xb3, 0x4a, 0x49, 0x51, 0x54, 0xe6, 0xdf, + 0xe2, 0xc4, 0x73, 0x8b, 0xaa, 0x57, 0x65, 0xd5, 0x2d, 0x15, 0xbd, 0x9e, 0xd7, 0xde, 0x81, 0x1a, + 0xbe, 0xc3, 0x84, 0xeb, 0x9a, 0x34, 0x53, 0x07, 0x74, 0x01, 0x6d, 0x39, 0x5f, 0x4e, 0xdd, 0x80, + 0x26, 0x5e, 0x44, 0xf4, 0x35, 0xd9, 0xd7, 0xdd, 0xd7, 0xfb, 0x2a, 0x46, 0xe9, 0xd0, 0x81, 0xa4, + 0x55, 0x77, 0x57, 0x78, 0x29, 0x84, 0x0e, 0x61, 0x2b, 0xf4, 0xa2, 0x18, 0x07, 0x2e, 0xc3, 0x21, + 0x66, 0x98, 0xf8, 0x58, 0xbd, 0x21, 0xe2, 0x25, 0x38, 0xd3, 0x51, 0xb7, 0xda, 0x6b, 0xd8, 0xba, + 0x42, 0xec, 0x82, 0x10, 0xa6, 0x43, 0x91, 0xdf, 0x1c, 0x01, 0x3c, 0x8e, 0x1d, 0x69, 0x50, 0x9d, + 0xe2, 0x99, 
0x5c, 0x9c, 0x86, 0x2d, 0x3e, 0xd1, 0x1e, 0xd4, 0xee, 0xbc, 0x38, 0x57, 0x8b, 0xd0, + 0xdc, 0xff, 0xc7, 0x50, 0x8b, 0x68, 0x14, 0x8b, 0x68, 0x5c, 0x8b, 0xac, 0xad, 0xa0, 0x83, 0x85, + 0x4f, 0x95, 0xcd, 0x0b, 0x68, 0x96, 0xde, 0xc2, 0x1f, 0xb1, 0xfc, 0x0c, 0x6b, 0x2f, 0xda, 0xf0, + 0x1b, 0xe3, 0x4e, 0xd9, 0xb8, 0x51, 0x32, 0xd8, 0xf6, 0xa1, 0xfd, 0x74, 0xd7, 0x51, 0x13, 0x96, + 0xec, 0xab, 0xe1, 0xd0, 0x1a, 0x7e, 0xd3, 0xfe, 0x42, 0x2d, 0x68, 0x7c, 0x3d, 0x3f, 0x1b, 0x9d, + 0x9a, 0x8e, 0x39, 0xd0, 0x2a, 0x08, 0xa0, 0x7e, 0x72, 0x64, 0x9d, 0x9a, 0x03, 0x6d, 0x41, 0xa4, + 0x1c, 0xeb, 0xcc, 0x1c, 0xb8, 0xe7, 0x57, 0x8e, 0x56, 0x45, 0x6d, 0x00, 0xc7, 0xb4, 0xcf, 0xac, + 0xe1, 0x91, 0x40, 0x17, 0x05, 0x3a, 0x3a, 0xba, 0xba, 0x34, 0x07, 0x5a, 0xed, 0x18, 0xc3, 0x96, + 0x4f, 0x13, 0x83, 0x60, 0x1e, 0xc6, 0xd1, 0x8f, 0xe7, 0x13, 0x3d, 0x86, 0xa2, 0x82, 0xd1, 0xf8, + 0xfb, 0xc1, 0x24, 0xe2, 0xb7, 0xf9, 0xd8, 0xf0, 0x69, 0xd2, 0x9f, 0xf3, 0xfd, 0x07, 0xbe, 0xef, + 0xc7, 0x11, 0x26, 0xbc, 0x3f, 0xa1, 0x13, 0x96, 0xfa, 0xa5, 0xb8, 0xfc, 0x2b, 0x8e, 0xeb, 0xd2, + 0xee, 0xdd, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x27, 0xe7, 0x4f, 0x4c, 0x05, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/workflowdef.pb.go b/client/gogrpc/conductor/model/workflowdef.pb.go index 5256c694fd..1be3a0cac5 100644 --- a/client/gogrpc/conductor/model/workflowdef.pb.go +++ b/client/gogrpc/conductor/model/workflowdef.pb.go @@ -28,6 +28,7 @@ type WorkflowDef struct { OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow" json:"failure_workflow,omitempty"` SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion" json:"schema_version,omitempty"` + Restartable bool `protobuf:"varint,9,opt,name=restartable" json:"restartable,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -37,7 +38,7 @@ func (m *WorkflowDef) Reset() { *m = WorkflowDef{} } func (m *WorkflowDef) String() string { return proto.CompactTextString(m) } func (*WorkflowDef) ProtoMessage() {} func (*WorkflowDef) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowdef_bf5b9fbf7e32cdd5, []int{0} + return fileDescriptor_workflowdef_3a04d4bf8b36be23, []int{0} } func (m *WorkflowDef) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowDef.Unmarshal(m, b) @@ -113,40 +114,48 @@ func (m *WorkflowDef) GetSchemaVersion() int32 { return 0 } +func (m *WorkflowDef) GetRestartable() bool { + if m != nil { + return m.Restartable + } + return false +} + func init() { - proto.RegisterType((*WorkflowDef)(nil), "com.netflix.conductor.proto.WorkflowDef") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.WorkflowDef.OutputParametersEntry") + proto.RegisterType((*WorkflowDef)(nil), "conductor.proto.WorkflowDef") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowDef.OutputParametersEntry") } func init() { - proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_bf5b9fbf7e32cdd5) -} - -var fileDescriptor_workflowdef_bf5b9fbf7e32cdd5 = []byte{ - // 388 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x4b, 0xab, 0xd3, 0x40, - 0x14, 0xc7, 0xc9, 0xcd, 0xcd, 0xad, 0x9d, 
0x50, 0x5b, 0x07, 0xd4, 0x50, 0x5d, 0x04, 0x41, 0x48, - 0x41, 0x26, 0x50, 0x37, 0xd2, 0x85, 0x42, 0xa9, 0x2b, 0x17, 0x96, 0x20, 0x15, 0x74, 0x51, 0x26, - 0x93, 0x93, 0x34, 0xe4, 0x31, 0x61, 0x1e, 0xad, 0xfd, 0xc0, 0x7e, 0x0f, 0xc9, 0x24, 0xa9, 0xb5, - 0x88, 0xb8, 0x9b, 0xf9, 0xfd, 0xe7, 0xbc, 0xfe, 0x67, 0xd0, 0xf3, 0x8a, 0x27, 0x50, 0x86, 0x27, - 0x2e, 0x8a, 0xb4, 0xe4, 0xa7, 0x04, 0x52, 0xd2, 0x08, 0xae, 0x38, 0x7e, 0xc1, 0x78, 0x45, 0x6a, - 0x50, 0x69, 0x99, 0xff, 0x20, 0x8c, 0xd7, 0x89, 0x66, 0x8a, 0x8b, 0x4e, 0x9c, 0x7b, 0x7f, 0x46, - 0x29, 0x2a, 0x8b, 0x5e, 0x79, 0x99, 0x71, 0x9e, 0x95, 0x10, 0x9a, 0x5b, 0xac, 0xd3, 0x50, 0x2a, - 0xa1, 0x99, 0xea, 0xd4, 0x57, 0x3f, 0x6d, 0xe4, 0x7e, 0xed, 0x83, 0x36, 0x90, 0x62, 0x8c, 0xee, - 0x6b, 0x5a, 0x81, 0x67, 0xf9, 0x56, 0x30, 0x8e, 0xcc, 0x19, 0xfb, 0xc8, 0x4d, 0x40, 0x32, 0x91, - 0x37, 0x2a, 0xe7, 0xb5, 0x77, 0x67, 0xa4, 0x6b, 0x84, 0x3d, 0x34, 0x3a, 0x82, 0x90, 0xad, 0x6a, - 0xfb, 0x56, 0xe0, 0x44, 0xc3, 0x15, 0x7f, 0x40, 0x4e, 0xdb, 0x8b, 0xf4, 0xee, 0x7d, 0x3b, 0x70, - 0x97, 0x0b, 0xf2, 0x8f, 0x21, 0xc8, 0xd0, 0xc8, 0x17, 0x2a, 0x8b, 0xa8, 0x8b, 0xc3, 0x0b, 0x34, - 0xcb, 0xeb, 0x46, 0xab, 0x7d, 0x43, 0x05, 0xad, 0x40, 0x81, 0x90, 0x9e, 0xe3, 0xdb, 0xc1, 0x38, - 0x9a, 0x1a, 0xbe, 0xbd, 0x60, 0x5c, 0xa0, 0x27, 0x5c, 0xab, 0x9b, 0xb7, 0x0f, 0xa6, 0xee, 0xfb, - 0xff, 0xaa, 0xbb, 0x81, 0x94, 0x7c, 0x36, 0x19, 0x7e, 0x67, 0xfd, 0x58, 0x2b, 0x71, 0x8e, 0x66, - 0xfc, 0x06, 0xb7, 0x7d, 0xa5, 0x34, 0x2f, 0xb5, 0x80, 0xfd, 0x60, 0xba, 0x37, 0x32, 0xce, 0x4c, - 0x7b, 0x3e, 0x64, 0xc5, 0xaf, 0xd1, 0x63, 0xc9, 0x0e, 0x50, 0xd1, 0xfd, 0x60, 0xd2, 0x23, 0x63, - 0xd2, 0xa4, 0xa3, 0xbb, 0x0e, 0xce, 0xbf, 0xa3, 0xa7, 0x7f, 0x2d, 0x8e, 0x67, 0xc8, 0x2e, 0xe0, - 0xdc, 0xaf, 0xa4, 0x3d, 0xe2, 0x37, 0xc8, 0x39, 0xd2, 0x52, 0x83, 0xd9, 0x85, 0xbb, 0x7c, 0x46, - 0xba, 0x1d, 0x93, 0x61, 0xc7, 0x64, 0xd7, 0xaa, 0x51, 0xf7, 0x68, 0x75, 0xf7, 0xce, 0x5a, 0x7f, - 0x5a, 0x4f, 0xae, 0xa6, 0xdc, 0xc6, 0xdf, 0x56, 0x59, 0xae, 0x0e, 0x3a, 0x6e, 0x7d, 0x09, 0x7b, - 0x5f, 0xc2, 0x8b, 0x2f, 0x21, 0x2b, 0x73, 0xa8, 0x55, 0x98, 0xf1, 0x4c, 0x34, 0xec, 0x8a, 0x9b, - 0x5f, 0x16, 0x3f, 0x98, 0x3a, 0x6f, 0x7f, 0x05, 0x00, 0x00, 0xff, 0xff, 0xbd, 0x9d, 0x18, 0xb5, - 0xab, 0x02, 0x00, 0x00, + proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_3a04d4bf8b36be23) +} + +var fileDescriptor_workflowdef_3a04d4bf8b36be23 = []byte{ + // 404 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xdf, 0x6b, 0xd5, 0x30, + 0x14, 0xc7, 0xe9, 0xba, 0x6e, 0xbb, 0x29, 0xf3, 0x5e, 0x03, 0x6a, 0x98, 0x0a, 0x45, 0x10, 0x2a, + 0x48, 0x0a, 0x77, 0x2f, 0xb2, 0xc7, 0x31, 0x9f, 0x1d, 0x45, 0x26, 0xe8, 0x43, 0x49, 0xd3, 0xd3, + 0xde, 0xd2, 0xb4, 0x29, 0xf9, 0xb1, 0xb9, 0xbf, 0xda, 0x7f, 0x41, 0x9a, 0xb6, 0xb3, 0x16, 0x7d, + 0xcb, 0xf9, 0x7c, 0xcf, 0x2f, 0xbe, 0x27, 0xe8, 0x55, 0x2b, 0x0b, 0x10, 0xc9, 0x83, 0x54, 0x4d, + 0x29, 0xe4, 0x43, 0x01, 0x25, 0xed, 0x95, 0x34, 0x12, 0x6f, 0xb9, 0xec, 0x0a, 0xcb, 0x8d, 0x54, + 0x23, 0xb8, 0x20, 0x7f, 0x67, 0x1a, 0xa6, 0x9b, 0x49, 0x79, 0x53, 0x49, 0x59, 0x09, 0x48, 0x5c, + 0x94, 0xdb, 0x32, 0xd1, 0x46, 0x59, 0x6e, 0x46, 0xf5, 0xdd, 0x2f, 0x1f, 0x85, 0xdf, 0xa6, 0xa2, + 0x1b, 0x28, 0x31, 0x46, 0xc7, 0x1d, 0x6b, 0x81, 0x78, 0x91, 0x17, 0x6f, 0x52, 0xf7, 0xc6, 0x11, + 0x0a, 0x0b, 0xd0, 0x5c, 0xd5, 0xbd, 0xa9, 0x65, 0x47, 0x8e, 0x9c, 0xb4, 0x44, 0x98, 0xa0, 0xd3, + 0x7b, 0x50, 0x7a, 0x50, 0xfd, 0xc8, 0x8b, 0x83, 0x74, 0x0e, 0xf1, 0x25, 0x0a, 0x86, 0x5d, 0x34, + 0x39, 0x8e, 0xfc, 0x38, 0xdc, 
0xbf, 0xa5, 0xab, 0xc5, 0xe9, 0x3c, 0xfc, 0x2b, 0xd3, 0x4d, 0x3a, + 0xe6, 0xe2, 0x0f, 0x68, 0x57, 0x77, 0xbd, 0x35, 0x59, 0xcf, 0x14, 0x6b, 0xc1, 0x80, 0xd2, 0x24, + 0x88, 0xfc, 0x78, 0x93, 0x6e, 0x1d, 0xbf, 0x7d, 0xc2, 0x38, 0x43, 0xcf, 0xa5, 0x35, 0xab, 0xdc, + 0x13, 0x37, 0x6b, 0xff, 0xdf, 0x59, 0x37, 0x50, 0xd2, 0x2f, 0xae, 0xea, 0x4f, 0xa7, 0xcf, 0x9d, + 0x51, 0x8f, 0xe9, 0x4e, 0xae, 0xf0, 0xb0, 0x4b, 0xc9, 0x6a, 0x61, 0x15, 0x64, 0xb3, 0xb9, 0xe4, + 0xd4, 0x39, 0xb0, 0x9d, 0xf8, 0xdc, 0x15, 0xbf, 0x47, 0xcf, 0x34, 0x3f, 0x40, 0xcb, 0xb2, 0xd9, + 0x8c, 0x33, 0x67, 0xc6, 0xf9, 0x48, 0xef, 0x26, 0x4b, 0x22, 0x14, 0x2a, 0xd0, 0x86, 0x29, 0xc3, + 0x72, 0x01, 0x64, 0x13, 0x79, 0xf1, 0x59, 0xba, 0x44, 0x17, 0x3f, 0xd0, 0x8b, 0x7f, 0xae, 0x87, + 0x77, 0xc8, 0x6f, 0xe0, 0x71, 0x3a, 0xce, 0xf0, 0xc4, 0x1f, 0x51, 0x70, 0xcf, 0x84, 0x05, 0x77, + 0x95, 0x70, 0xff, 0x92, 0x8e, 0xd7, 0xa6, 0xf3, 0xb5, 0xe9, 0xdd, 0xa0, 0xa6, 0x63, 0xd2, 0xd5, + 0xd1, 0x27, 0xef, 0xfa, 0x80, 0x5e, 0x73, 0xd9, 0xd2, 0x0e, 0x4c, 0x29, 0xea, 0x9f, 0x6b, 0x8f, + 0xae, 0xcf, 0x17, 0x26, 0xdd, 0xe6, 0xdf, 0xaf, 0xaa, 0xda, 0x1c, 0x6c, 0x4e, 0xb9, 0x6c, 0x93, + 0xa9, 0x24, 0x79, 0x2a, 0x49, 0xb8, 0xa8, 0xa1, 0x33, 0x49, 0x25, 0x2b, 0xd5, 0xf3, 0x05, 0x77, + 0x9f, 0x31, 0x3f, 0x71, 0x1d, 0x2f, 0x7f, 0x07, 0x00, 0x00, 0xff, 0xff, 0x70, 0x4f, 0x0f, 0xeb, + 0xc6, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/workflowsummary.pb.go b/client/gogrpc/conductor/model/workflowsummary.pb.go index 85d5936c8e..8d1a794f5c 100644 --- a/client/gogrpc/conductor/model/workflowsummary.pb.go +++ b/client/gogrpc/conductor/model/workflowsummary.pb.go @@ -26,7 +26,7 @@ type WorkflowSummary struct { StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,enum=com.netflix.conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` Input string `protobuf:"bytes,9,opt,name=input" json:"input,omitempty"` Output string `protobuf:"bytes,10,opt,name=output" json:"output,omitempty"` ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` @@ -42,7 +42,7 @@ func (m *WorkflowSummary) Reset() { *m = WorkflowSummary{} } func (m *WorkflowSummary) String() string { return proto.CompactTextString(m) } func (*WorkflowSummary) ProtoMessage() {} func (*WorkflowSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowsummary_235407b7a0ddc9c7, []int{0} + return fileDescriptor_workflowsummary_3f8ed40c0bd9261f, []int{0} } func (m *WorkflowSummary) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowSummary.Unmarshal(m, b) @@ -161,40 +161,40 @@ func (m *WorkflowSummary) GetFailedReferenceTaskNames() string { } func init() { - proto.RegisterType((*WorkflowSummary)(nil), "com.netflix.conductor.proto.WorkflowSummary") + proto.RegisterType((*WorkflowSummary)(nil), "conductor.proto.WorkflowSummary") } func init() { - proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_235407b7a0ddc9c7) + proto.RegisterFile("model/workflowsummary.proto", 
fileDescriptor_workflowsummary_3f8ed40c0bd9261f) } -var fileDescriptor_workflowsummary_235407b7a0ddc9c7 = []byte{ +var fileDescriptor_workflowsummary_3f8ed40c0bd9261f = []byte{ // 428 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x92, 0xd1, 0x8b, 0xd3, 0x40, - 0x10, 0xc6, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x8a, 0x4b, 0xd5, 0xd5, 0x22, 0x16, 0x45, 0xe8, - 0x53, 0x02, 0x2a, 0x3e, 0x08, 0xbe, 0xdc, 0x83, 0x50, 0x10, 0x95, 0x5c, 0x41, 0xf0, 0x25, 0x6c, - 0x77, 0x27, 0x75, 0x69, 0xb2, 0x1b, 0x76, 0x27, 0x77, 0xd7, 0xbf, 0xc7, 0x7f, 0x54, 0x32, 0xdb, - 0x94, 0xea, 0x83, 0x6f, 0x99, 0xdf, 0xf7, 0xcd, 0x4e, 0xe6, 0x63, 0xd8, 0xbc, 0x76, 0x1a, 0xaa, - 0xec, 0xce, 0xf9, 0x5d, 0x59, 0xb9, 0xbb, 0xd0, 0xd6, 0xb5, 0xf4, 0xfb, 0xb4, 0xf1, 0x0e, 0x1d, - 0x9f, 0x2b, 0x57, 0xa7, 0x16, 0xb0, 0xac, 0xcc, 0x7d, 0xaa, 0x9c, 0xd5, 0xad, 0x42, 0xe7, 0xa3, - 0xf8, 0x7c, 0xf6, 0x77, 0x67, 0xa4, 0xaf, 0x7e, 0x9f, 0xb3, 0x87, 0x3f, 0x0e, 0xe8, 0x26, 0x3e, - 0xc6, 0x5f, 0xb3, 0x49, 0xef, 0x2a, 0x70, 0xdf, 0x80, 0x48, 0x16, 0xc9, 0x72, 0x9c, 0x5f, 0xf5, - 0x70, 0xbd, 0x6f, 0x80, 0x0b, 0x36, 0xba, 0x05, 0x1f, 0x8c, 0xb3, 0xe2, 0xc1, 0x22, 0x59, 0x0e, - 0xf2, 0xbe, 0xe4, 0x2f, 0xd9, 0xe5, 0xb1, 0xdd, 0x68, 0x71, 0x46, 0xcd, 0xac, 0x47, 0x2b, 0xcd, - 0xdf, 0xb0, 0xa9, 0x72, 0xde, 0x43, 0x25, 0xd1, 0x38, 0xdb, 0x79, 0xce, 0xc9, 0x33, 0x39, 0xa1, - 0x2b, 0xcd, 0x5f, 0x30, 0x16, 0x50, 0x7a, 0x2c, 0xd0, 0xd4, 0x20, 0x06, 0x64, 0x19, 0x13, 0x59, - 0x9b, 0x1a, 0xba, 0x31, 0x6d, 0xa3, 0x25, 0x42, 0xd4, 0x87, 0x71, 0x4c, 0x44, 0x64, 0x78, 0xc6, - 0x2e, 0xc0, 0xea, 0xa8, 0x8e, 0x48, 0x1d, 0x81, 0xd5, 0x24, 0x7d, 0x61, 0xc3, 0x80, 0x12, 0xdb, - 0x20, 0x2e, 0x16, 0xc9, 0x72, 0xfa, 0xf6, 0x7d, 0xfa, 0x9f, 0xe4, 0xd2, 0x3e, 0x9f, 0xe3, 0xc7, - 0x0d, 0xf5, 0xe6, 0x87, 0x37, 0xf8, 0x8c, 0x0d, 0x8c, 0x6d, 0x5a, 0x14, 0x63, 0x9a, 0x12, 0x0b, - 0xfe, 0x84, 0x0d, 0x5d, 0x8b, 0x1d, 0x66, 0x84, 0x0f, 0x15, 0xff, 0xc0, 0x9e, 0x7a, 0x90, 0xc1, - 0xd9, 0xa2, 0x74, 0xbe, 0x30, 0x56, 0xb9, 0xba, 0xa9, 0xa0, 0xdb, 0x59, 0x5c, 0x92, 0xf1, 0x71, - 0x94, 0x3f, 0x3b, 0xbf, 0x3a, 0x11, 0xbb, 0xd4, 0xe0, 0x1e, 0x54, 0x4b, 0x99, 0xd1, 0x52, 0x57, - 0x8b, 0x64, 0x79, 0x96, 0x4f, 0x8e, 0x94, 0x56, 0x9b, 0xb1, 0x01, 0xdc, 0x82, 0x45, 0x31, 0x89, - 0x3f, 0x43, 0x05, 0xff, 0xc4, 0xe6, 0xa5, 0x34, 0x15, 0xe8, 0xc2, 0x43, 0x09, 0x1e, 0xac, 0x82, - 0x02, 0x65, 0xd8, 0x15, 0x56, 0xd6, 0x10, 0xc4, 0x94, 0xbc, 0x22, 0x5a, 0xf2, 0xde, 0xb1, 0x96, - 0x61, 0xf7, 0xb5, 0xd3, 0xaf, 0xbf, 0x5d, 0x3f, 0xfa, 0xe7, 0x48, 0xbe, 0x6f, 0x7e, 0x7e, 0xdc, - 0x1a, 0xfc, 0xd5, 0x6e, 0xba, 0xe8, 0xb2, 0x43, 0x74, 0xd9, 0x31, 0xba, 0x4c, 0x55, 0x06, 0x2c, - 0x66, 0x5b, 0xb7, 0xf5, 0x8d, 0x3a, 0xe1, 0x74, 0x85, 0x9b, 0x21, 0x25, 0xfb, 0xee, 0x4f, 0x00, - 0x00, 0x00, 0xff, 0xff, 0x83, 0xba, 0xab, 0xd5, 0xcf, 0x02, 0x00, 0x00, + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0x51, 0x8b, 0xd3, 0x40, + 0x10, 0xc7, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x1e, 0x2e, 0x55, 0x57, 0x8b, 0x58, 0x14, 0x21, + 0x4f, 0x09, 0x28, 0xf8, 0x20, 0x08, 0x72, 0x0f, 0x42, 0x5f, 0x44, 0x72, 0x05, 0xc1, 0x97, 0xb0, + 0xdd, 0x4c, 0xea, 0xd2, 0x64, 0x27, 0xec, 0x6e, 0xee, 0xda, 0x4f, 0xe2, 0xd7, 0x95, 0xcc, 0x36, + 0xa5, 0xd7, 0xb7, 0xcc, 0xef, 0xff, 0x9b, 0xdd, 0xcc, 0xb0, 0x6c, 0x5e, 0x63, 0x01, 0x55, 0xfa, + 0x88, 0x76, 0x5b, 0x56, 0xf8, 0xe8, 0xda, 0xba, 0x96, 0x76, 0x9f, 0x34, 0x16, 0x3d, 0xf2, 0x5b, + 0x85, 0xa6, 0x68, 0x95, 0x47, 0x1b, 0xc0, 0x9b, 0xd9, 0x53, 0x3b, 0xd0, 0xf7, 0xff, 0x2e, 0xd9, + 0xed, 0xef, 0x03, 
0xba, 0x0f, 0x07, 0xf0, 0x0f, 0x6c, 0xd2, 0x5b, 0xb9, 0xdf, 0x37, 0x20, 0xa2, + 0x45, 0x14, 0x8f, 0xb3, 0x9b, 0x1e, 0xae, 0xf6, 0x0d, 0x70, 0xc1, 0x46, 0x0f, 0x60, 0x9d, 0x46, + 0x23, 0x9e, 0x2d, 0xa2, 0x78, 0x90, 0xf5, 0x25, 0x7f, 0xc7, 0xae, 0x8f, 0xed, 0xba, 0x10, 0x17, + 0xd4, 0xcc, 0x7a, 0xb4, 0x2c, 0xf8, 0x47, 0x36, 0x55, 0x68, 0x2d, 0x54, 0xd2, 0x6b, 0x34, 0x9d, + 0x73, 0x49, 0xce, 0xe4, 0x84, 0x2e, 0x0b, 0xfe, 0x96, 0x31, 0xe7, 0xa5, 0xf5, 0xb9, 0xd7, 0x35, + 0x88, 0x01, 0x29, 0x63, 0x22, 0x2b, 0x5d, 0x43, 0x77, 0x4d, 0xdb, 0x14, 0xd2, 0x43, 0xc8, 0x87, + 0xe1, 0x9a, 0x80, 0x48, 0x78, 0xcd, 0xae, 0xc0, 0x14, 0x21, 0x1d, 0x51, 0x3a, 0x02, 0x53, 0x50, + 0xf4, 0x9d, 0x0d, 0x9d, 0x97, 0xbe, 0x75, 0xe2, 0x6a, 0x11, 0xc5, 0xd3, 0x4f, 0x71, 0x72, 0xb6, + 0xad, 0xa4, 0xdf, 0xc9, 0xf1, 0xe3, 0x9e, 0xfc, 0xec, 0xd0, 0xc7, 0x67, 0x6c, 0xa0, 0x4d, 0xd3, + 0x7a, 0x31, 0xa6, 0x93, 0x43, 0xc1, 0x5f, 0xb2, 0x21, 0xb6, 0xbe, 0xc3, 0x8c, 0xf0, 0xa1, 0xe2, + 0x5f, 0xd8, 0x2b, 0x0b, 0xd2, 0xa1, 0xc9, 0x4b, 0xb4, 0xb9, 0x36, 0x0a, 0xeb, 0xa6, 0x82, 0x6e, + 0x4e, 0x71, 0x4d, 0xe2, 0x8b, 0x10, 0xff, 0x40, 0xbb, 0x3c, 0x09, 0xbb, 0x4d, 0xc1, 0x0e, 0x54, + 0x4b, 0x7b, 0xa2, 0x41, 0x6e, 0x16, 0x51, 0x7c, 0x91, 0x4d, 0x8e, 0x94, 0xc6, 0x99, 0xb1, 0x01, + 0x3c, 0x80, 0xf1, 0x62, 0x12, 0x7e, 0x86, 0x0a, 0xfe, 0x8d, 0xcd, 0x4b, 0xa9, 0x2b, 0x28, 0x72, + 0x0b, 0x25, 0x58, 0x30, 0x0a, 0x72, 0x2f, 0xdd, 0x36, 0x37, 0xb2, 0x06, 0x27, 0xa6, 0xe4, 0x8a, + 0xa0, 0x64, 0xbd, 0xb1, 0x92, 0x6e, 0xfb, 0xb3, 0xcb, 0xef, 0x2a, 0x36, 0x57, 0x58, 0x27, 0x06, + 0x7c, 0x59, 0xe9, 0xdd, 0xf9, 0x82, 0xee, 0x9e, 0x9f, 0xbd, 0x9a, 0x5f, 0xeb, 0x3f, 0x5f, 0x37, + 0xda, 0xff, 0x6d, 0xd7, 0x89, 0xc2, 0x3a, 0x3d, 0xb4, 0xa5, 0xc7, 0xb6, 0x54, 0x55, 0x1a, 0x8c, + 0x4f, 0x37, 0xb8, 0xb1, 0x8d, 0x3a, 0xe1, 0xf4, 0x2c, 0xd7, 0x43, 0x3a, 0xf5, 0xf3, 0xff, 0x00, + 0x00, 0x00, 0xff, 0xff, 0xa0, 0xee, 0x86, 0xf0, 0xd4, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/workflowtask.pb.go b/client/gogrpc/conductor/model/workflowtask.pb.go index e46ce0a2b1..9176ece14b 100644 --- a/client/gogrpc/conductor/model/workflowtask.pb.go +++ b/client/gogrpc/conductor/model/workflowtask.pb.go @@ -63,7 +63,7 @@ func (x WorkflowTask_Type) String() string { return proto.EnumName(WorkflowTask_Type_name, int32(x)) } func (WorkflowTask_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0, 0} + return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0, 0} } type WorkflowTask struct { @@ -94,7 +94,7 @@ func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } func (*WorkflowTask) ProtoMessage() {} func (*WorkflowTask) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0} + return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0} } func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowTask.Unmarshal(m, b) @@ -251,7 +251,7 @@ func (m *WorkflowTask_WorkflowTaskList) Reset() { *m = WorkflowTask_Work func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_24620b75c66c94aa, []int{0, 0} + return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0, 0} } func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) @@ -279,66 +279,66 @@ func (m *WorkflowTask_WorkflowTaskList) GetTasks() []*WorkflowTask { } func init() { - proto.RegisterType((*WorkflowTask)(nil), "com.netflix.conductor.proto.WorkflowTask") - proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "com.netflix.conductor.proto.WorkflowTask.DecisionCasesEntry") - proto.RegisterMapType((map[string]*_struct.Value)(nil), "com.netflix.conductor.proto.WorkflowTask.InputParametersEntry") - proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "com.netflix.conductor.proto.WorkflowTask.WorkflowTaskList") - proto.RegisterEnum("com.netflix.conductor.proto.WorkflowTask_Type", WorkflowTask_Type_name, WorkflowTask_Type_value) + proto.RegisterType((*WorkflowTask)(nil), "conductor.proto.WorkflowTask") + proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.DecisionCasesEntry") + proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowTask.InputParametersEntry") + proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.WorkflowTaskList") + proto.RegisterEnum("conductor.proto.WorkflowTask_Type", WorkflowTask_Type_name, WorkflowTask_Type_value) } func init() { - proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_24620b75c66c94aa) -} - -var fileDescriptor_workflowtask_24620b75c66c94aa = []byte{ - // 774 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xef, 0x6b, 0xdb, 0x3a, - 0x14, 0x7d, 0x6e, 0x7e, 0x5f, 0xa7, 0xa9, 0xab, 0xd7, 0xbe, 0x9a, 0xbc, 0xf7, 0xb6, 0xd0, 0x7d, - 0x58, 0x06, 0xc3, 0x81, 0x8e, 0xb1, 0x51, 0xc6, 0x46, 0xdb, 0x38, 0xe0, 0x35, 0x4d, 0x82, 0x93, - 0x36, 0xb4, 0x0c, 0x8c, 0x63, 0x2b, 0x99, 0x17, 0xc7, 0x0a, 0x96, 0xbc, 0x36, 0xb0, 0xbf, 0x63, - 0xdf, 0xf7, 0x9f, 0x0e, 0xc9, 0x76, 0x92, 0x76, 0xa5, 0x74, 0xec, 0x9b, 0x74, 0xef, 0xb9, 0xd7, - 0xe7, 0x1c, 0x5d, 0xc9, 0xa0, 0xce, 0x88, 0x8b, 0xfd, 0xc6, 0x35, 0x09, 0xa7, 0x63, 0x9f, 0x5c, - 0x33, 0x9b, 0x4e, 0xb5, 0x79, 0x48, 0x18, 0x41, 0xff, 0x3a, 0x64, 0xa6, 0x05, 0x98, 0x8d, 0x7d, - 0xef, 0x46, 0x73, 0x48, 0xe0, 0x46, 0x0e, 0x23, 0x61, 0x9c, 0xac, 0xfe, 0x1f, 0x97, 0xd1, 0x68, - 0x94, 0x56, 0xce, 0xed, 0xd0, 0x9e, 0xd1, 0x24, 0xfd, 0xdf, 0x84, 0x90, 0x89, 0x8f, 0x1b, 0x62, - 0x37, 0x8a, 0xc6, 0x0d, 0xca, 0xc2, 0xc8, 0x61, 0x71, 0x76, 0xff, 0x87, 0x0c, 0xe5, 0x61, 0x52, - 0x36, 0xb0, 0xe9, 0x14, 0x21, 0xc8, 0x06, 0xf6, 0x0c, 0xab, 0x52, 0x4d, 0xaa, 0x97, 0x4c, 0xb1, - 0x46, 0x1a, 0xfc, 0xcd, 0xc9, 0x58, 0x21, 0x1e, 0xe3, 0x10, 0x07, 0x0e, 0xb6, 0x04, 0x64, 0x43, - 0x40, 0xb6, 0x79, 0xca, 0x4c, 0x33, 0x1d, 0x8e, 0xaf, 0x81, 0xec, 0x62, 0xea, 0x84, 0xde, 0x9c, - 0x79, 0x24, 0x50, 0x33, 0x02, 0xb7, 0x1e, 0x42, 0x1e, 0x28, 0x5e, 0x30, 0x8f, 0x98, 0x25, 0xa8, - 0x62, 0x86, 0x43, 0xaa, 0x66, 0x6b, 0x99, 0xba, 0x7c, 0xf0, 0x5e, 0x7b, 0x40, 0xab, 0xb6, 0x4e, - 0x55, 0x33, 0x78, 0x87, 0xde, 0xb2, 0x81, 0x1e, 0xb0, 0x70, 0x61, 0x6e, 0x79, 0xb7, 0xa3, 0x5c, - 0x10, 0x5b, 0xcc, 0xb1, 0x9a, 0x8b, 0x05, 0xf1, 0x35, 0x7a, 0x0d, 0x7b, 0xee, 0x22, 0xb0, 0x67, - 0x9e, 0x63, 0x09, 0x61, 0x5c, 0x4e, 0x4c, 0x45, 0xcd, 0x0b, 0xd8, 0x4e, 0x92, 0xe6, 0xdf, 0xe1, - 0x92, 0x44, 0x3f, 0x54, 0x07, 0xc5, 0xb1, 0x29, 0xb6, 0xbe, 0xda, 0x7e, 0x94, 0xe2, 0x0b, 0x02, - 0x5f, 0xe1, 0xf1, 0x0b, 0x1e, 0x8e, 0x91, 0xcf, 0x61, 0x4b, 0x20, 0xf1, 0xcd, 0x3c, 0xc4, 0x94, - 0x72, 0x17, 0x8a, 0x2b, 0xa0, 0xbe, 0x8c, 0x22, 0x07, 0x2a, 0x2e, 0x76, 0x3c, 0xbe, 
0xb6, 0x78, - 0x8a, 0xaa, 0x25, 0x61, 0xc3, 0xbb, 0xc7, 0xdb, 0xd0, 0x4c, 0xea, 0x4f, 0x78, 0x79, 0x6c, 0xc2, - 0xa6, 0xbb, 0x1e, 0x43, 0x6f, 0x40, 0x4d, 0xe5, 0x8e, 0x49, 0x38, 0x15, 0x9a, 0x69, 0xc2, 0x1f, - 0x04, 0xad, 0xdd, 0x24, 0xdf, 0x22, 0xe1, 0x94, 0x37, 0xa5, 0xb1, 0x8c, 0x53, 0x78, 0x76, 0x4f, - 0xe1, 0xda, 0xc9, 0xc5, 0x83, 0x20, 0x8b, 0x1e, 0x4f, 0xee, 0xf6, 0x58, 0x9d, 0x8f, 0x98, 0x8a, - 0x36, 0x94, 0x5d, 0x3c, 0xb6, 0x23, 0x9f, 0x09, 0xa5, 0x6a, 0x59, 0x08, 0x7d, 0xf1, 0x68, 0xa1, - 0x7c, 0x82, 0x44, 0x39, 0x17, 0x85, 0x2e, 0x01, 0x56, 0x94, 0xd4, 0x4d, 0xd1, 0xeb, 0xf0, 0xf1, - 0xa6, 0xad, 0x6f, 0xda, 0x1e, 0x65, 0x66, 0x69, 0x9c, 0xd2, 0x46, 0x4f, 0x41, 0xa6, 0xcc, 0x0e, - 0x99, 0xe5, 0x62, 0xdf, 0x5e, 0xa8, 0x95, 0x9a, 0x54, 0xcf, 0x99, 0x20, 0x42, 0x4d, 0x1e, 0x41, - 0x9f, 0x00, 0xd1, 0x68, 0x64, 0xa5, 0xd7, 0x2d, 0x71, 0x72, 0xab, 0x26, 0xd5, 0xe5, 0x03, 0xed, - 0x41, 0x0e, 0xfd, 0x68, 0x94, 0x7e, 0x59, 0x18, 0x43, 0x4d, 0x85, 0xde, 0x09, 0xa1, 0x3d, 0x28, - 0x7c, 0x21, 0x5e, 0x60, 0x91, 0x40, 0x55, 0x6a, 0x99, 0x7a, 0xc9, 0xcc, 0xf3, 0x6d, 0x37, 0xe0, - 0x93, 0x4c, 0xbd, 0x60, 0xaa, 0x6e, 0xc7, 0x93, 0xcc, 0xd7, 0xa8, 0x0a, 0x45, 0x22, 0xae, 0x94, - 0xed, 0xab, 0xa8, 0x26, 0xd5, 0x8b, 0xe6, 0x72, 0x5f, 0xed, 0x83, 0x72, 0x57, 0x26, 0xfa, 0x00, - 0xb9, 0xd8, 0x31, 0xe9, 0x77, 0xdd, 0x8f, 0xeb, 0xaa, 0x57, 0xb0, 0x73, 0xdf, 0xbd, 0x43, 0x0a, - 0x64, 0xa6, 0x78, 0x91, 0x3c, 0x1b, 0x7c, 0x89, 0x5e, 0x42, 0x4e, 0x5c, 0x14, 0xf1, 0x4e, 0xc8, - 0x07, 0xff, 0x68, 0xf1, 0x43, 0xa4, 0xa5, 0x0f, 0x91, 0x26, 0xee, 0x8b, 0x19, 0x83, 0x0e, 0x37, - 0xde, 0x4a, 0xd5, 0x6f, 0x80, 0x7e, 0x1d, 0xe6, 0x7b, 0x3a, 0xf7, 0x6e, 0x77, 0xfe, 0x93, 0x63, - 0x5f, 0x7d, 0x7d, 0xff, 0xbb, 0x04, 0xd9, 0x01, 0x7f, 0x1d, 0x00, 0xf2, 0x7d, 0xe3, 0xac, 0xd7, - 0xd6, 0x95, 0xbf, 0x90, 0x0c, 0x85, 0xe6, 0x65, 0xe7, 0xe8, 0xcc, 0x38, 0x51, 0x24, 0xb4, 0x09, - 0xa5, 0x56, 0xd7, 0x3c, 0xb5, 0x3e, 0x76, 0x8d, 0x8e, 0xb2, 0x81, 0x76, 0x61, 0x7b, 0xb9, 0xb5, - 0x52, 0x54, 0x06, 0x95, 0xa1, 0xd8, 0xd4, 0x4f, 0x8c, 0xbe, 0xd1, 0xed, 0x28, 0x59, 0x54, 0x84, - 0xac, 0x80, 0xe7, 0x90, 0x02, 0xe5, 0xfe, 0xf9, 0xb1, 0x35, 0xec, 0x9a, 0xa7, 0xad, 0x76, 0x77, - 0xa8, 0xe4, 0x51, 0x09, 0x72, 0xfa, 0x85, 0xde, 0x19, 0x28, 0x05, 0x0e, 0x1b, 0x1e, 0x19, 0x03, - 0xa5, 0xc8, 0x61, 0xe7, 0x7d, 0xdd, 0xb4, 0x9a, 0x7a, 0xcb, 0xe8, 0xe8, 0x4d, 0xa5, 0x74, 0xdc, - 0x3e, 0xae, 0xac, 0xf3, 0xee, 0x8d, 0xae, 0x0e, 0x27, 0x1e, 0xfb, 0x1c, 0x8d, 0xb8, 0xee, 0x46, - 0xa2, 0xbb, 0xb1, 0xd4, 0xdd, 0x70, 0x7c, 0x0f, 0x07, 0xac, 0x31, 0x21, 0x93, 0x70, 0xee, 0xac, - 0xc5, 0xc5, 0x7f, 0x62, 0x94, 0x17, 0xb6, 0xbc, 0xfa, 0x19, 0x00, 0x00, 0xff, 0xff, 0xbc, 0x9c, - 0x80, 0xcd, 0x6e, 0x06, 0x00, 0x00, + proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_9c377873af38ad2e) +} + +var fileDescriptor_workflowtask_9c377873af38ad2e = []byte{ + // 771 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x5d, 0x6f, 0xda, 0x48, + 0x14, 0x5d, 0x87, 0xef, 0x0b, 0x01, 0x67, 0x36, 0xd9, 0x58, 0xec, 0x66, 0x17, 0x65, 0x1f, 0x96, + 0x87, 0x95, 0xa9, 0x88, 0xaa, 0x56, 0x79, 0x6a, 0x12, 0x4c, 0xe5, 0x26, 0x01, 0x64, 0x48, 0x50, + 0x23, 0x55, 0x96, 0xb1, 0x07, 0xea, 0x62, 0x3c, 0x96, 0x67, 0xdc, 0x84, 0x3f, 0xd2, 0x7f, 0xd5, + 0xff, 0x54, 0xcd, 0xd8, 0x06, 0x42, 0xa2, 0xa8, 0x7d, 0x9b, 0x39, 0xf7, 0x9c, 0x3b, 0xf7, 0xcc, + 0xdc, 0x3b, 0xa0, 0x2c, 0x88, 0x83, 0xbd, 0xd6, 0x3d, 0x09, 0xe7, 0x53, 0x8f, 0xdc, 0x33, 0x8b, + 0xce, 0xd5, 0x20, 0x24, 0x8c, 0xa0, 0x9a, 0x4d, 0x7c, 0x27, 
0xb2, 0x19, 0x09, 0x63, 0xa0, 0x7e, + 0x14, 0x53, 0x69, 0x34, 0x49, 0xd9, 0x81, 0x15, 0x5a, 0x0b, 0x9a, 0x84, 0xff, 0x9a, 0x11, 0x32, + 0xf3, 0x70, 0x4b, 0xec, 0x26, 0xd1, 0xb4, 0x45, 0x59, 0x18, 0xd9, 0x2c, 0x8e, 0x1e, 0x7f, 0x07, + 0xa8, 0x8c, 0x13, 0xd9, 0xc8, 0xa2, 0x73, 0x84, 0x20, 0xeb, 0x5b, 0x0b, 0xac, 0x48, 0x0d, 0xa9, + 0x59, 0x32, 0xc4, 0x1a, 0xa9, 0xf0, 0x3b, 0x2f, 0xc0, 0x0c, 0xf1, 0x14, 0x87, 0xd8, 0xb7, 0xb1, + 0x29, 0x28, 0x3b, 0x82, 0xb2, 0xc7, 0x43, 0x46, 0x1a, 0xe9, 0x71, 0x7e, 0x03, 0xca, 0x0e, 0xa6, + 0x76, 0xe8, 0x06, 0xcc, 0x25, 0xbe, 0x92, 0x11, 0xbc, 0x4d, 0x08, 0x7d, 0x02, 0xd9, 0xf5, 0x83, + 0x88, 0x99, 0xa2, 0x54, 0xcc, 0x70, 0x48, 0x95, 0x6c, 0x23, 0xd3, 0x2c, 0xb7, 0xdb, 0xea, 0x96, + 0x3f, 0x75, 0xb3, 0x3c, 0x55, 0xe7, 0xaa, 0xc1, 0x4a, 0xa4, 0xf9, 0x2c, 0x5c, 0x1a, 0x35, 0xf7, + 0x31, 0xca, 0x4d, 0xb0, 0x65, 0x80, 0x95, 0x5c, 0x6c, 0x82, 0xaf, 0xd1, 0x6b, 0x38, 0x74, 0x96, + 0xbe, 0xb5, 0x70, 0x6d, 0x53, 0x98, 0xe1, 0x16, 0xe2, 0xe3, 0x95, 0xbc, 0xa0, 0xed, 0x27, 0x61, + 0x7e, 0x0e, 0xb7, 0x21, 0xf2, 0xa1, 0x26, 0xc8, 0xb6, 0x45, 0xb1, 0xf9, 0xd5, 0xf2, 0xa2, 0x94, + 0x5f, 0x10, 0xfc, 0x2a, 0xc7, 0x6f, 0x39, 0x1c, 0x33, 0xff, 0x83, 0x9a, 0x60, 0xe2, 0x87, 0x20, + 0xc4, 0x94, 0x72, 0xe7, 0xc5, 0x35, 0x51, 0x5b, 0xa1, 0x68, 0x0c, 0x55, 0x07, 0xdb, 0x2e, 0x5f, + 0x9b, 0x3c, 0x44, 0x95, 0x92, 0xb0, 0xfe, 0xea, 0x65, 0xeb, 0x9d, 0x44, 0x73, 0xc1, 0x25, 0xb1, + 0xf1, 0x5d, 0x67, 0x13, 0x43, 0x6f, 0x40, 0x49, 0x2d, 0x4e, 0x49, 0x38, 0x17, 0x3e, 0x69, 0x52, + 0x33, 0x88, 0x52, 0x0e, 0x92, 0x78, 0x97, 0x84, 0x73, 0x9e, 0x94, 0xc6, 0xa5, 0x5f, 0xc2, 0xbf, + 0xcf, 0x08, 0x37, 0x5e, 0x28, 0x7e, 0xf0, 0xb2, 0xc8, 0xf1, 0xf7, 0x76, 0x8e, 0xf5, 0x9b, 0x88, + 0xd7, 0x7f, 0x07, 0x15, 0x07, 0x4f, 0xad, 0xc8, 0x63, 0xc2, 0x9d, 0x52, 0x11, 0xe6, 0x8e, 0x5e, + 0x34, 0xc7, 0xbb, 0x43, 0x48, 0xb8, 0x11, 0x74, 0x0d, 0xb0, 0x2e, 0x43, 0xd9, 0x15, 0x7a, 0xf5, + 0xe5, 0xcb, 0xd9, 0xdc, 0x5c, 0xb9, 0x94, 0x19, 0xa5, 0x69, 0x5a, 0x1e, 0xfa, 0x07, 0xca, 0x94, + 0x59, 0x21, 0x33, 0x1d, 0xec, 0x59, 0x4b, 0xa5, 0xda, 0x90, 0x9a, 0x39, 0x03, 0x04, 0xd4, 0xe1, + 0x08, 0x1a, 0x00, 0xa2, 0xd1, 0xc4, 0x4c, 0xc7, 0x27, 0xb9, 0xb1, 0x5a, 0x43, 0x6a, 0x96, 0xdb, + 0xc7, 0x4f, 0xce, 0x1d, 0x46, 0x93, 0xf4, 0x34, 0x61, 0x9a, 0x1a, 0x32, 0xdd, 0x82, 0xd0, 0x21, + 0x14, 0xbe, 0x10, 0xd7, 0x37, 0x89, 0xaf, 0xc8, 0x8d, 0x4c, 0xb3, 0x64, 0xe4, 0xf9, 0xb6, 0xef, + 0xf3, 0xce, 0xa4, 0xae, 0x3f, 0x57, 0xf6, 0xe2, 0xce, 0xe4, 0x6b, 0x54, 0x87, 0x22, 0x11, 0x63, + 0x61, 0x79, 0x0a, 0x6a, 0x48, 0xcd, 0xa2, 0xb1, 0xda, 0xd7, 0xdf, 0x83, 0xbc, 0x6d, 0x0d, 0x9d, + 0x40, 0x2e, 0xbe, 0x19, 0xe9, 0x67, 0x6e, 0x36, 0xe6, 0xd6, 0xef, 0x60, 0xff, 0xb9, 0xd9, 0x41, + 0x32, 0x64, 0xe6, 0x78, 0x99, 0x8c, 0x3b, 0x5f, 0xa2, 0xff, 0x21, 0x27, 0x9a, 0x5d, 0xcc, 0x77, + 0xb9, 0xfd, 0x87, 0x1a, 0x7f, 0x20, 0x6a, 0xfa, 0x81, 0xa8, 0xa2, 0xe7, 0x8d, 0x98, 0x74, 0xba, + 0xf3, 0x56, 0xaa, 0x07, 0x80, 0x9e, 0x36, 0xe7, 0x33, 0x99, 0x3b, 0x8f, 0x33, 0xff, 0xea, 0x93, + 0xae, 0x4f, 0x3c, 0xfe, 0x26, 0x41, 0x76, 0xc4, 0xa7, 0x1a, 0x20, 0x3f, 0xd4, 0xaf, 0x07, 0x57, + 0x9a, 0xfc, 0x1b, 0x2a, 0x43, 0xa1, 0xf3, 0xb1, 0x77, 0x76, 0xad, 0x5f, 0xc8, 0x12, 0xda, 0x85, + 0x52, 0xb7, 0x6f, 0x5c, 0x9a, 0x1f, 0xfa, 0x7a, 0x4f, 0xde, 0x41, 0x07, 0xb0, 0xb7, 0xda, 0x9a, + 0x29, 0x2b, 0x83, 0x2a, 0x50, 0xec, 0x68, 0x17, 0xfa, 0x50, 0xef, 0xf7, 0xe4, 0x2c, 0x2a, 0x42, + 0x56, 0xd0, 0x73, 0x48, 0x86, 0xca, 0xf0, 0xe6, 0xdc, 0x1c, 0xf7, 0x8d, 0xcb, 0xee, 0x55, 0x7f, + 0x2c, 0xe7, 0x51, 0x09, 0x72, 0xda, 0xad, 0xd6, 0x1b, 0xc9, 0x05, 0x4e, 0x1b, 0x9f, 
0xe9, 0x23, + 0xb9, 0xc8, 0x69, 0x37, 0x43, 0xcd, 0x30, 0x3b, 0x5a, 0x57, 0xef, 0x69, 0x1d, 0xb9, 0x74, 0xee, + 0xc2, 0x9f, 0x36, 0x59, 0xa8, 0x3e, 0x66, 0x53, 0xcf, 0x7d, 0xd8, 0x36, 0x78, 0x5e, 0xdd, 0x34, + 0x35, 0x98, 0xdc, 0x9d, 0xce, 0x5c, 0xf6, 0x39, 0x9a, 0xa8, 0x36, 0x59, 0xb4, 0x12, 0x4d, 0x6b, + 0xa5, 0x69, 0xd9, 0x9e, 0x8b, 0x7d, 0xd6, 0x9a, 0x91, 0x59, 0x18, 0xd8, 0x1b, 0xb8, 0xf8, 0xf0, + 0x27, 0x79, 0x91, 0xf2, 0xe4, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0x28, 0xc2, 0xb6, 0x19, 0x2b, + 0x06, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/worker.go b/client/gogrpc/conductor/worker.go index ce9970a579..63a67fb8bd 100644 --- a/client/gogrpc/conductor/worker.go +++ b/client/gogrpc/conductor/worker.go @@ -9,7 +9,7 @@ import ( "sync/atomic" "time" - pb "github.com/netflix/conductor/client/gogrpc/conductor/grpc" + "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks" "github.com/netflix/conductor/client/gogrpc/conductor/model" ) @@ -137,7 +137,7 @@ func (worker *Worker) onError(err error) { } } -func (worker *Worker) runTask(req *pb.PollRequest) error { +func (worker *Worker) runTask(req *tasks.PollRequest) error { ctx, cancel := context.WithTimeout(context.Background(), worker.TaskTimeout) defer cancel() @@ -146,10 +146,11 @@ func (worker *Worker) runTask(req *pb.PollRequest) error { return err } - result, err := worker.Executor.Execute(ctx, task) + result, err := worker.Executor.Execute(ctx, task.Task) // TODO: what if the task failed? if err == nil { - _, err := worker.Client.Tasks().UpdateTask(context.Background(), result) + request := tasks.UpdateTaskRequest{Result: result} + _, err := worker.Client.Tasks().UpdateTask(context.Background(), &request) if err != nil { return err } @@ -160,7 +161,7 @@ func (worker *Worker) runTask(req *pb.PollRequest) error { func (worker *Worker) thread() { defer worker.waitThreads.Done() - pollRequest := &pb.PollRequest{ + pollRequest := &tasks.PollRequest{ TaskType: worker.TaskType, WorkerId: worker.Identifier, } diff --git a/grpc/src/main/proto/grpc/event_service.proto b/grpc/src/main/proto/grpc/event_service.proto index 5aa11dfea3..88ebb9e033 100644 --- a/grpc/src/main/proto/grpc/event_service.proto +++ b/grpc/src/main/proto/grpc/event_service.proto @@ -5,7 +5,7 @@ import "model/eventhandler.proto"; option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "EventServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/events"; service EventService { // POST / diff --git a/grpc/src/main/proto/grpc/metadata_service.proto b/grpc/src/main/proto/grpc/metadata_service.proto index 3abe4fc952..1716c6bbec 100644 --- a/grpc/src/main/proto/grpc/metadata_service.proto +++ b/grpc/src/main/proto/grpc/metadata_service.proto @@ -6,7 +6,7 @@ import "model/workflowdef.proto"; option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "MetadataServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/metadata"; service MetadataService { // POST /workflow diff --git a/grpc/src/main/proto/grpc/search.proto b/grpc/src/main/proto/grpc/search.proto index d7f87c9b65..e9ad0f069c 100644 --- a/grpc/src/main/proto/grpc/search.proto +++ b/grpc/src/main/proto/grpc/search.proto @@ -3,7 +3,7 @@ package conductor.grpc.search; option java_package = "com.netflix.conductor.grpc"; option 
java_outer_classname = "SearchPb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/search"; message Request { int32 start = 1; diff --git a/grpc/src/main/proto/grpc/task_service.proto b/grpc/src/main/proto/grpc/task_service.proto index 7f540dfa4f..916e4745da 100644 --- a/grpc/src/main/proto/grpc/task_service.proto +++ b/grpc/src/main/proto/grpc/task_service.proto @@ -7,7 +7,7 @@ import "model/task.proto"; option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "TaskServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/tasks"; service TaskService { // GET /poll/{tasktype} diff --git a/grpc/src/main/proto/grpc/workflow_service.proto b/grpc/src/main/proto/grpc/workflow_service.proto index 083b778df5..a1644cdbfe 100644 --- a/grpc/src/main/proto/grpc/workflow_service.proto +++ b/grpc/src/main/proto/grpc/workflow_service.proto @@ -10,7 +10,7 @@ import "model/rerunworkflowrequest.proto"; option java_package = "com.netflix.conductor.grpc"; option java_outer_classname = "WorkflowServicePb"; -option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc"; +option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/grpc/workflows"; service WorkflowService { // POST / From 7902df96e0f77b02ff0a2e9fad15d468368b899e Mon Sep 17 00:00:00 2001 From: Vicent Marti Date: Mon, 9 Jul 2018 17:37:56 +0200 Subject: [PATCH 087/163] gradle: Implement ProtoGen as custom gradle task --- build.gradle | 2 +- client/gogrpc/Gopkg.lock | 2 +- common/build.gradle | 29 ++++++++++++- grpc/build.gradle | 2 +- .../conductor/grpc/AbstractProtoMapper.java | 2 +- protogen/build.gradle | 10 ----- .../conductor/protogen/ConductorProtoGen.java | 41 ------------------- settings.gradle | 1 - 8 files changed, 31 insertions(+), 58 deletions(-) delete mode 100644 protogen/build.gradle delete mode 100644 protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java diff --git a/build.gradle b/build.gradle index 01ea4774b8..3fb7a2b5f9 100644 --- a/build.gradle +++ b/build.gradle @@ -1,5 +1,4 @@ buildscript { - repositories { jcenter() } @@ -34,6 +33,7 @@ subprojects { repositories { jcenter() + maven { url "https://dl.bintray.com/vmg/protogen" } } dependencies { diff --git a/client/gogrpc/Gopkg.lock b/client/gogrpc/Gopkg.lock index 4a1314672e..3f80dd76e9 100644 --- a/client/gogrpc/Gopkg.lock +++ b/client/gogrpc/Gopkg.lock @@ -108,6 +108,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "89f331d8a132d464f65394ad8511cdef46cdda0597be7b071e5d22cf65c35d2b" + inputs-digest = "ca4602bc2319dbe8e34a88c36f06c428a40582ae6be51bb01af6953432d754c5" solver-name = "gps-cdcl" solver-version = 1 diff --git a/common/build.gradle b/common/build.gradle index 43a037129d..fe39e5d06e 100644 --- a/common/build.gradle +++ b/common/build.gradle @@ -1,3 +1,13 @@ +buildscript { + repositories { + jcenter() + maven { url "https://dl.bintray.com/vmg/protogen" } + } + dependencies { + classpath "com.github.vmg.protogen:protogen-codegen:1.2.0" + } +} + dependencies { compile "com.github.rholder:guava-retrying:${revGuavaRetrying}" compile "org.slf4j:slf4j-api:${revSlf4j}" @@ -5,5 +15,20 @@ dependencies { compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}" compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}" 
compile "javax.inject:javax.inject:1" - compile "com.github.vmg.protogen:protogen-annotations:${revProtoGen}" -} \ No newline at end of file + compile "com.github.vmg.protogen:protogen-annotations:1.0.0" +} + +import com.github.vmg.protogen.ProtoGenTask; + +task protogen(dependsOn: jar, type: ProtoGenTask) { + protoPackage = "conductor.proto" + javaPackage = "com.netflix.conductor.proto" + goPackage = "github.com/netflix/conductor/client/gogrpc/conductor/model" + + protosDir = new File("${rootDir}/grpc/src/main/proto") + mapperDir = new File("${rootDir}/grpc/src/main/java/com/netflix/conductor/grpc") + mapperPackage = "com.netflix.conductor.grpc"; + + sourceJar = jar.archivePath + sourcePackage = "com.netflix.conductor.common" +} diff --git a/grpc/build.gradle b/grpc/build.gradle index 572cfdbd5d..ea63b793e4 100644 --- a/grpc/build.gradle +++ b/grpc/build.gradle @@ -42,4 +42,4 @@ idea { } } -compileJava.dependsOn(tasks.getByPath(":conductor-protogen:generate")) +compileJava.dependsOn(tasks.getByPath(":conductor-common:protogen")) diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 78eb08b95f..99ae498fb4 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -50,7 +50,7 @@ import java.util.stream.Collectors; import javax.annotation.Generated; -@Generated("ProtoGen") +@Generated("com.github.vmg.protogen.ProtoGen") public abstract class AbstractProtoMapper { public EventExecutionPb.EventExecution toProto(EventExecution from) { EventExecutionPb.EventExecution.Builder to = EventExecutionPb.EventExecution.newBuilder(); diff --git a/protogen/build.gradle b/protogen/build.gradle deleted file mode 100644 index 813c481255..0000000000 --- a/protogen/build.gradle +++ /dev/null @@ -1,10 +0,0 @@ -dependencies { - compile project(':conductor-common') - compile "com.github.vmg.protogen:protogen-codegen:${revProtoGen}" -} - -task generate(type: JavaExec) { - classpath = sourceSets.main.runtimeClasspath - main = "com.netflix.conductor.protogen.ConductorProtoGen" - workingDir = rootDir -} diff --git a/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java b/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java deleted file mode 100644 index ac93f58c38..0000000000 --- a/protogen/src/main/java/com/netflix/conductor/protogen/ConductorProtoGen.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.netflix.conductor.protogen; - -import com.github.vmg.protogen.ProtoGen; - -public class ConductorProtoGen { - private final static String PROTO_PACKAGE_NAME = "conductor.proto"; - private final static String JAVA_PACKAGE_NAME = "com.netflix.conductor.proto"; - private final static String GO_PACKAGE_NAME = "github.com/netflix/conductor/client/gogrpc/conductor/model"; - private final static String MAPPER_PACKAGE_NAME = "com.netflix.conductor.grpc"; - - public static void main(String[] args) throws Exception { - ProtoGen generator = new ProtoGen( - PROTO_PACKAGE_NAME, JAVA_PACKAGE_NAME, GO_PACKAGE_NAME - ); - - generator.process(com.netflix.conductor.common.metadata.events.EventExecution.class); - generator.process(com.netflix.conductor.common.metadata.events.EventHandler.class); - - generator.process(com.netflix.conductor.common.metadata.tasks.PollData.class); - generator.process(com.netflix.conductor.common.metadata.tasks.Task.class); - 
From 5ae0175457451f17dcd90fec9b8b28133c0a6391 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 9 Jul 2018 17:42:31 +0200
Subject: [PATCH 088/163] gradle: Move versions to versionsOfDependencies.gradle

---
 common/build.gradle           | 6 +++---
 versionsOfDependencies.gradle | 4 +++-
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/common/build.gradle b/common/build.gradle
index fe39e5d06e..530c593189 100644
--- a/common/build.gradle
+++ b/common/build.gradle
@@ -4,7 +4,7 @@ buildscript {
         maven { url "https://dl.bintray.com/vmg/protogen" }
     }
     dependencies {
-        classpath "com.github.vmg.protogen:protogen-codegen:1.2.0"
+        classpath "com.github.vmg.protogen:protogen-codegen:${revProtogenCodegen}"
     }
 }
 
@@ -14,8 +14,8 @@ dependencies {
     compile "com.google.protobuf:protobuf-java:${revProtoBuf}"
     compile "com.fasterxml.jackson.core:jackson-databind:${revJacksonDatabind}"
     compile "com.fasterxml.jackson.core:jackson-core:${revJacksonCore}"
-    compile "javax.inject:javax.inject:1"
-    compile "com.github.vmg.protogen:protogen-annotations:1.0.0"
+    compile "javax.inject:javax.inject:${revJavaxInject}"
+    compile "com.github.vmg.protogen:protogen-annotations:${revProtogenAnnotations}"
 }
 
 import com.github.vmg.protogen.ProtoGenTask;
diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle
index ac28ce972b..35f2bce3f9 100644
--- a/versionsOfDependencies.gradle
+++ b/versionsOfDependencies.gradle
@@ -21,6 +21,7 @@ ext {
     revHikariCP = '2.6.3'
     revJsonPath = '2.2.0'
     revJaxrsJackson = '2.7.5'
+    revJavaxInject = '1'
     revJacksonCore = '2.7.5'
     revJacksonDatabind = '2.7.5'
     revJedis = '2.8.1'
@@ -42,7 +43,8 @@ ext {
     revOauthClient = '1.19.4'
     revOauthSignature = '1.19.4'
     revProtoBuf = '3.5.1'
-    revProtoGen = '0.4.0'
+    revProtogenAnnotations = '1.0.0'
+    revProtogenCodegen = '1.2.0'
     revRarefiedRedis = '0.0.17'
     revServo = '0.12.17'
     revServletApi = '3.1.0'

From b0a0747cb326e792adc5ebe6e50ff349555c8233 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Mon, 9 Jul 2018 17:44:54 +0200
Subject: [PATCH 089/163] common: Extract constants in JsonMapperProvider

---
 .../common/utils/JsonMapperProvider.java | 17 +++++++++++------
 1 file changed, 11 insertions(+), 6 deletions(-)

diff --git a/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java b/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java
index 94c38042d1..597b0ed926 100644
--- a/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java
+++ b/common/src/main/java/com/netflix/conductor/common/utils/JsonMapperProvider.java
@@ -28,6 +28,9 @@ public JsonMapperProvider() {}
      * {@see AnySerializer}, {@see AnyDeserializer}
      */
     private static class JsonProtoModule extends SimpleModule {
+        private final static String JSON_TYPE = "@type";
+        private final static String JSON_VALUE = "@value";
+
         /**
          * AnySerializer converts a ProtoBuf {@link Any} object into its JSON
          * representation.
@@ -87,8 +90,8 @@ protected class AnySerializer extends JsonSerializer<Any> {
             public void serialize(Any value, JsonGenerator jgen, SerializerProvider provider) throws IOException, JsonProcessingException {
                 jgen.writeStartObject();
-                jgen.writeStringField("@type", value.getTypeUrl());
-                jgen.writeBinaryField("@value", value.getValue().toByteArray());
+                jgen.writeStringField(JSON_TYPE, value.getTypeUrl());
+                jgen.writeBinaryField(JSON_VALUE, value.getValue().toByteArray());
                 jgen.writeEndObject();
             }
         }
@@ -104,14 +107,16 @@ protected class AnyDeserializer extends JsonDeserializer<Any> {
             public Any deserialize(JsonParser p, DeserializationContext ctxt) throws IOException, JsonProcessingException {
                 JsonNode root = p.getCodec().readTree(p);
-                JsonNode type = root.get("@type");
-                JsonNode value = root.get("@value");
+                JsonNode type = root.get(JSON_TYPE);
+                JsonNode value = root.get(JSON_VALUE);
 
-                if (type == null || !type.isTextual())
+                if (type == null || !type.isTextual()) {
                     throw ctxt.reportMappingException("invalid '@type' field when deserializing ProtoBuf Any object");
+                }
 
-                if (value == null || !value.isTextual())
+                if (value == null || !value.isTextual()) {
                     throw ctxt.reportMappingException("invalid '@value' field when deserializing ProtoBuf Any object");
+                }
 
                 return Any.newBuilder()
                         .setTypeUrl(type.textValue())
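
The constants keep the serializer and deserializer halves of JsonProtoModule agreeing on the field names; the wire behaviour is unchanged. A protobuf Any is still rendered as a two-field JSON object, with the payload base64-encoded by writeBinaryField. A minimal round-trip sketch, assuming the ObjectMapper comes from this provider (the type URL and payload are invented for the example):

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.google.protobuf.Any;
    import com.google.protobuf.ByteString;
    import com.netflix.conductor.common.utils.JsonMapperProvider;

    public class AnyRoundTrip {
        public static void main(String[] args) throws Exception {
            ObjectMapper mapper = new JsonMapperProvider().get();
            Any original = Any.newBuilder()
                    .setTypeUrl("type.googleapis.com/example.Payload")
                    .setValue(ByteString.copyFromUtf8("hello"))
                    .build();

            // Serializes to {"@type":"type.googleapis.com/example.Payload","@value":"aGVsbG8="}
            String json = mapper.writeValueAsString(original);

            // The deserializer rejects the document unless both fields are textual.
            Any restored = mapper.readValue(json, Any.class);
            System.out.println(restored.getTypeUrl().equals(original.getTypeUrl()));
        }
    }
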
From fca70482e0c062474f1c92e8d87fd1db07d03a92 Mon Sep 17 00:00:00 2001
From: Vicent Marti
Date: Tue, 10 Jul 2018 14:54:06 +0200
Subject: [PATCH 090/163] es: Do not close a null ElasticSearch instance

---
 .../elasticsearch/es5/EmbeddedElasticSearchV5.java | 13 ++++++------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
index 3bf20aa319..9fec326b12 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5.java
@@ -77,16 +77,15 @@ public synchronized void start(String clusterName, String host, int port) throws
         logger.info("Starting ElasticSearch for cluster {} ", settings.get("cluster.name"));
         instance = new PluginConfigurableNode(settings, singletonList(Netty4Plugin.class));
         instance.start();
-        Runtime.getRuntime().addShutdownHook(new Thread() {
-            @Override
-            public void run() {
-                try {
+        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+            try {
+                if (instance != null) {
                     instance.close();
-                } catch (IOException e) {
-                    logger.error("Error closing ElasticSearch");
                 }
+            } catch (IOException e) {
+                logger.error("Error closing ElasticSearch");
             }
-        });
+        }));
         logger.info("ElasticSearch cluster {} started in local mode on port {}", instance.settings().get("cluster.name"), getPort());
     }
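
Beyond the null check, the hunk flattens the anonymous Thread subclass into a lambda. The check matters because the hook can fire after the node has been torn down, or after a startup that failed partway, at which point instance may be null; closing only what exists is the general shape for shutdown hooks over lazily created resources. An illustrative, stand-alone version of the pattern (not code from this patch):

    import java.io.Closeable;
    import java.io.IOException;

    final class GuardedShutdown {
        // May be null if the resource was never created, or was already released.
        private volatile Closeable resource;

        void install() {
            Runtime.getRuntime().addShutdownHook(new Thread(() -> {
                Closeable current = resource; // read the field once
                if (current == null) {
                    return; // nothing was started, nothing to close
                }
                try {
                    current.close();
                } catch (IOException ignored) {
                    // best effort only; the JVM is already going down
                }
            }));
        }
    }
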
.../mapper/UserDefinedTaskMapperTest.java | 16 +- .../execution/mapper/WaitTaskMapperTest.java | 9 +- .../core/execution/tasks/TestEvent.java | 516 ++-- .../conductor/dao/ExecutionDAOTest.java | 410 +++ core/src/test/resources/def.json | 1 - core/src/test/resources/test.json | 2216 ++++++++++------- .../ElasticSearchConfiguration.java | 1 + .../server/service/MetadataServiceImpl.java | 45 +- .../server/service/WorkflowServiceImpl.java | 36 +- .../conductor/grpc/AbstractProtoMapper.java | 6 + grpc/src/main/proto/model/workflow.proto | 2 + .../server/resources/MetadataResource.java | 163 +- .../server/resources/WorkflowResource.java | 434 ++-- mysql-persistence/build.gradle | 25 +- .../conductor/dao/mysql/MySQLMetadataDAO.java | 98 +- ...BaseDAOTest.java => MySQLDAOTestUtil.java} | 36 +- .../dao/mysql/MySQLExecutionDAOTest.java | 482 +--- .../dao/mysql/MySQLMetadataDAOTest.java | 32 +- .../dao/mysql/MySQLQueueDAOTest.java | 42 +- redis-persistence/build.gradle | 2 + .../dao/dynomite/RedisExecutionDAO.java | 29 +- .../dao/dynomite/RedisMetadataDAO.java | 36 +- .../dao/dynomite/RedisExecutionDAOTest.java | 500 +--- .../dao/dynomite/RedisMetadataDAOTest.java | 36 +- .../integration/WorkflowServiceTest.java | 83 +- 60 files changed, 4606 insertions(+), 4141 deletions(-) create mode 100644 core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java delete mode 100644 core/src/test/resources/def.json rename mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/{MySQLBaseDAOTest.java => MySQLDAOTestUtil.java} (78%) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 01f72be73c..51d4e2bcf0 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -23,6 +23,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import com.github.vmg.protogen.annotations.*; @@ -35,7 +36,7 @@ @ProtoMessage public class WorkflowDef extends Auditable { - @ProtoField(id = 1) + @ProtoField(id = 1) private String name; @ProtoField(id = 2) @@ -242,4 +243,33 @@ public List all(){ } return all; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkflowDef that = (WorkflowDef) o; + return getVersion() == that.getVersion() && + getSchemaVersion() == that.getSchemaVersion() && + Objects.equals(getName(), that.getName()) && + Objects.equals(getDescription(), that.getDescription()) && + Objects.equals(getTasks(), that.getTasks()) && + Objects.equals(getInputParameters(), that.getInputParameters()) && + Objects.equals(getOutputParameters(), that.getOutputParameters()) && + Objects.equals(getFailureWorkflow(), that.getFailureWorkflow()); + } + + @Override + public int hashCode() { + return Objects.hash( + getName(), + getDescription(), + getVersion(), + getTasks(), + getInputParameters(), + getOutputParameters(), + getFailureWorkflow(), + getSchemaVersion() + ); + } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index e0d524d2ab..b47b5a42d2 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java 
+++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -28,6 +28,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; /** @@ -573,4 +574,56 @@ public WorkflowTask get(String taskReferenceName){ public String toString() { return name + "/" + taskReferenceName; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + WorkflowTask that = (WorkflowTask) o; + return getStartDelay() == that.getStartDelay() && + isOptional() == that.isOptional() && + Objects.equals(getName(), that.getName()) && + Objects.equals(getTaskReferenceName(), that.getTaskReferenceName()) && + Objects.equals(getDescription(), that.getDescription()) && + Objects.equals(getInputParameters(), that.getInputParameters()) && + Objects.equals(getType(), that.getType()) && + Objects.equals(getDynamicTaskNameParam(), that.getDynamicTaskNameParam()) && + Objects.equals(getCaseValueParam(), that.getCaseValueParam()) && + Objects.equals(getCaseExpression(), that.getCaseExpression()) && + Objects.equals(getDecisionCases(), that.getDecisionCases()) && + Objects.equals(getDynamicForkJoinTasksParam(), that.getDynamicForkJoinTasksParam()) && + Objects.equals(getDynamicForkTasksParam(), that.getDynamicForkTasksParam()) && + Objects.equals(getDynamicForkTasksInputParamName(), that.getDynamicForkTasksInputParamName()) && + Objects.equals(getDefaultCase(), that.getDefaultCase()) && + Objects.equals(getForkTasks(), that.getForkTasks()) && + Objects.equals(getSubWorkflowParam(), that.getSubWorkflowParam()) && + Objects.equals(getJoinOn(), that.getJoinOn()) && + Objects.equals(getSink(), that.getSink()); + } + + @Override + public int hashCode() { + + return Objects.hash( + getName(), + getTaskReferenceName(), + getDescription(), + getInputParameters(), + getType(), + getDynamicTaskNameParam(), + getCaseValueParam(), + getCaseExpression(), + getDecisionCases(), + getDynamicForkJoinTasksParam(), + getDynamicForkTasksParam(), + getDynamicForkTasksInputParamName(), + getDefaultCase(), + getForkTasks(), + getStartDelay(), + getSubWorkflowParam(), + getJoinOn(), + getSink(), + isOptional() + ); + } } diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 21459d01b8..632f757850 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -1,17 +1,14 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.common.run; @@ -20,342 +17,433 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import com.github.vmg.protogen.annotations.*; import com.netflix.conductor.common.metadata.Auditable; import com.netflix.conductor.common.metadata.tasks.Task; - +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; @ProtoMessage -public class Workflow extends Auditable{ - - @ProtoEnum - public enum WorkflowStatus { - RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), PAUSED(false, true); - - private boolean terminal; - - private boolean successful; - - WorkflowStatus(boolean terminal, boolean successful){ - this.terminal = terminal; - this.successful = successful; - } - - public boolean isTerminal(){ - return terminal; - } - - public boolean isSuccessful(){ - return successful; - } - } - - @ProtoField(id = 1) - private WorkflowStatus status = WorkflowStatus.RUNNING; - - @ProtoField(id = 2) - private long endTime; - - @ProtoField(id = 3) - private String workflowId; - - @ProtoField(id = 4) - private String parentWorkflowId; - - @ProtoField(id = 5) - private String parentWorkflowTaskId; - - @ProtoField(id = 6) - private List tasks = new LinkedList<>(); - - @ProtoField(id = 8) - private Map input = new HashMap<>(); - - @ProtoField(id = 9) - private Map output = new HashMap<>();; - - @ProtoField(id = 10) - private String workflowType; - - @ProtoField(id = 11) - private int version; - - @ProtoField(id = 12) - private String correlationId; - - @ProtoField(id = 13) - private String reRunFromWorkflowId; - - @ProtoField(id = 14) - private String reasonForIncompletion; - - @ProtoField(id = 15) - private int schemaVersion; - - @ProtoField(id = 16) - private String event; - - @ProtoField(id = 17) - private Map taskToDomain = new HashMap<>(); - - @ProtoField(id = 18) - private Set failedReferenceTaskNames = new HashSet<>(); - - public Workflow(){ - - } - /** - * @return the status - */ - public WorkflowStatus getStatus() { - return status; - } - - /** - * @param status the status to set - */ - public void setStatus(WorkflowStatus status) { - this.status = status; - } - - /** - * @return the startTime - */ - public long getStartTime() { - return getCreateTime(); - } - - /** - * @param startTime the startTime to set - */ - public void setStartTime(long startTime) { - this.setCreateTime(startTime); - } - - /** - * @return the endTime - */ - public long getEndTime() { - return endTime; - } - - /** - * @param endTime the endTime to set - */ - public void setEndTime(long endTime) { - this.endTime = endTime; - } - - /** - * @return the workflowId - */ - public String getWorkflowId() { - return workflowId; - } - /** - * @param workflowId the workflowId to set - */ - public void setWorkflowId(String workflowId) { - this.workflowId = workflowId; - } - /** - * @return the tasks which are scheduled, in progress or completed. 
- */ - public List getTasks() { - return tasks; - } - /** - * @param tasks the tasks to set - */ - public void setTasks(List tasks) { - this.tasks = tasks; - } - - /** - * @return the input - */ - public Map getInput() { - return input; - } - /** - * @param input the input to set - */ - public void setInput(Map input) { - this.input = input; - } - /** - * @return the task to domain map - */ - public Map getTaskToDomain() { - return taskToDomain; - } - /** - * @param taskToDomain the task to domain map - */ - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - /** - * @return the output - */ - public Map getOutput() { - return output; - } - /** - * @param output the output to set - */ - public void setOutput(Map output) { - this.output = output; - } - - /** - * - * @return The correlation id used when starting the workflow - */ - public String getCorrelationId() { - return correlationId; - } - - /** - * - * @param correlationId the correlation id - */ - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - - /** - * - * @return Workflow Type / Definition - */ - public String getWorkflowType() { - return workflowType; - } - - /** - * - * @param workflowType Workflow type - */ - public void setWorkflowType(String workflowType) { - this.workflowType = workflowType; - } - - - /** - * @return the version - */ - public int getVersion() { - return version; - } - /** - * @param version the version to set - */ - public void setVersion(int version) { - this.version = version; - } - - public String getReRunFromWorkflowId() { - return reRunFromWorkflowId; - } - - public void setReRunFromWorkflowId(String reRunFromWorkflowId) { - this.reRunFromWorkflowId = reRunFromWorkflowId; - } - - public String getReasonForIncompletion() { - return reasonForIncompletion; - } - - public void setReasonForIncompletion(String reasonForIncompletion) { - this.reasonForIncompletion = reasonForIncompletion; - } - - /** - * @return the parentWorkflowId - */ - public String getParentWorkflowId() { - return parentWorkflowId; - } - /** - * @param parentWorkflowId the parentWorkflowId to set - */ - public void setParentWorkflowId(String parentWorkflowId) { - this.parentWorkflowId = parentWorkflowId; - } - - /** - * @return the parentWorkflowTaskId - */ - public String getParentWorkflowTaskId() { - return parentWorkflowTaskId; - } - /** - * @param parentWorkflowTaskId the parentWorkflowTaskId to set - */ - public void setParentWorkflowTaskId(String parentWorkflowTaskId) { - this.parentWorkflowTaskId = parentWorkflowTaskId; - } - /** - * @return the schemaVersion Version of the schema for the workflow definition - */ - public int getSchemaVersion() { - return schemaVersion; - } - /** - * @param schemaVersion the schemaVersion to set - */ - public void setSchemaVersion(int schemaVersion) { - this.schemaVersion = schemaVersion; - } - - /** - * - * @return Name of the event that started the workflow - */ - public String getEvent() { - return event; - } - - /** - * - * @param event Name of the event that started the workflow - */ - public void setEvent(String event) { - this.event = event; - } - - public Set getFailedReferenceTaskNames() { - return failedReferenceTaskNames; - } - - public void setFailedReferenceTaskNames(Set failedReferenceTaskNames) { - this.failedReferenceTaskNames = failedReferenceTaskNames; - } - - @Override - public String toString() { - return workflowType + "." + version + "/" + workflowId + "." 
+ status; - } - - public Task getTaskByRefName(String refName) { - if (refName == null) { - throw new RuntimeException("refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value"); - } - LinkedList found = new LinkedList(); - for (Task t : tasks) { - if (t.getReferenceTaskName() == null) { - throw new RuntimeException("Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified."); - } - if (t.getReferenceTaskName().equals(refName)) { - found.add(t); - } - } - if (found.isEmpty()) { - return null; - } - return found.getLast(); - } - -} \ No newline at end of file +public class Workflow extends Auditable { + @ProtoEnum + public enum WorkflowStatus { + RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), PAUSED(false, true); + + private boolean terminal; + + private boolean successful; + + WorkflowStatus(boolean terminal, boolean successful) { + this.terminal = terminal; + this.successful = successful; + } + + public boolean isTerminal() { + return terminal; + } + + public boolean isSuccessful() { + return successful; + } + } + + @ProtoField(id = 1) + private WorkflowStatus status = WorkflowStatus.RUNNING; + + @ProtoField(id = 2) + private long endTime; + + @ProtoField(id = 3) + private String workflowId; + + @ProtoField(id = 4) + private String parentWorkflowId; + + @ProtoField(id = 5) + private String parentWorkflowTaskId; + + @ProtoField(id = 6) + private List tasks = new LinkedList<>(); + + @ProtoField(id = 8) + private Map input = new HashMap<>(); + + @ProtoField(id = 9) + private Map output = new HashMap<>(); + + @ProtoField(id = 10) + @Deprecated + private String workflowType; + + @ProtoField(id = 11) + @Deprecated + private int version; + + @ProtoField(id = 12) + private String correlationId; + + @ProtoField(id = 13) + private String reRunFromWorkflowId; + + @ProtoField(id = 14) + private String reasonForIncompletion; + + @ProtoField(id = 15) + @Deprecated + private int schemaVersion; + + @ProtoField(id = 16) + private String event; + + @ProtoField(id = 17) + private Map taskToDomain = new HashMap<>(); + + @ProtoField(id = 18) + private Set failedReferenceTaskNames = new HashSet<>(); + + @ProtoField(id = 19) + private WorkflowDef workflowDefinition; + + public Workflow() { + + } + + /** + * @return the status + */ + public WorkflowStatus getStatus() { + return status; + } + + /** + * @param status the status to set + */ + public void setStatus(WorkflowStatus status) { + this.status = status; + } + + /** + * @return the startTime + */ + public long getStartTime() { + return getCreateTime(); + } + + /** + * @param startTime the startTime to set + */ + public void setStartTime(long startTime) { + this.setCreateTime(startTime); + } + + /** + * @return the endTime + */ + public long getEndTime() { + return endTime; + } + + /** + * @param endTime the endTime to set + */ + public void setEndTime(long endTime) { + this.endTime = endTime; + } + + /** + * @return the workflowId + */ + public String getWorkflowId() { + return workflowId; + } + + /** + * @param workflowId the workflowId to set + */ + public void setWorkflowId(String workflowId) { + this.workflowId = workflowId; + } + + /** + * @return the tasks which are scheduled, in progress or completed. 
+ */ + public List getTasks() { + return tasks; + } + + /** + * @param tasks the tasks to set + */ + public void setTasks(List tasks) { + this.tasks = tasks; + } + + /** + * @return the input + */ + public Map getInput() { + return input; + } + + /** + * @param input the input to set + */ + public void setInput(Map input) { + this.input = input; + } + + /** + * @return the task to domain map + */ + public Map getTaskToDomain() { + return taskToDomain; + } + + /** + * @param taskToDomain the task to domain map + */ + public void setTaskToDomain(Map taskToDomain) { + this.taskToDomain = taskToDomain; + } + + /** + * @return the output + */ + public Map getOutput() { + return output; + } + + /** + * @param output the output to set + */ + public void setOutput(Map output) { + this.output = output; + } + + /** + * @return The correlation id used when starting the workflow + */ + public String getCorrelationId() { + return correlationId; + } + + /** + * @param correlationId the correlation id + */ + public void setCorrelationId(String correlationId) { + this.correlationId = correlationId; + } + + /** + * @return Workflow Type / Definition + */ + @Deprecated + public String getWorkflowType() { + return getWorkflowName(); + } + + /** + * @param workflowType Workflow type + */ + @Deprecated + public void setWorkflowType(String workflowType) { + this.workflowType = workflowType; + } + + + /** + * @return the version + */ + @Deprecated + public int getVersion() { + return getWorkflowVersion(); + } + + /** + * @param version the version to set + */ + @Deprecated + public void setVersion(int version) { + this.version = version; + } + + public String getReRunFromWorkflowId() { + return reRunFromWorkflowId; + } + + public void setReRunFromWorkflowId(String reRunFromWorkflowId) { + this.reRunFromWorkflowId = reRunFromWorkflowId; + } + + public String getReasonForIncompletion() { + return reasonForIncompletion; + } + + public void setReasonForIncompletion(String reasonForIncompletion) { + this.reasonForIncompletion = reasonForIncompletion; + } + + /** + * @return the parentWorkflowId + */ + public String getParentWorkflowId() { + return parentWorkflowId; + } + + /** + * @param parentWorkflowId the parentWorkflowId to set + */ + public void setParentWorkflowId(String parentWorkflowId) { + this.parentWorkflowId = parentWorkflowId; + } + + /** + * @return the parentWorkflowTaskId + */ + public String getParentWorkflowTaskId() { + return parentWorkflowTaskId; + } + + /** + * @param parentWorkflowTaskId the parentWorkflowTaskId to set + */ + public void setParentWorkflowTaskId(String parentWorkflowTaskId) { + this.parentWorkflowTaskId = parentWorkflowTaskId; + } + + /** + * @return the schemaVersion Version of the schema for the workflow definition + */ + public int getSchemaVersion() { + return getWorkflowDefinition().getSchemaVersion(); + } + + /** + * @param schemaVersion the schemaVersion to set + */ + @Deprecated + public void setSchemaVersion(int schemaVersion) { + this.schemaVersion = schemaVersion; + } + + /** + * @return Name of the event that started the workflow + */ + public String getEvent() { + return event; + } + + /** + * @param event Name of the event that started the workflow + */ + public void setEvent(String event) { + this.event = event; + } + + public Set getFailedReferenceTaskNames() { + return failedReferenceTaskNames; + } + + public void setFailedReferenceTaskNames(Set failedReferenceTaskNames) { + this.failedReferenceTaskNames = failedReferenceTaskNames; + } + + public WorkflowDef 
getWorkflowDefinition() { + return workflowDefinition; + } + + public void setWorkflowDefinition(WorkflowDef workflowDefinition) { + this.workflowDefinition = workflowDefinition; + } + + /** + * Convenience method for accessing the workflow definition name. + * @return the workflow definition name. + */ + public String getWorkflowName() { + return getWorkflowDefinition().getName(); + } + + /** + * Convenience method for accessing the workflow definition version. + * @return the workflow definition version. + */ + public int getWorkflowVersion() { + return getWorkflowDefinition().getVersion(); + } + + public Task getTaskByRefName(String refName) { + if (refName == null) { + throw new RuntimeException("refName passed is null. Check the workflow execution. For dynamic tasks, make sure referenceTaskName is set to a not null value"); + } + LinkedList found = new LinkedList(); + for (Task t : tasks) { + if (t.getReferenceTaskName() == null) { + throw new RuntimeException("Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified."); + } + if (t.getReferenceTaskName().equals(refName)) { + found.add(t); + } + } + if (found.isEmpty()) { + return null; + } + return found.getLast(); + } + + @Override + public String toString() { + return workflowDefinition.getName() + "." + workflowDefinition.getVersion() + "/" + workflowId + "." + status; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Workflow workflow = (Workflow) o; + return getEndTime() == workflow.getEndTime() && + getVersion() == workflow.getVersion() && + getSchemaVersion() == workflow.getSchemaVersion() && + getStatus() == workflow.getStatus() && + Objects.equals(getWorkflowId(), workflow.getWorkflowId()) && + Objects.equals(getParentWorkflowId(), workflow.getParentWorkflowId()) && + Objects.equals(getParentWorkflowTaskId(), workflow.getParentWorkflowTaskId()) && + Objects.equals(getTasks(), workflow.getTasks()) && + Objects.equals(getInput(), workflow.getInput()) && + Objects.equals(getOutput(), workflow.getOutput()) && + Objects.equals(getWorkflowType(), workflow.getWorkflowType()) && + Objects.equals(getCorrelationId(), workflow.getCorrelationId()) && + Objects.equals(getReRunFromWorkflowId(), workflow.getReRunFromWorkflowId()) && + Objects.equals(getReasonForIncompletion(), workflow.getReasonForIncompletion()) && + Objects.equals(getEvent(), workflow.getEvent()) && + Objects.equals(getTaskToDomain(), workflow.getTaskToDomain()) && + Objects.equals(getFailedReferenceTaskNames(), workflow.getFailedReferenceTaskNames()) && + Objects.equals(getWorkflowDefinition(), workflow.getWorkflowDefinition()); + } + + @Override + public int hashCode() { + return Objects.hash( + getStatus(), + getEndTime(), + getWorkflowId(), + getParentWorkflowId(), + getParentWorkflowTaskId(), + getTasks(), + getInput(), + getOutput(), + getWorkflowType(), + getVersion(), + getCorrelationId(), + getReRunFromWorkflowId(), + getReasonForIncompletion(), + getSchemaVersion(), + getEvent(), + getTaskToDomain(), + getFailedReferenceTaskNames(), + getWorkflowDefinition() + ); + } +} diff --git a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java index 53d95cf4d9..d174a7d1f6 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java +++ b/common/src/main/java/com/netflix/conductor/common/run/WorkflowSummary.java @@
-89,8 +89,8 @@ public WorkflowSummary(Workflow workflow) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'"); sdf.setTimeZone(gmt); - this.workflowType = workflow.getWorkflowType(); - this.version = workflow.getVersion(); + this.workflowType = workflow.getWorkflowName(); + this.version = workflow.getWorkflowVersion(); this.workflowId = workflow.getWorkflowId(); this.correlationId = workflow.getCorrelationId(); if(workflow.getCreateTime() != null){ diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index c31e8bbb78..236e402ac1 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -1,20 +1,17 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
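The WorkflowSummary hunk above shows the caller-side migration: summaries now read the name and version through the workflow's embedded definition (getWorkflowName()/getWorkflowVersion()) rather than the deprecated copies. A compressed sketch of this deprecation-by-delegation pattern, with hypothetical class names and assuming the definition has been set before the accessors are called:

/** Hypothetical stand-in for an embedded definition object. */
class Definition {
    private final String name;
    private final int version;

    Definition(String name, int version) {
        this.name = name;
        this.version = version;
    }

    String getName() { return name; }
    int getVersion() { return version; }
}

/** Hypothetical instance class: legacy fields remain, but reads delegate to the definition. */
class Instance {
    @Deprecated
    private String type; // kept only so old serialized payloads still deserialize

    private Definition definition; // the new source of truth

    void setDefinition(Definition definition) { this.definition = definition; }

    @Deprecated
    String getType() { return getName(); } // old accessor forwards to the new one

    String getName() { return definition.getName(); } // assumes definition != null
    int getVersion() { return definition.getVersion(); }
}

One consequence worth noting in the real patch: Workflow.getSchemaVersion() now dereferences getWorkflowDefinition(), so a workflow deserialized without its definition will throw a NullPointerException from these accessors until the definition is populated.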
*/ /** - * + * */ package com.netflix.conductor.contribs.http; @@ -43,6 +40,7 @@ import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; import com.netflix.conductor.dao.MetadataDAO; + import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; @@ -52,9 +50,6 @@ import org.junit.BeforeClass; import org.junit.Test; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.PrintWriter; @@ -64,6 +59,10 @@ import java.util.Set; import java.util.stream.Collectors; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; + import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -78,333 +77,337 @@ @SuppressWarnings("unchecked") public class TestHttpTask { - private static final String ERROR_RESPONSE = "Something went wrong!"; - - private static final String TEXT_RESPONSE = "Text Response"; - - private static final double NUM_RESPONSE = 42.42d; - - private static String JSON_RESPONSE; - - private HttpTask httpTask; - - private WorkflowExecutor executor = mock(WorkflowExecutor.class); - - private Workflow workflow = new Workflow(); - - private static Server server; - - private static ObjectMapper objectMapper = new ObjectMapper(); - - @BeforeClass - public static void init() throws Exception { - - Map map = new HashMap<>(); - map.put("key", "value1"); - map.put("num", 42); - JSON_RESPONSE = objectMapper.writeValueAsString(map); - - server = new Server(7009); - ServletContextHandler servletContextHandler = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS); - servletContextHandler.setHandler(new EchoHandler()); - server.start(); - } - - @AfterClass - public static void cleanup() { - if(server != null) { - try { - server.stop(); - } catch (Exception e) { - e.printStackTrace(); - } - } - } - - @Before - public void setup() { - RestClientManager rcm = new RestClientManager(); - Configuration config = mock(Configuration.class); - when(config.getServerId()).thenReturn("test_server_id"); - httpTask = new HttpTask(rcm, config); - } - - @Test - public void testPost() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/post"); - Map body = new HashMap<>(); - body.put("input_key1", "value1"); - body.put("input_key2", 45.3d); - input.setBody(body); - input.setMethod("POST"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - assertEquals(task.getReasonForIncompletion(), Task.Status.COMPLETED, task.getStatus()); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertTrue("response is: " + response, response instanceof Map); - Map map = (Map) response; - Set inputKeys = body.keySet(); - Set responseKeys = map.keySet(); - inputKeys.containsAll(responseKeys); - responseKeys.containsAll(inputKeys); - } - - - @Test - public void testPostNoContent() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/post2"); - Map body = new HashMap<>(); - 
body.put("input_key1", "value1"); - body.put("input_key2", 45.3d); - input.setBody(body); - input.setMethod("POST"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - assertEquals(task.getReasonForIncompletion(), Task.Status.COMPLETED, task.getStatus()); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertNull("response is: " + response, response); - } - - @Test - public void testFailure() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/failure"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - assertEquals("Task output: " + task.getOutputData(), Task.Status.FAILED, task.getStatus()); - assertEquals(ERROR_RESPONSE, task.getReasonForIncompletion()); - - task.setStatus(Status.SCHEDULED); - task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); - httpTask.start(workflow, task, executor); - assertEquals(Task.Status.FAILED, task.getStatus()); - assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); - } - - @Test - public void testTextGET() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/text"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertEquals(TEXT_RESPONSE, response); - } - - @Test - public void testNumberGET() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/numeric"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertEquals(NUM_RESPONSE, response); - assertTrue(response instanceof Number); - } - - @Test - public void testJsonGET() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/json"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - Map hr = (Map) task.getOutputData().get("response"); - Object response = hr.get("body"); - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertTrue(response instanceof Map); - Map map = (Map) response; - assertEquals(JSON_RESPONSE, objectMapper.writeValueAsString(map)); - } - - @Test - public void testExecute() throws Exception { - - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/json"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - task.setStatus(Status.SCHEDULED); - task.setScheduledTime(0); - boolean executed = httpTask.execute(workflow, task, executor); - assertFalse(executed); - - } - - @Test - public void testOptional() throws Exception { - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/failure"); - input.setMethod("GET"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - 
httpTask.start(workflow, task, executor); - assertEquals("Task output: " + task.getOutputData(), Task.Status.FAILED, task.getStatus()); - assertEquals(ERROR_RESPONSE, task.getReasonForIncompletion()); - assertTrue(!task.getStatus().isSuccessful()); - - task.setStatus(Status.SCHEDULED); - task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); - task.setReferenceTaskName("t1"); - httpTask.start(workflow, task, executor); - assertEquals(Task.Status.FAILED, task.getStatus()); - assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); - assertTrue(!task.getStatus().isSuccessful()); - - Workflow workflow = new Workflow(); - workflow.getTasks().add(task); - - WorkflowDef def = new WorkflowDef(); - WorkflowTask wft = new WorkflowTask(); - wft.setOptional(true); - wft.setName("HTTP"); - wft.setWorkflowTaskType(Type.USER_DEFINED); - wft.setTaskReferenceName("t1"); - def.getTasks().add(wft); - MetadataDAO metadataDAO = mock(MetadataDAO.class); - ParametersUtils parametersUtils = new ParametersUtils(); - Map taskMappers = new HashMap<>(); - taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); - taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); - taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); - new DeciderService(metadataDAO, taskMappers).decide(workflow, def); - - System.out.println(workflow.getTasks()); - System.out.println(workflow.getStatus()); - } - - @Test - public void testOAuth() throws Exception { - Task task = new Task(); - Input input = new Input(); - input.setUri("http://localhost:7009/oauth"); - input.setMethod("POST"); - input.setOauthConsumerKey("someKey"); - input.setOauthConsumerSecret("someSecret"); - task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); - - httpTask.start(workflow, task, executor); - - Map response = (Map) task.getOutputData().get("response"); - Map body = (Map) response.get("body"); - - assertEquals("someKey", body.get("oauth_consumer_key")); - assertTrue("Should have OAuth nonce", body.containsKey("oauth_nonce")); - assertTrue("Should have OAuth signature", body.containsKey("oauth_signature")); - assertTrue("Should have OAuth signature method", body.containsKey("oauth_signature_method")); - assertTrue("Should have OAuth oauth_timestamp", body.containsKey("oauth_timestamp")); - assertTrue("Should have OAuth oauth_version", body.containsKey("oauth_version")); - - assertEquals("Task output: " + task.getOutputData(), Status.COMPLETED, task.getStatus()); - } - - private static class EchoHandler extends AbstractHandler { - - private TypeReference> mapOfObj = new TypeReference>() {}; - - @Override - public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) - throws IOException, ServletException { - if(request.getMethod().equals("GET") && request.getRequestURI().equals("/text")) { - PrintWriter writer = response.getWriter(); - writer.print(TEXT_RESPONSE); - writer.flush(); - writer.close(); - } else 
if(request.getMethod().equals("GET") && request.getRequestURI().equals("/json")) { - response.addHeader("Content-Type", "application/json"); - PrintWriter writer = response.getWriter(); - writer.print(JSON_RESPONSE); - writer.flush(); - writer.close(); - } else if(request.getMethod().equals("GET") && request.getRequestURI().equals("/failure")) { - response.addHeader("Content-Type", "text/plain"); - response.setStatus(500); - PrintWriter writer = response.getWriter(); - writer.print(ERROR_RESPONSE); - writer.flush(); - writer.close(); - } else if(request.getMethod().equals("POST") && request.getRequestURI().equals("/post")) { - response.addHeader("Content-Type", "application/json"); - BufferedReader reader = request.getReader(); - Map input = objectMapper.readValue(reader, mapOfObj); - Set keys = input.keySet(); - for(String key : keys) { - input.put(key, key); - } - PrintWriter writer = response.getWriter(); - writer.print(objectMapper.writeValueAsString(input)); - writer.flush(); - writer.close(); - } else if(request.getMethod().equals("POST") && request.getRequestURI().equals("/post2")) { - response.addHeader("Content-Type", "application/json"); - response.setStatus(204); - BufferedReader reader = request.getReader(); - Map input = objectMapper.readValue(reader, mapOfObj); - Set keys = input.keySet(); - System.out.println(keys); - response.getWriter().close(); - - } else if(request.getMethod().equals("GET") && request.getRequestURI().equals("/numeric")) { - PrintWriter writer = response.getWriter(); - writer.print(NUM_RESPONSE); - writer.flush(); - writer.close(); - } else if(request.getMethod().equals("POST") && request.getRequestURI().equals("/oauth")) { - //echo back oauth parameters generated in the Authorization header in the response - Map params = parseOauthParameters(request); - response.addHeader("Content-Type", "application/json"); - PrintWriter writer = response.getWriter(); - writer.print(objectMapper.writeValueAsString(params)); - writer.flush(); - writer.close(); - } - } - - private Map parseOauthParameters(HttpServletRequest request) { - String paramString = request.getHeader("Authorization").replaceAll("^OAuth (.*)", "$1"); - return Arrays.stream(paramString.split("\\s*,\\s*")) - .map(pair -> pair.split("=")) - .collect(Collectors.toMap(o -> o[0], o -> o[1].replaceAll("\"",""))); - } - } + private static final String ERROR_RESPONSE = "Something went wrong!"; + + private static final String TEXT_RESPONSE = "Text Response"; + + private static final double NUM_RESPONSE = 42.42d; + + private static String JSON_RESPONSE; + + private HttpTask httpTask; + + private WorkflowExecutor executor = mock(WorkflowExecutor.class); + + private Workflow workflow = new Workflow(); + + private static Server server; + + private static ObjectMapper objectMapper = new ObjectMapper(); + + @BeforeClass + public static void init() throws Exception { + + Map map = new HashMap<>(); + map.put("key", "value1"); + map.put("num", 42); + JSON_RESPONSE = objectMapper.writeValueAsString(map); + + server = new Server(7009); + ServletContextHandler servletContextHandler = new ServletContextHandler(server, "/", ServletContextHandler.SESSIONS); + servletContextHandler.setHandler(new EchoHandler()); + server.start(); + } + + @AfterClass + public static void cleanup() { + if (server != null) { + try { + server.stop(); + } catch (Exception e) { + e.printStackTrace(); + } + } + } + + @Before + public void setup() { + RestClientManager rcm = new RestClientManager(); + Configuration config = 
mock(Configuration.class); + when(config.getServerId()).thenReturn("test_server_id"); + httpTask = new HttpTask(rcm, config); + } + + @Test + public void testPost() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/post"); + Map body = new HashMap<>(); + body.put("input_key1", "value1"); + body.put("input_key2", 45.3d); + input.setBody(body); + input.setMethod("POST"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + assertEquals(task.getReasonForIncompletion(), Task.Status.COMPLETED, task.getStatus()); + Map hr = (Map) task.getOutputData().get("response"); + Object response = hr.get("body"); + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertTrue("response is: " + response, response instanceof Map); + Map map = (Map) response; + Set inputKeys = body.keySet(); + Set responseKeys = map.keySet(); + inputKeys.containsAll(responseKeys); + responseKeys.containsAll(inputKeys); + } + + + @Test + public void testPostNoContent() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/post2"); + Map body = new HashMap<>(); + body.put("input_key1", "value1"); + body.put("input_key2", 45.3d); + input.setBody(body); + input.setMethod("POST"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + assertEquals(task.getReasonForIncompletion(), Task.Status.COMPLETED, task.getStatus()); + Map hr = (Map) task.getOutputData().get("response"); + Object response = hr.get("body"); + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertNull("response is: " + response, response); + } + + @Test + public void testFailure() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/failure"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + assertEquals("Task output: " + task.getOutputData(), Task.Status.FAILED, task.getStatus()); + assertEquals(ERROR_RESPONSE, task.getReasonForIncompletion()); + + task.setStatus(Status.SCHEDULED); + task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); + httpTask.start(workflow, task, executor); + assertEquals(Task.Status.FAILED, task.getStatus()); + assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); + } + + @Test + public void testTextGET() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/text"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + Map hr = (Map) task.getOutputData().get("response"); + Object response = hr.get("body"); + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertEquals(TEXT_RESPONSE, response); + } + + @Test + public void testNumberGET() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/numeric"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + Map hr = (Map) task.getOutputData().get("response"); + Object response = hr.get("body"); + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertEquals(NUM_RESPONSE, response); + assertTrue(response instanceof Number); + } + + @Test + public void 
testJsonGET() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/json"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + Map hr = (Map) task.getOutputData().get("response"); + Object response = hr.get("body"); + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertTrue(response instanceof Map); + Map map = (Map) response; + assertEquals(JSON_RESPONSE, objectMapper.writeValueAsString(map)); + } + + @Test + public void testExecute() throws Exception { + + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/json"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + task.setStatus(Status.SCHEDULED); + task.setScheduledTime(0); + boolean executed = httpTask.execute(workflow, task, executor); + assertFalse(executed); + + } + + @Test + public void testOptional() throws Exception { + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/failure"); + input.setMethod("GET"); + task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + assertEquals("Task output: " + task.getOutputData(), Task.Status.FAILED, task.getStatus()); + assertEquals(ERROR_RESPONSE, task.getReasonForIncompletion()); + assertTrue(!task.getStatus().isSuccessful()); + + task.setStatus(Status.SCHEDULED); + task.getInputData().remove(HttpTask.REQUEST_PARAMETER_NAME); + task.setReferenceTaskName("t1"); + httpTask.start(workflow, task, executor); + assertEquals(Task.Status.FAILED, task.getStatus()); + assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); + assertTrue(!task.getStatus().isSuccessful()); + + WorkflowTask wft = new WorkflowTask(); + wft.setOptional(true); + wft.setName("HTTP"); + wft.setWorkflowTaskType(Type.USER_DEFINED); + wft.setTaskReferenceName("t1"); + + WorkflowDef def = new WorkflowDef(); + def.getTasks().add(wft); + + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.getTasks().add(task); + + MetadataDAO metadataDAO = mock(MetadataDAO.class); + ParametersUtils parametersUtils = new ParametersUtils(); + Map taskMappers = new HashMap<>(); + taskMappers.put("DECISION", new DecisionTaskMapper()); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); + taskMappers.put("JOIN", new JoinTaskMapper()); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); + taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); + new DeciderService(metadataDAO, taskMappers).decide(workflow); + + System.out.println(workflow.getTasks()); + System.out.println(workflow.getStatus()); + } + + @Test + public void testOAuth() throws Exception { + Task task = new Task(); + Input input = new Input(); + input.setUri("http://localhost:7009/oauth"); + input.setMethod("POST"); + input.setOauthConsumerKey("someKey"); + input.setOauthConsumerSecret("someSecret"); + 
task.getInputData().put(HttpTask.REQUEST_PARAMETER_NAME, input); + + httpTask.start(workflow, task, executor); + + Map response = (Map) task.getOutputData().get("response"); + Map body = (Map) response.get("body"); + + assertEquals("someKey", body.get("oauth_consumer_key")); + assertTrue("Should have OAuth nonce", body.containsKey("oauth_nonce")); + assertTrue("Should have OAuth signature", body.containsKey("oauth_signature")); + assertTrue("Should have OAuth signature method", body.containsKey("oauth_signature_method")); + assertTrue("Should have OAuth oauth_timestamp", body.containsKey("oauth_timestamp")); + assertTrue("Should have OAuth oauth_version", body.containsKey("oauth_version")); + + assertEquals("Task output: " + task.getOutputData(), Status.COMPLETED, task.getStatus()); + } + + private static class EchoHandler extends AbstractHandler { + + private TypeReference> mapOfObj = new TypeReference>() { + }; + + @Override + public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) + throws IOException, ServletException { + if (request.getMethod().equals("GET") && request.getRequestURI().equals("/text")) { + PrintWriter writer = response.getWriter(); + writer.print(TEXT_RESPONSE); + writer.flush(); + writer.close(); + } else if (request.getMethod().equals("GET") && request.getRequestURI().equals("/json")) { + response.addHeader("Content-Type", "application/json"); + PrintWriter writer = response.getWriter(); + writer.print(JSON_RESPONSE); + writer.flush(); + writer.close(); + } else if (request.getMethod().equals("GET") && request.getRequestURI().equals("/failure")) { + response.addHeader("Content-Type", "text/plain"); + response.setStatus(500); + PrintWriter writer = response.getWriter(); + writer.print(ERROR_RESPONSE); + writer.flush(); + writer.close(); + } else if (request.getMethod().equals("POST") && request.getRequestURI().equals("/post")) { + response.addHeader("Content-Type", "application/json"); + BufferedReader reader = request.getReader(); + Map input = objectMapper.readValue(reader, mapOfObj); + Set keys = input.keySet(); + for (String key : keys) { + input.put(key, key); + } + PrintWriter writer = response.getWriter(); + writer.print(objectMapper.writeValueAsString(input)); + writer.flush(); + writer.close(); + } else if (request.getMethod().equals("POST") && request.getRequestURI().equals("/post2")) { + response.addHeader("Content-Type", "application/json"); + response.setStatus(204); + BufferedReader reader = request.getReader(); + Map input = objectMapper.readValue(reader, mapOfObj); + Set keys = input.keySet(); + System.out.println(keys); + response.getWriter().close(); + + } else if (request.getMethod().equals("GET") && request.getRequestURI().equals("/numeric")) { + PrintWriter writer = response.getWriter(); + writer.print(NUM_RESPONSE); + writer.flush(); + writer.close(); + } else if (request.getMethod().equals("POST") && request.getRequestURI().equals("/oauth")) { + //echo back oauth parameters generated in the Authorization header in the response + Map params = parseOauthParameters(request); + response.addHeader("Content-Type", "application/json"); + PrintWriter writer = response.getWriter(); + writer.print(objectMapper.writeValueAsString(params)); + writer.flush(); + writer.close(); + } + } + + private Map parseOauthParameters(HttpServletRequest request) { + String paramString = request.getHeader("Authorization").replaceAll("^OAuth (.*)", "$1"); + return Arrays.stream(paramString.split("\\s*,\\s*")) + 
.map(pair -> pair.split("=")) + .collect(Collectors.toMap(o -> o[0], o -> o[1].replaceAll("\"", ""))); + } + } } diff --git a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java index 6478c682b8..2dff29ac0c 100644 --- a/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java +++ b/core/src/main/java/com/netflix/conductor/core/events/ActionProcessor.java @@ -18,28 +18,30 @@ */ package com.netflix.conductor.core.events; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; + +import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.events.EventHandler.Action; import com.netflix.conductor.common.metadata.events.EventHandler.StartWorkflow; import com.netflix.conductor.common.metadata.events.EventHandler.TaskDetails; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.service.MetadataService; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; -import javax.inject.Singleton; import java.util.HashMap; import java.util.List; import java.util.Map; +import javax.inject.Inject; +import javax.inject.Singleton; + /** * @author Viren * Action Processor subscribes to the Event Actions queue and processes the actions (e.g. start workflow etc) @@ -130,13 +132,12 @@ private Map startWorkflow(Action action, Object payload, String Map op = new HashMap<>(); try { - WorkflowDef def = metadata.getWorkflowDef(params.getName(), params.getVersion()); Map inputParams = params.getInput(); Map workflowInput = pu.replace(inputParams, payload); workflowInput.put("conductor.event.messageId", messageId); workflowInput.put("conductor.event.name", event); - String id = executor.startWorkflow(def.getName(), def.getVersion(), params.getCorrelationId(), workflowInput, event); + String id = executor.startWorkflow(params.getName(), params.getVersion(), params.getCorrelationId(), workflowInput, event); op.put("workflowId", id); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 307a6e5d0e..3f45fdcc16 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -19,6 +19,7 @@ package com.netflix.conductor.core.execution; import com.google.common.annotations.VisibleForTesting; + import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -32,11 +33,10 @@ import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.metrics.Monitors; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; -import javax.inject.Named; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; @@ -46,6 +46,9 @@ import java.util.Set; 
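In the ActionProcessor hunk above, the event action no longer fetches the WorkflowDef itself; it hands the name and version to the executor, which becomes the single owner of definition lookup and validation. A hedged sketch of that inversion; every type and method name here is a hypothetical stand-in, not the Conductor API:

import java.util.Map;

/** Hypothetical sketch: callers pass identifiers; only the executor resolves them. */
class StartWorkflowSketch {

    /** Stand-in for the metadata lookup the executor now performs internally. */
    static String lookupDefinition(String name, Integer version) {
        // In the real code this is a DAO call; here we just simulate a hit or a miss.
        return "demo".equals(name) ? "demo-def" : null;
    }

    /** Callers no longer resolve the definition; they pass name/version through. */
    static String startWorkflow(String name, Integer version, Map<String, Object> input) {
        String def = lookupDefinition(name, version); // single point of resolution
        if (def == null) {
            throw new IllegalArgumentException(
                    "No such workflow defined. name=" + name + ", version=" + version);
        }
        return "workflow-id-for-" + def; // stand-in for persisting and starting the instance
    }

    public static void main(String[] args) {
        System.out.println(startWorkflow("demo", null, Map.of("key", "value")));
    }
}

Centralizing the lookup means the not-found error is raised in one place (and, in the real patch, recorded via Monitors) instead of each caller duplicating the check.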
import java.util.stream.Collectors; +import javax.inject.Inject; +import javax.inject.Named; + import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED_WITH_ERRORS; import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS; import static com.netflix.conductor.common.metadata.tasks.Task.Status.READY_FOR_RERUN; @@ -76,9 +79,8 @@ public DeciderService(MetadataDAO metadataDAO, @Named("TaskMappers") Map tasks = workflow.getTasks(); //In case of a new workflow the list of executedTasks will also be empty @@ -89,15 +91,15 @@ public DeciderOutcome decide(Workflow workflow, WorkflowDef workflowDef) throws List tasksToBeScheduled = new LinkedList<>(); if (executedTasks.isEmpty()) { //this is the flow that the new workflow will go through - tasksToBeScheduled = startWorkflow(workflow, workflowDef); + tasksToBeScheduled = startWorkflow(workflow); if (tasksToBeScheduled == null) { tasksToBeScheduled = new LinkedList<>(); } } - return decide(workflowDef, workflow, tasksToBeScheduled); + return decide(workflow, tasksToBeScheduled); } - private DeciderOutcome decide(final WorkflowDef workflowDef, final Workflow workflow, List preScheduledTasks) throws TerminateWorkflowException { + private DeciderOutcome decide(final Workflow workflow, List preScheduledTasks) throws TerminateWorkflowException { DeciderOutcome outcome = new DeciderOutcome(); @@ -155,7 +157,7 @@ private DeciderOutcome decide(final WorkflowDef workflowDef, final Workflow work if (!pendingTask.getStatus().isSuccessful()) { WorkflowTask workflowTask = pendingTask.getWorkflowTask(); if (workflowTask == null) { - workflowTask = workflowDef.getTaskByRefName(pendingTask.getReferenceTaskName()); + workflowTask = workflow.getWorkflowDefinition().getTaskByRefName(pendingTask.getReferenceTaskName()); } if (workflowTask != null && workflowTask.isOptional()) { pendingTask.setStatus(COMPLETED_WITH_ERRORS); @@ -169,7 +171,7 @@ private DeciderOutcome decide(final WorkflowDef workflowDef, final Workflow work if (!pendingTask.isExecuted() && !pendingTask.isRetried() && pendingTask.getStatus().isTerminal()) { pendingTask.setExecuted(true); - List nextTasks = getNextTask(workflowDef, workflow, pendingTask); + List nextTasks = getNextTask(workflow, pendingTask); nextTasks.forEach(nextTask -> tasksToBeScheduled.putIfAbsent(nextTask.getReferenceTaskName(), nextTask)); outcome.tasksToBeUpdated.add(pendingTask); logger.debug("Scheduling Tasks from {}, next = {}", pendingTask.getTaskDefName(), @@ -189,8 +191,8 @@ private DeciderOutcome decide(final WorkflowDef workflowDef, final Workflow work .collect(Collectors.toList())); outcome.tasksToBeScheduled.addAll(unScheduledTasks); } - updateOutput(workflowDef, workflow); - if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflowDef, workflow)) { + updateOutput(workflow); + if (outcome.tasksToBeScheduled.isEmpty() && checkForWorkflowCompletion(workflow)) { logger.debug("Marking workflow as complete. 
workflow=" + workflow.getWorkflowId() + ", tasks=" + workflow.getTasks()); outcome.isComplete = true; } @@ -198,7 +200,8 @@ private DeciderOutcome decide(final WorkflowDef workflowDef, final Workflow work return outcome; } - private List startWorkflow(Workflow workflow, WorkflowDef def) throws TerminateWorkflowException { + private List startWorkflow(Workflow workflow) throws TerminateWorkflowException { + final WorkflowDef def = workflow.getWorkflowDefinition(); logger.debug("Starting workflow " + def.getName() + "/" + workflow.getWorkflowId()); //The tasks will be empty in case of new workflow @@ -217,7 +220,7 @@ private List startWorkflow(Workflow workflow, WorkflowDef def) throws Term } //In case of a new workflow a the first non-skippable task will be scheduled - return getTasksToBeScheduled(def, workflow, taskToSchedule, 0); + return getTasksToBeScheduled(workflow, taskToSchedule, 0); } // Get the first task to schedule @@ -240,7 +243,8 @@ private List startWorkflow(Workflow workflow, WorkflowDef def) throws Term } - private void updateOutput(final WorkflowDef def, final Workflow workflow) { + private void updateOutput(final Workflow workflow) { + final WorkflowDef def = workflow.getWorkflowDefinition(); List allTasks = workflow.getTasks(); if (allTasks.isEmpty()) { @@ -257,8 +261,7 @@ private void updateOutput(final WorkflowDef def, final Workflow workflow) { workflow.setOutput(output); } - private boolean checkForWorkflowCompletion(final WorkflowDef def, final Workflow workflow) throws TerminateWorkflowException { - + private boolean checkForWorkflowCompletion(final Workflow workflow) throws TerminateWorkflowException { List allTasks = workflow.getTasks(); if (allTasks.isEmpty()) { return false; @@ -267,7 +270,7 @@ private boolean checkForWorkflowCompletion(final WorkflowDef def, final Workflow Map taskStatusMap = new HashMap<>(); workflow.getTasks().forEach(task -> taskStatusMap.put(task.getReferenceTaskName(), task.getStatus())); - LinkedList wftasks = def.getTasks(); + LinkedList wftasks = workflow.getWorkflowDefinition().getTasks(); boolean allCompletedSuccessfully = wftasks.stream().parallel().allMatch(wftask -> { Status status = taskStatusMap.get(wftask.getTaskReferenceName()); return status != null && status.isSuccessful() && status.isTerminal(); @@ -278,7 +281,7 @@ private boolean checkForWorkflowCompletion(final WorkflowDef def, final Workflow .allMatch(Status::isTerminal); boolean noPendingSchedule = workflow.getTasks().stream().parallel().filter(wftask -> { - String next = getNextTasksToBeScheduled(def, workflow, wftask); + String next = getNextTasksToBeScheduled(workflow, wftask); return next != null && !taskStatusMap.containsKey(next); }).collect(Collectors.toList()).isEmpty(); @@ -290,7 +293,8 @@ private boolean checkForWorkflowCompletion(final WorkflowDef def, final Workflow } @VisibleForTesting - List getNextTask(WorkflowDef def, Workflow workflow, Task task) { + List getNextTask(Workflow workflow, Task task) { + final WorkflowDef def = workflow.getWorkflowDefinition(); // Get the following task after the last completed task if (SystemTaskType.is(task.getTaskType()) && SystemTaskType.DECISION.name().equals(task.getTaskType())) { @@ -305,13 +309,14 @@ List getNextTask(WorkflowDef def, Workflow workflow, Task task) { taskToSchedule = def.getNextTask(taskToSchedule.getTaskReferenceName()); } if (taskToSchedule != null) { - return getTasksToBeScheduled(def, workflow, taskToSchedule, 0); + return getTasksToBeScheduled(workflow, taskToSchedule, 0); } return 
Collections.emptyList(); } - private String getNextTasksToBeScheduled(WorkflowDef def, Workflow workflow, Task task) { + private String getNextTasksToBeScheduled(Workflow workflow, Task task) { + final WorkflowDef def = workflow.getWorkflowDefinition(); String taskReferenceName = task.getReferenceTaskName(); WorkflowTask taskToSchedule = def.getNextTask(taskReferenceName); @@ -432,12 +437,12 @@ private void timeoutTask(Task task) { task.setReasonForIncompletion(reason); } - public List getTasksToBeScheduled(WorkflowDef def, Workflow workflow, + public List getTasksToBeScheduled(Workflow workflow, WorkflowTask taskToSchedule, int retryCount) { - return getTasksToBeScheduled(def, workflow, taskToSchedule, retryCount, null); + return getTasksToBeScheduled(workflow, taskToSchedule, retryCount, null); } - public List getTasksToBeScheduled(WorkflowDef workflowDefinition, Workflow workflowInstance, + public List getTasksToBeScheduled(Workflow workflowInstance, WorkflowTask taskToSchedule, int retryCount, String retriedTaskId) { Map input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), @@ -456,7 +461,7 @@ public List getTasksToBeScheduled(WorkflowDef workflowDefinition, Workflow .collect(Collectors.toList()); String taskId = IDGenerator.generate(); - TaskMapperContext taskMapperContext = new TaskMapperContext(workflowDefinition, workflowInstance, taskToSchedule, + TaskMapperContext taskMapperContext = new TaskMapperContext(workflowInstance, taskToSchedule, input, retryCount, retriedTaskId, taskId, this); // for static forks, each branch of the fork creates a join task upon completion diff --git a/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java b/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java index 2530ef4fac..5a724f21c1 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java @@ -15,9 +15,10 @@ */ package com.netflix.conductor.core.execution; +import com.google.common.base.Preconditions; + import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; import com.jayway.jsonpath.Configuration; import com.jayway.jsonpath.DocumentContext; import com.jayway.jsonpath.JsonPath; @@ -87,8 +88,8 @@ public Map getTaskInputV2(Map input, Workflow wo wf.put("workflowId", workflow.getWorkflowId()); wf.put("parentWorkflowId", workflow.getParentWorkflowId()); wf.put("parentWorkflowTaskId", workflow.getParentWorkflowTaskId()); - wf.put("workflowType", workflow.getWorkflowType()); - wf.put("version", workflow.getVersion()); + wf.put("workflowType", workflow.getWorkflowName()); + wf.put("version", workflow.getWorkflowVersion()); wf.put("correlationId", workflow.getCorrelationId()); wf.put("reasonForIncompletion", workflow.getReasonForIncompletion()); wf.put("schemaVersion", workflow.getSchemaVersion()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 592b834a67..e03e0ae225 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -1,17 +1,14 @@ /** * Copyright 2016 Netflix, Inc. *
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at *
* http://www.apache.org/licenses/LICENSE-2.0 *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** * @@ -53,12 +50,15 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; import javax.inject.Inject; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; import static com.netflix.conductor.common.metadata.tasks.Task.Status.CANCELED; import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; @@ -95,7 +95,13 @@ public class WorkflowExecutor { private int activeWorkerLastPollnSecs; @Inject - public WorkflowExecutor(DeciderService deciderService, MetadataDAO metadataDAO, ExecutionDAO executionDAO, QueueDAO queueDAO, Configuration config) { + public WorkflowExecutor( + DeciderService deciderService, + MetadataDAO metadataDAO, + ExecutionDAO executionDAO, + QueueDAO queueDAO, + Configuration config + ) { this.deciderService = deciderService; this.metadataDAO = metadataDAO; this.executionDAO = executionDAO; @@ -104,20 +110,112 @@ public WorkflowExecutor(DeciderService deciderService, MetadataDAO metadataDAO, activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); } - public String startWorkflow(String name, int version, String correlationId, Map input) throws Exception { + /** + * @throws ApplicationException + */ + public String startWorkflow(String name, Integer version, String correlationId, Map input) { return startWorkflow(name, version, correlationId, input, null); } - public String startWorkflow(String name, int version, String correlationId, Map input, String event) throws Exception { - return startWorkflow(name, version, input, correlationId, null, null, event); + /** + * @throws ApplicationException + */ + public String startWorkflow(String name, Integer version, String correlationId, Map input, String event) { + return startWorkflow( + name, + version, + input, + correlationId, + null, + null, + event + ); + } + + /** + * @throws ApplicationException + */ + public String startWorkflow( + String name, + Integer version, + String correlationId, + Map input, + String event, + Map taskToDomain + ) { + return startWorkflow( + name, + version, + input, + correlationId, + null, + null, + event, + taskToDomain + ); } - public String startWorkflow(String name, int version, String correlationId, Map input, String event, Map taskToDomain) throws Exception { - return startWorkflow(name, version, input, correlationId, null, null, event, taskToDomain); + /** + * @throws ApplicationException + */ + public String startWorkflow( + String name, + Integer version, + Map input, + String correlationId, + String parentWorkflowId, + String parentWorkflowTaskId, + String event + ) { + return startWorkflow( + name, + 
version, + input, + correlationId, + parentWorkflowId, + parentWorkflowTaskId, + event, + null + ); } - public String startWorkflow(String name, int version, Map input, String correlationId, String parentWorkflowId, String parentWorkflowTaskId, String event) throws Exception { - return startWorkflow(name, version, input, correlationId, parentWorkflowId, parentWorkflowTaskId, event, null); + /** + * @throws ApplicationException + */ + public String startWorkflow( + String name, + Integer version, + Map workflowInput, + String correlationId, + String parentWorkflowId, + String parentWorkflowTaskId, + String event, + Map taskToDomain + ) { + + Optional potentialDef = + version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); + + //Check if the workflow definition is valid + WorkflowDef workflowDefinition = potentialDef + .orElseThrow(() -> { + logger.error("There is no workflow defined with name {} and version {}", name, version); + return new ApplicationException( + Code.NOT_FOUND, + String.format("No such workflow defined. name=%s, version=%s", name, version) + ); + } + ); + + return startWorkflow( + workflowDefinition, + workflowInput, + correlationId, + parentWorkflowId, + parentWorkflowTaskId, + event, + taskToDomain + ); } private final Predicate validateLastPolledTime = pd -> pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000); @@ -126,70 +224,71 @@ public String startWorkflow(String name, int version, Map input, private final Predicate isNonTerminalTask = task -> !task.getStatus().isTerminal(); - public String startWorkflow(String workflowName, int workflowVersion, Map workflowInput, - String correlationId, String parentWorkflowId, String parentWorkflowTaskId, - String event, Map taskToDomain) throws Exception { - - - try { - //Check if the input to the workflow is not null - //QQ When is the payload of the input validated - if (workflowInput == null) { - logger.error("The input for the workflow {} cannot be NULL", workflowName); - throw new ApplicationException(INVALID_INPUT, "NULL input passed when starting workflow"); - } - - //Check if the workflow definition is valid - WorkflowDef workflowDefinition = metadataDAO.get(workflowName, workflowVersion); - if (workflowDefinition == null) { - logger.error("There is no workflow defined with name {} and version {}", workflowName, workflowVersion); - throw new ApplicationException(Code.NOT_FOUND, "No such workflow defined. 
name=" + workflowName + ", version=" + workflowVersion); - } - - //because everything else is a system defined task - Set missingTaskDefs = workflowDefinition.all().stream() - .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name())) - .map(wft2 -> wft2.getName()) - .filter(task -> metadataDAO.getTaskDef(task) == null) - .collect(Collectors.toSet()); - - if (!missingTaskDefs.isEmpty()) { - logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs); - throw new ApplicationException(INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); - } - //A random UUID is assigned to the work flow instance - String workflowId = IDGenerator.generate(); - - // Persist the Workflow - Workflow wf = new Workflow(); - wf.setWorkflowId(workflowId); - wf.setCorrelationId(correlationId); - wf.setWorkflowType(workflowName); - wf.setVersion(workflowVersion); - wf.setInput(workflowInput); - wf.setStatus(WorkflowStatus.RUNNING); - wf.setParentWorkflowId(parentWorkflowId); - wf.setParentWorkflowTaskId(parentWorkflowTaskId); - wf.setOwnerApp(WorkflowContext.get().getClientApp()); - wf.setCreateTime(System.currentTimeMillis()); - wf.setUpdatedBy(null); - wf.setUpdateTime(null); - wf.setEvent(event); - wf.setTaskToDomain(taskToDomain); - executionDAO.createWorkflow(wf); - logger.info("A new instance of workflow {} created with workflow id {}", workflowName, workflowId); - //then decide to see if anything needs to be done as part of the workflow - decide(workflowId); - - return workflowId; + /** + * @throws ApplicationException + */ + public String startWorkflow( + WorkflowDef workflowDefinition, + Map workflowInput, + String correlationId, + String parentWorkflowId, + String parentWorkflowTaskId, + String event, + Map taskToDomain + ) { + //Check if the input to the workflow is not null + //QQ When is the payload of the input validated + if (workflowInput == null) { + logger.error("The input for the workflow {} cannot be NULL", workflowDefinition.getName()); + Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); + throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); + } - } catch (Exception e) { - Monitors.recordWorkflowStartError(workflowName, WorkflowContext.get().getClientApp()); - throw e; + //because everything else is a system defined task + Set missingTaskDefs = workflowDefinition.all().stream() + .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name())) + .map(wft2 -> wft2.getName()) + .filter(task -> metadataDAO.getTaskDef(task) == null) + .collect(Collectors.toSet()); + + if (!missingTaskDefs.isEmpty()) { + logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs); + Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); + throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); } + //A random UUID is assigned to the work flow instance + String workflowId = IDGenerator.generate(); + + // Persist the Workflow + Workflow wf = new Workflow(); + wf.setWorkflowId(workflowId); + wf.setCorrelationId(correlationId); + wf.setWorkflowDefinition(workflowDefinition); + wf.setInput(workflowInput); + wf.setStatus(WorkflowStatus.RUNNING); + wf.setParentWorkflowId(parentWorkflowId); + 
wf.setParentWorkflowTaskId(parentWorkflowTaskId); + wf.setOwnerApp(WorkflowContext.get().getClientApp()); + wf.setCreateTime(System.currentTimeMillis()); + wf.setUpdatedBy(null); + wf.setUpdateTime(null); + wf.setEvent(event); + wf.setTaskToDomain(taskToDomain); + executionDAO.createWorkflow(wf); + logger.info("A new instance of workflow {} created with workflow id {}", wf.getWorkflowName(), workflowId); + //then decide to see if anything needs to be done as part of the workflow + decide(workflowId); + + return workflowId; } - public String resetCallbacksForInProgressTasks(String workflowId) throws Exception { + /** + * + * @param workflowId + * @return + * @throws ApplicationException + */ + public String resetCallbacksForInProgressTasks(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); if (workflow.getStatus().isTerminal()) { throw new ApplicationException(CONFLICT, "Workflow is completed. status=" + workflow.getStatus()); @@ -210,7 +309,7 @@ public String resetCallbacksForInProgressTasks(String workflowId) throws Excepti return workflowId; } - public String rerun(RerunWorkflowRequest request) throws Exception { + public String rerun(RerunWorkflowRequest request) { Preconditions.checkNotNull(request.getReRunFromWorkflowId(), "reRunFromWorkflowId is missing"); if (!rerunWF(request.getReRunFromWorkflowId(), request.getReRunFromTaskId(), request.getTaskInput(), request.getWorkflowInput(), request.getCorrelationId())) { @@ -219,13 +318,18 @@ public String rerun(RerunWorkflowRequest request) throws Exception { return request.getReRunFromWorkflowId(); } - public void rewind(String workflowId) throws Exception { + /** + * + * @param workflowId + * @throws ApplicationException + */ + public void rewind(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { throw new ApplicationException(CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } - WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()); + WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()).get(); if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non completed workflows when the configuration is set to false throw new ApplicationException(CONFLICT, String.format("WorkflowId: %s is an instance of WorkflowDef: %s and version: %d and is non restartable", workflowId, workflowDef.getName(), workflowDef.getVersion())); @@ -243,7 +347,10 @@ public void rewind(String workflowId) throws Exception { decide(workflowId); } - public void retry(String workflowId) throws Exception { + /** + * @throws ApplicationException + */ + public void retry(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); if (!workflow.getStatus().isTerminal()) { throw new ApplicationException(CONFLICT, "Workflow is still running. 
status=" + workflow.getStatus()); @@ -320,13 +427,18 @@ public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId .orElse(null); } + /** + * + * @param wf + * @throws ApplicationException + */ @VisibleForTesting - void completeWorkflow(Workflow wf) throws Exception { + void completeWorkflow(Workflow wf) { logger.debug("Completing workflow execution for {}", wf.getWorkflowId()); Workflow workflow = executionDAO.getWorkflow(wf.getWorkflowId(), false); if (workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { - executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId()); + executionDAO.removeFromPendingWorkflow(workflow.getWorkflowName(), workflow.getWorkflowId()); logger.info("Workflow has already been completed. Current status={}, workflowId= {}", workflow.getStatus(), wf.getWorkflowId()); return; } @@ -349,18 +461,44 @@ void completeWorkflow(Workflow wf) throws Exception { logger.debug("Completed sub-workflow {}, deciding parent workflow {}", wf.getWorkflowId(), wf.getParentWorkflowId()); decide(parent.getWorkflowId()); } - Monitors.recordWorkflowCompletion(workflow.getWorkflowType(), workflow.getEndTime() - workflow.getStartTime(), wf.getOwnerApp()); + Monitors.recordWorkflowCompletion(workflow.getWorkflowName(), workflow.getEndTime() - workflow.getStartTime(), wf.getOwnerApp()); queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue logger.debug("Removed workflow {} from decider queue", wf.getWorkflowId()); } - public void terminateWorkflow(String workflowId, String reason) throws Exception { + @VisibleForTesting + Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { + // FIXME: Add messages. + checkNotNull(workflowName); + checkArgument(StringUtils.isNotBlank(workflowName)); + checkArgument(workflowVersion > 0); + + return metadataDAO.get(workflowName, workflowVersion); + } + + @VisibleForTesting + Optional lookupLatestWorkflowDefinition(String workflowName) { + // FIXME: Add messages. 
+ checkNotNull(workflowName); + checkArgument(StringUtils.isNotBlank(workflowName)); + + return metadataDAO.getLatest(workflowName); + } + + public void terminateWorkflow(String workflowId, String reason) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.TERMINATED); terminateWorkflow(workflow, reason, null); } - public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) throws Exception { + /** + * + * @param workflow + * @param reason + * @param failureWorkflow + * @throws ApplicationException + */ + public void terminateWorkflow(Workflow workflow, String reason, String failureWorkflow) { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(WorkflowStatus.TERMINATED); @@ -377,7 +515,15 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo task.setStatus(CANCELED); if (isSystemTask.test(task)) { WorkflowSystemTask stt = WorkflowSystemTask.get(task.getTaskType()); - stt.cancel(workflow, task, this); + try { + stt.cancel(workflow, task, this); + } catch (Exception e) { + throw new ApplicationException( + Code.INTERNAL_ERROR, + String.format("Error canceling systems task: %s", stt.getName()), + e + ); + } //SystemTaskType.valueOf(task.getTaskType()).cancel(workflow, task, this); } executionDAO.updateTask(task); @@ -402,8 +548,21 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo try { - WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow); - String failureWFId = startWorkflow(failureWorkflow, latestFailureWorkflow.getVersion(), input, workflowId, null, null, null); + WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow) + .orElseThrow(() -> + new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) + ); + + String failureWFId = startWorkflow( + latestFailureWorkflow, + input, + workflowId, + null, + null, + null, + null + ); + workflow.getOutput().put("conductor.failure_workflow", failureWFId); } catch (Exception e) { @@ -414,14 +573,18 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo } queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue - executionDAO.removeFromPendingWorkflow(workflow.getWorkflowType(), workflow.getWorkflowId()); + executionDAO.removeFromPendingWorkflow(workflow.getWorkflowName(), workflow.getWorkflowId()); // Send to atlas - Monitors.recordWorkflowTermination(workflow.getWorkflowType(), workflow.getStatus(), workflow.getOwnerApp()); + Monitors.recordWorkflowTermination(workflow.getWorkflowName(), workflow.getStatus(), workflow.getOwnerApp()); } - - public void updateTask(TaskResult taskResult) throws Exception { + /** + * + * @param taskResult + * @throws ApplicationException + */ + public void updateTask(TaskResult taskResult) { if (taskResult == null) { logger.info("null task given for update..." 
+ taskResult); throw new ApplicationException(Code.INVALID_INPUT, "Task object is null"); @@ -449,7 +612,7 @@ public void updateTask(TaskResult taskResult) throws Exception { String msg = String.format("Workflow %s is already completed as %s, task=%s, reason=%s", workflowInstance.getWorkflowId(), workflowInstance.getStatus(), task.getTaskType(), workflowInstance.getReasonForIncompletion()); logger.info(msg); - Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), workflowInstance.getStatus()); + Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowName(), workflowInstance.getStatus()); return; } @@ -460,7 +623,7 @@ public void updateTask(TaskResult taskResult) throws Exception { String msg = String.format("Task is already completed as %s@%d, workflow status=%s, workflowId=%s, taskId=%s", task.getStatus(), task.getEndTime(), workflowInstance.getStatus(), workflowInstance.getWorkflowId(), task.getTaskId()); logger.info(msg); - Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowType(), task.getStatus()); + Monitors.recordUpdateConflict(task.getTaskType(), workflowInstance.getWorkflowName(), task.getStatus()); return; } @@ -485,14 +648,7 @@ public void updateTask(TaskResult taskResult) throws Exception { //In case of a FAILED_WITH_TERMINAL_ERROR the workflow will be terminated and the output of the task is never copied //ensuring the task output is copied to the workflow here if (FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) { - //Update the task in the workflow instance - Task taskByRefName = workflowInstance.getTaskByRefName(task.getReferenceTaskName()); - taskByRefName.setStatus(task.getStatus()); - taskByRefName.setOutputData(task.getOutputData()); - taskByRefName.setReasonForIncompletion(task.getReasonForIncompletion()); - taskByRefName.setWorkerId(task.getWorkerId()); - taskByRefName.setCallbackAfterSeconds(task.getCallbackAfterSeconds()); - WorkflowDef workflowDef = metadataDAO.get(workflowInstance.getWorkflowType(), workflowInstance.getVersion()); + WorkflowDef workflowDef = workflowInstance.getWorkflowDefinition(); Map outputData = task.getOutputData(); if (!workflowDef.getOutputParameters().isEmpty()) { outputData = parametersUtils.getTaskInput(workflowDef.getOutputParameters(), workflowInstance, null, null); @@ -538,11 +694,11 @@ public void updateTask(TaskResult taskResult) throws Exception { } - public List getTasks(String taskType, String startKey, int count) throws Exception { + public List getTasks(String taskType, String startKey, int count) { return executionDAO.getTasks(taskType, startKey, count); } - public List getRunningWorkflows(String workflowName) throws Exception { + public List getRunningWorkflows(String workflowName) { return executionDAO.getPendingWorkflowsByType(workflowName); } @@ -550,30 +706,29 @@ public List getRunningWorkflows(String workflowName) throws Exception public List getWorkflows(String name, Integer version, Long startTime, Long endTime) { List workflowsByType = executionDAO.getWorkflowsByType(name, startTime, endTime); return workflowsByType.stream() - .filter(workflow -> workflow.getVersion() == version) + .filter(workflow -> workflow.getWorkflowVersion() == version) .map(Workflow::getWorkflowId) .collect(Collectors.toList()); } - public List getRunningWorkflowIds(String workflowName) throws Exception { + public List getRunningWorkflowIds(String workflowName) { return executionDAO.getRunningWorkflowIds(workflowName); } /** * @param workflowId ID of the 
workflow to evaluate the state for * @return true if the workflow has completed (success or failed), false otherwise. - * @throws Exception If there was an error - caller should retry in this case. + * @throws ApplicationException If there was an error - caller should retry in this case. */ - public boolean decide(String workflowId) throws Exception { + public boolean decide(String workflowId) { //If it is a new workflow the tasks will be still empty even though include tasks is true Workflow workflow = executionDAO.getWorkflow(workflowId, true); - //QQ the definition can be null here - WorkflowDef def = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()); + // FIXME: The workflow could be null. try { - DeciderOutcome outcome = deciderService.decide(workflow, def); + DeciderOutcome outcome = deciderService.decide(workflow); if (outcome.isComplete) { completeWorkflow(workflow); return true; @@ -593,9 +748,18 @@ public boolean decide(String workflowId) throws Exception { for (Task task : tasksToBeScheduled) { if (isSystemTask.and(isNonTerminalTask).test(task)) { WorkflowSystemTask workflowSystemTask = WorkflowSystemTask.get(task.getTaskType()); - if (!workflowSystemTask.isAsync() && workflowSystemTask.execute(workflow, task, this)) { - tasksToBeUpdated.add(task); - stateChanged = true; + + try { + if (!workflowSystemTask.isAsync() && workflowSystemTask.execute(workflow, task, this)) { + tasksToBeUpdated.add(task); + stateChanged = true; + } + } catch (Exception e) { + throw new ApplicationException( + Code.INTERNAL_ERROR, + String.format("Unable to start system task: %s", workflowSystemTask.getName()), + e + ); } } } @@ -614,16 +778,16 @@ public boolean decide(String workflowId) throws Exception { } catch (TerminateWorkflowException tw) { logger.debug(tw.getMessage(), tw); - terminate(def, workflow, tw); + terminate(workflow, tw); return true; - } catch (Exception e) { - logger.error("Error deciding workflow: {}", workflowId, e); - throw e; } return false; } - public void pauseWorkflow(String workflowId) throws Exception { + /** + * @throws ApplicationException + */ + public void pauseWorkflow(String workflowId) { WorkflowStatus status = WorkflowStatus.PAUSED; Workflow workflow = executionDAO.getWorkflow(workflowId, false); if (workflow.getStatus().isTerminal()) { @@ -636,7 +800,12 @@ public void pauseWorkflow(String workflowId) throws Exception { executionDAO.updateWorkflow(workflow); } - public void resumeWorkflow(String workflowId) throws Exception { + /** + * + * @param workflowId + * @throws IllegalStateException + */ + public void resumeWorkflow(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, false); if (!workflow.getStatus().equals(WorkflowStatus.PAUSED)) { throw new IllegalStateException("The workflow " + workflowId + " is PAUSED so cannot resume"); @@ -646,7 +815,14 @@ public void resumeWorkflow(String workflowId) throws Exception { decide(workflowId); } - public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) throws Exception { + /** + * + * @param workflowId + * @param taskReferenceName + * @param skipTaskRequest + * @throws IllegalStateException + */ + public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { Workflow wf = executionDAO.getWorkflow(workflowId, true); @@ -656,10 +832,9 @@ public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, Sk throw new IllegalStateException(errorMsg); } // Check if the 
reference name is as per the workflowdef - WorkflowDef wfd = metadataDAO.get(wf.getWorkflowType(), wf.getVersion()); - WorkflowTask wft = wfd.getTaskByRefName(taskReferenceName); + WorkflowTask wft = wf.getWorkflowDefinition().getTaskByRefName(taskReferenceName); if (wft == null) { - String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDefinition %s", taskReferenceName, wf.getWorkflowType()); + String errorMsg = String.format("The task referenced by %s does not exist in the WorkflowDefinition %s", taskReferenceName, wf.getWorkflowName()); throw new IllegalStateException(errorMsg); } // If the task is already started the again it cannot be skipped @@ -692,7 +867,7 @@ public Workflow getWorkflow(String workflowId, boolean includeTasks) { } - public void addTaskToQueue(Task task) throws Exception { + public void addTaskToQueue(Task task) { // put in queue String taskQueueName = QueueUtils.getQueueName(task); queueDAO.remove(taskQueueName, task.getTaskId()); //QQ why do we need to remove the existing task ?? @@ -827,7 +1002,7 @@ private long getTaskDuration(long s, Task task) { } @VisibleForTesting - boolean scheduleTask(Workflow workflow, List tasks) throws Exception { + boolean scheduleTask(Workflow workflow, List tasks) { if (tasks == null || tasks.isEmpty()) { return false; @@ -866,7 +1041,16 @@ boolean scheduleTask(Workflow workflow, List tasks) throws Exception { } task.setStartTime(System.currentTimeMillis()); if (!workflowSystemTask.isAsync()) { - workflowSystemTask.start(workflow, task, this); + try { + workflowSystemTask.start(workflow, task, this); + } catch (Exception e) { + String message = String.format( + "Unable to start task {id: %s, name: %s}", + task.getTaskId(), + task.getTaskDefName() + ); + throw new ApplicationException(Code.INTERNAL_ERROR, message, e); + } startedSystemTasks = true; executionDAO.updateTask(task); } else { @@ -878,19 +1062,19 @@ boolean scheduleTask(Workflow workflow, List tasks) throws Exception { return startedSystemTasks; } - private void addTaskToQueue(final List tasks) throws Exception { + private void addTaskToQueue(final List tasks) { for (Task task : tasks) { addTaskToQueue(task); } } - private void terminate(final WorkflowDef def, final Workflow workflow, TerminateWorkflowException tw) throws Exception { + private void terminate(final Workflow workflow, TerminateWorkflowException tw) { if (!workflow.getStatus().isTerminal()) { workflow.setStatus(tw.workflowStatus); } - String failureWorkflow = def.getFailureWorkflow(); + String failureWorkflow = workflow.getWorkflowDefinition().getFailureWorkflow(); if (failureWorkflow != null) { if (failureWorkflow.startsWith("$")) { String[] paramPathComponents = failureWorkflow.split("\\."); @@ -905,7 +1089,7 @@ private void terminate(final WorkflowDef def, final Workflow workflow, Terminate } private boolean rerunWF(String workflowId, String taskId, Map taskInput, - Map workflowInput, String correlationId) throws Exception { + Map workflowInput, String correlationId) { // Get the workflow Workflow workflow = executionDAO.getWorkflow(workflowId); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java index 87990378db..d482e07e8e 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java @@ -17,21 +17,24 @@ 
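The mapper changes below follow the same theme: a task mapper no longer receives a WorkflowDef alongside the Workflow, it derives the definition from the instance itself. A minimal sketch of the new access pattern (the class and method names here are invented for illustration; only getWorkflowDefinition() comes from this patch):

    import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
    import com.netflix.conductor.common.run.Workflow;

    class MapperSketch {
        // After this patch the definition travels with the instance, so no
        // MetadataDAO round-trip is needed inside the mappers.
        static WorkflowDef definitionFor(Workflow workflowInstance) {
            return workflowInstance.getWorkflowDefinition();
        }
    }
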
package com.netflix.conductor.core.execution.mapper; import com.google.common.annotations.VisibleForTesting; + import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.events.ScriptEvaluator; import com.netflix.conductor.core.execution.SystemTaskType; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.script.ScriptException; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; +import javax.script.ScriptException; + /** * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#DECISION} @@ -66,7 +69,6 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { List tasksToBeScheduled = new LinkedList<>(); WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule(); Workflow workflowInstance = taskMapperContext.getWorkflowInstance(); - WorkflowDef workflowDefinition = taskMapperContext.getWorkflowDefinition(); Map taskInput = taskMapperContext.getTaskInput(); int retryCount = taskMapperContext.getRetryCount(); String taskId = taskMapperContext.getTaskId(); @@ -80,7 +82,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { decisionTask.setTaskDefName(SystemTaskType.DECISION.name()); decisionTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); decisionTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - decisionTask.setWorkflowType(workflowInstance.getWorkflowType()); + decisionTask.setWorkflowType(workflowInstance.getWorkflowName()); decisionTask.setCorrelationId(workflowInstance.getCorrelationId()); decisionTask.setScheduledTime(System.currentTimeMillis()); decisionTask.setEndTime(System.currentTimeMillis()); @@ -103,7 +105,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { WorkflowTask selectedTask = selectedTasks.get(0); //Schedule the first task to be executed... 
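                // New call shape with the WorkflowDef parameter removed; the
                // decider reads the definition off the instance, e.g. (local
                // variable names illustrative):
                //
                //   List<Task> caseTasks = deciderService.getTasksToBeScheduled(
                //           workflowInstance, selectedTask, retryCount, retryTaskId);
                //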
//TODO break out this recursive call using function composition of what needs to be done and then walk back the condition tree List caseTasks = taskMapperContext.getDeciderService() - .getTasksToBeScheduled(workflowDefinition, workflowInstance, selectedTask, retryCount, taskMapperContext.getRetryTaskId()); + .getTasksToBeScheduled(workflowInstance, selectedTask, retryCount, taskMapperContext.getRetryTaskId()); tasksToBeScheduled.addAll(caseTasks); decisionTask.getInputData().put("hasChildren", "true"); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java index 7d4e732ac2..d43115d491 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java @@ -80,7 +80,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter dynamicTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); dynamicTask.setInputData(input); dynamicTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - dynamicTask.setWorkflowType(workflowInstance.getWorkflowType()); + dynamicTask.setWorkflowType(workflowInstance.getWorkflowName()); dynamicTask.setStatus(Task.Status.SCHEDULED); dynamicTask.setTaskType(taskToSchedule.getType()); dynamicTask.setTaskDefName(taskToSchedule.getName()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java index ebf8e856aa..9e6d3fa8ec 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/EventTaskMapper.java @@ -57,7 +57,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { eventTask.setTaskDefName(taskToSchedule.getName()); eventTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); eventTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - eventTask.setWorkflowType(workflowInstance.getWorkflowType()); + eventTask.setWorkflowType(workflowInstance.getWorkflowName()); eventTask.setCorrelationId(workflowInstance.getCorrelationId()); eventTask.setScheduledTime(System.currentTimeMillis()); eventTask.setEndTime(System.currentTimeMillis()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java index cccb760644..c6d8acb3fe 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java @@ -15,9 +15,10 @@ */ package com.netflix.conductor.core.execution.mapper; +import com.google.common.annotations.VisibleForTesting; + import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; @@ -27,6 +28,7 @@ import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.TerminateWorkflowException; import 
com.netflix.conductor.core.utils.IDGenerator; + import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; @@ -107,7 +109,6 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter WorkflowTask taskToSchedule = taskMapperContext.getTaskToSchedule(); Workflow workflowInstance = taskMapperContext.getWorkflowInstance(); - WorkflowDef workflowDef = taskMapperContext.getWorkflowDefinition(); String taskId = taskMapperContext.getTaskId(); int retryCount = taskMapperContext.getRetryCount(); @@ -130,7 +131,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter //Add each dynamic task to the mapped tasks and also get the last dynamic task in the list, // which indicates that the following task after that needs to be a join task for (WorkflowTask wft : dynForkTasks) {//TODO this is a cyclic dependency, break it out using function composition - List forkedTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowDef, workflowInstance, wft, retryCount); + List forkedTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, wft, retryCount); for (Task forkedTask : forkedTasks) { Map forkedTaskInput = tasksInput.get(forkedTask.getReferenceTaskName()); forkedTask.getInputData().putAll(forkedTaskInput); @@ -143,7 +144,10 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter //From the workflow definition get the next task and make sure that it is a JOIN task. //The dynamic fork tasks need to be followed by a join task - WorkflowTask joinWorkflowTask = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName()); + WorkflowTask joinWorkflowTask = workflowInstance + .getWorkflowDefinition() + .getNextTask(taskToSchedule.getTaskReferenceName()); + if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(WorkflowTask.Type.JOIN.name())) { throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task. 
Check the blueprint"); } @@ -205,7 +209,7 @@ Task createJoinTask(Workflow workflowInstance, WorkflowTask joinWorkflowTask, Ha joinTask.setTaskDefName(SystemTaskType.JOIN.name()); joinTask.setReferenceTaskName(joinWorkflowTask.getTaskReferenceName()); joinTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - joinTask.setWorkflowType(workflowInstance.getWorkflowType()); + joinTask.setWorkflowType(workflowInstance.getWorkflowName()); joinTask.setCorrelationId(workflowInstance.getCorrelationId()); joinTask.setScheduledTime(System.currentTimeMillis()); joinTask.setEndTime(System.currentTimeMillis()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java index 64058d4460..6a6d368bea 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java @@ -63,7 +63,6 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter int retryCount = taskMapperContext.getRetryCount(); String taskId = taskMapperContext.getTaskId(); - WorkflowDef workflowDef = taskMapperContext.getWorkflowDefinition(); List tasksToBeScheduled = new LinkedList<>(); Task forkTask = new Task(); @@ -71,7 +70,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter forkTask.setTaskDefName(SystemTaskType.FORK.name()); forkTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); forkTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - forkTask.setWorkflowType(workflowInstance.getWorkflowType()); + forkTask.setWorkflowType(workflowInstance.getWorkflowName()); forkTask.setCorrelationId(workflowInstance.getCorrelationId()); forkTask.setScheduledTime(System.currentTimeMillis()); forkTask.setEndTime(System.currentTimeMillis()); @@ -85,11 +84,14 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter for (List wfts : forkTasks) { WorkflowTask wft = wfts.get(0); List tasks2 = taskMapperContext.getDeciderService() - .getTasksToBeScheduled(workflowDef, workflowInstance, wft, retryCount); + .getTasksToBeScheduled(workflowInstance, wft, retryCount); tasksToBeScheduled.addAll(tasks2); } - WorkflowTask joinWorkflowTask = workflowDef.getNextTask(taskToSchedule.getTaskReferenceName()); + WorkflowTask joinWorkflowTask = workflowInstance + .getWorkflowDefinition() + .getNextTask(taskToSchedule.getTaskReferenceName()); + if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(WorkflowTask.Type.JOIN.name())) { throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task. 
Check the blueprint"); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java index b88c5731e9..5f11beac8d 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java @@ -62,7 +62,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { joinTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); joinTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); joinTask.setCorrelationId(workflowInstance.getCorrelationId()); - joinTask.setWorkflowType(workflowInstance.getWorkflowType()); + joinTask.setWorkflowType(workflowInstance.getWorkflowName()); joinTask.setScheduledTime(System.currentTimeMillis()); joinTask.setEndTime(System.currentTimeMillis()); joinTask.setInputData(joinInput); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java index a18be48021..a6f71339d5 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java @@ -82,7 +82,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter simpleTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); simpleTask.setInputData(input); simpleTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - simpleTask.setWorkflowType(workflowInstance.getWorkflowType()); + simpleTask.setWorkflowType(workflowInstance.getWorkflowName()); simpleTask.setStatus(Task.Status.SCHEDULED); simpleTask.setTaskType(taskToSchedule.getName()); simpleTask.setTaskDefName(taskToSchedule.getName()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java index 0a3448e31b..378e6d8658 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java @@ -1,23 +1,21 @@ /** * Copyright 2018 Netflix, Inc. *

- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
 *

 * http://www.apache.org/licenses/LICENSE-2.0
 *

- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ package com.netflix.conductor.core.execution.mapper; import com.google.common.annotations.VisibleForTesting; + import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; @@ -27,16 +25,18 @@ import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.execution.tasks.SubWorkflow; import com.netflix.conductor.dao.MetadataDAO; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import javax.inject.Inject; + public class SubWorkflowTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(SubWorkflowTaskMapper.class); @@ -71,7 +71,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { subWorkflowTask.setTaskDefName(taskToSchedule.getName()); subWorkflowTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); subWorkflowTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - subWorkflowTask.setWorkflowType(workflowInstance.getWorkflowType()); + subWorkflowTask.setWorkflowType(workflowInstance.getWorkflowName()); subWorkflowTask.setCorrelationId(workflowInstance.getCorrelationId()); subWorkflowTask.setScheduledTime(System.currentTimeMillis()); subWorkflowTask.setEndTime(System.currentTimeMillis()); @@ -114,7 +114,7 @@ Integer getSubWorkflowVersion(Map resolvedParams, String subWork .map(Object::toString) .map(Integer::parseInt) .orElseGet( - () -> Optional.ofNullable(metadataDAO.getLatest(subWorkflowName)) + () -> metadataDAO.getLatest(subWorkflowName) .map(WorkflowDef::getVersion) .orElseThrow(() -> { String reason = String.format("The Task %s defined as a sub-workflow has no workflow definition available ", subWorkflowName); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java index d6fc661257..19f97a8dd5 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/TaskMapperContext.java @@ -28,7 +28,6 @@ */ public class TaskMapperContext { - private WorkflowDef workflowDefinition; private Workflow workflowInstance; private WorkflowTask taskToSchedule; private Map taskInput; @@ -38,10 +37,9 @@ public class TaskMapperContext { private DeciderService deciderService; - public TaskMapperContext(WorkflowDef workflowDefinition, Workflow workflowInstance, WorkflowTask taskToSchedule, + public TaskMapperContext(Workflow workflowInstance, WorkflowTask taskToSchedule, Map taskInput, int retryCount, String retryTaskId, String 
taskId, DeciderService deciderService) { - this.workflowDefinition = workflowDefinition; this.workflowInstance = workflowInstance; this.taskToSchedule = taskToSchedule; this.taskInput = taskInput; @@ -52,7 +50,7 @@ public TaskMapperContext(WorkflowDef workflowDefinition, Workflow workflowInstan } public WorkflowDef getWorkflowDefinition() { - return workflowDefinition; + return workflowInstance.getWorkflowDefinition(); } public Workflow getWorkflowInstance() { @@ -86,7 +84,7 @@ public DeciderService getDeciderService() { @Override public String toString() { return "TaskMapperContext{" + - "workflowDefinition=" + workflowDefinition + + "workflowDefinition=" + getWorkflowDefinition() + ", workflowInstance=" + workflowInstance + ", taskToSchedule=" + taskToSchedule + ", taskInput=" + taskInput + diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java index 81a30ec663..7518ddad58 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java @@ -79,7 +79,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter userDefinedTask.setTaskDefName(taskToSchedule.getName()); userDefinedTask.setReferenceTaskName(taskToSchedule.getTaskReferenceName()); userDefinedTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - userDefinedTask.setWorkflowType(workflowInstance.getWorkflowType()); + userDefinedTask.setWorkflowType(workflowInstance.getWorkflowName()); userDefinedTask.setCorrelationId(workflowInstance.getCorrelationId()); userDefinedTask.setScheduledTime(System.currentTimeMillis()); userDefinedTask.setTaskId(taskId); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java index e0e37809d1..b880f9b295 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java @@ -60,7 +60,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { waitTask.setTaskDefName(taskMapperContext.getTaskToSchedule().getName()); waitTask.setReferenceTaskName(taskMapperContext.getTaskToSchedule().getTaskReferenceName()); waitTask.setWorkflowInstanceId(workflowInstance.getWorkflowId()); - waitTask.setWorkflowType(workflowInstance.getWorkflowType()); + waitTask.setWorkflowType(workflowInstance.getWorkflowName()); waitTask.setCorrelationId(workflowInstance.getCorrelationId()); waitTask.setScheduledTime(System.currentTimeMillis()); waitTask.setEndTime(System.currentTimeMillis()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java index f1b9d7f5ec..1fcbb32cb2 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/tasks/Event.java @@ -18,15 +18,9 @@ */ package com.netflix.conductor.core.execution.tasks; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import com.google.common.annotations.VisibleForTesting; import com.fasterxml.jackson.databind.ObjectMapper; -import 
com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.run.Workflow; @@ -36,6 +30,13 @@ import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.WorkflowExecutor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + /** * @author Viren * @@ -60,8 +61,8 @@ public void start(Workflow workflow, Task task, WorkflowExecutor provider) throw Map payload = new HashMap<>(); payload.putAll(task.getInputData()); payload.put("workflowInstanceId", workflow.getWorkflowId()); - payload.put("workflowType", workflow.getWorkflowType()); - payload.put("workflowVersion", workflow.getVersion()); + payload.put("workflowType", workflow.getWorkflowName()); + payload.put("workflowVersion", workflow.getWorkflowVersion()); payload.put("correlationId", workflow.getCorrelationId()); String payloadJson = om.writeValueAsString(payload); @@ -103,12 +104,12 @@ ObservableQueue getQueue(Workflow workflow, Task task) { if("conductor".equals(sinkValue)) { - queueName = sinkValue + ":" + workflow.getWorkflowType() + ":" + task.getReferenceTaskName(); + queueName = sinkValue + ":" + workflow.getWorkflowName() + ":" + task.getReferenceTaskName(); } else if(sinkValue.startsWith("conductor:")) { queueName = sinkValue.replaceAll("conductor:", ""); - queueName = "conductor:" + workflow.getWorkflowType() + ":" + queueName; + queueName = "conductor:" + workflow.getWorkflowName() + ":" + queueName; } else { task.setStatus(Status.FAILED); diff --git a/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java b/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java index 2a2d4ad5e8..9900d3eaf5 100644 --- a/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java +++ b/core/src/main/java/com/netflix/conductor/dao/MetadataDAO.java @@ -1,162 +1,129 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
*/ /** - * + * */ package com.netflix.conductor.dao; -import java.util.List; - import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import java.util.List; +import java.util.Optional; + /** - * @author Viren - * Data access layer for the workflow metadata - task definitions and workflow definitions + * @author Viren Data access layer for the workflow metadata - task definitions and workflow definitions */ public interface MetadataDAO { - /** - * - * @param taskDef task definition to be created - * @return name of the task definition - * - */ - public abstract String createTaskDef(TaskDef taskDef); - - /** - * - * @param taskDef task definition to be updated. - * @return name of the task definition - * - */ - public abstract String updateTaskDef(TaskDef taskDef); - - /** - * - * @param name Name of the task - * @return Task Definition - * - */ - public abstract TaskDef getTaskDef(String name); - - /** - * - * @return All the task definitions - * - */ - public abstract List getAllTaskDefs(); - - /** - * - * @param name Name of the task - */ - public abstract void removeTaskDef(String name); - - /** - * - * @param def workflow definition - * - */ - public abstract void create(WorkflowDef def); - - /** - * - * @param def workflow definition - * - */ - public abstract void update(WorkflowDef def); - - /** - * - * @param name Name of the workflow - * @return Workflow Definition - * - */ - public abstract WorkflowDef getLatest(String name); - - /** - * - * @param name Name of the workflow - * @param version version - * @return workflow definition - * - */ - public abstract WorkflowDef get(String name, int version); - - /** - * - * @return Names of all the workflows - * - */ - public abstract List findAll(); - - /** - * - * @return List of all the workflow definitions - * - */ - public abstract List getAll(); - - /** - * - * @return List of all the latest workflow definitions - * - */ - public abstract List getAllLatest(); - - /** - * - * @param name name of the workflow - * @return List of all the workflow definitions - * - */ - public abstract List getAllVersions(String name); - - /** - * - * @param eventHandler Event handler to be added. - * Will throw an exception if an event handler already exists with the name - */ - public abstract void addEventHandler(EventHandler eventHandler); - - /** - * - * @param eventHandler Event handler to be updated. - */ - public abstract void updateEventHandler(EventHandler eventHandler); - - /** - * - * @param name Removes the event handler from the system - */ - public abstract void removeEventHandlerStatus(String name); - - /** - * - * @return All the event handlers registered in the system - */ - public List getEventHandlers(); - - /** - * - * @param event name of the event - * @param activeOnly if true, returns only the active handlers - * @return Returns the list of all the event handlers for a given event - */ - public List getEventHandlersForEvent(String event, boolean activeOnly); + /** + * @param taskDef task definition to be created + * @return name of the task definition + */ + String createTaskDef(TaskDef taskDef); + + /** + * @param taskDef task definition to be updated. 
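With getLatest and get now returning Optional<WorkflowDef>, absence is explicit rather than signalled by null, and the "not found" policy moves to the caller. A small consumer sketch (the helper name and -1 sentinel are invented for illustration):

    import java.util.Optional;

    import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
    import com.netflix.conductor.dao.MetadataDAO;

    class DaoLookupSketch {
        // Optional.empty() now marks a missing definition, so callers decide
        // whether that means an exception, a default, or a skip.
        static int latestVersion(MetadataDAO metadataDAO, String name) {
            return metadataDAO.getLatest(name)
                    .map(WorkflowDef::getVersion)
                    .orElse(-1);
        }
    }
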
+ * @return name of the task definition + */ + String updateTaskDef(TaskDef taskDef); + + /** + * @param name Name of the task + * @return Task Definition + */ + TaskDef getTaskDef(String name); + + /** + * @return All the task definitions + */ + List getAllTaskDefs(); + + /** + * @param name Name of the task + */ + void removeTaskDef(String name); + + /** + * @param def workflow definition + */ + void create(WorkflowDef def); + + /** + * @param def workflow definition + */ + void update(WorkflowDef def); + + /** + * @param name Name of the workflow + * @return Workflow Definition + */ + Optional getLatest(String name); + + /** + * @param name Name of the workflow + * @param version version + * @return workflow definition + */ + Optional get(String name, int version); + + /** + * @return Names of all the workflows + */ + List findAll(); + + /** + * @return List of all the workflow definitions + */ + List getAll(); + + /** + * @return List of all the latest workflow definitions + */ + List getAllLatest(); + + /** + * @param name name of the workflow + * @return List of all the workflow definitions + */ + List getAllVersions(String name); + + /** + * @param eventHandler Event handler to be added. Will throw an exception if an event handler already exists with + * the name + */ + void addEventHandler(EventHandler eventHandler); + + /** + * @param eventHandler Event handler to be updated. + */ + void updateEventHandler(EventHandler eventHandler); + + /** + * @param name Removes the event handler from the system + */ + void removeEventHandlerStatus(String name); + + /** + * @return All the event handlers registered in the system + */ + List getEventHandlers(); + + /** + * @param event name of the event + * @param activeOnly if true, returns only the active handlers + * @return Returns the list of all the event handlers for a given event + */ + List getEventHandlersForEvent(String event, boolean activeOnly); } diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java index 6a362a990d..ceea05961f 100644 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java @@ -19,6 +19,7 @@ package com.netflix.conductor.service; import com.google.common.base.Preconditions; + import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -29,9 +30,11 @@ import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.MetadataDAO; +import java.util.List; +import java.util.Optional; + import javax.inject.Inject; import javax.inject.Singleton; -import java.util.List; /** * @author Viren @@ -125,9 +128,9 @@ public void updateWorkflowDef(List wfs) { * @param version Optional. Version. 
If null, then retrieves the latest * @return Workflow definition */ - public WorkflowDef getWorkflowDef(String name, Integer version) { + public Optional getWorkflowDef(String name, Integer version) { if (version == null) { - return metadata.getLatest(name); + return getLatestWorkflow(name); } return metadata.get(name, version); } @@ -137,7 +140,7 @@ public WorkflowDef getWorkflowDef(String name, Integer version) { * @param name Name of the workflow to retrieve * @return Latest version of the workflow definition */ - public WorkflowDef getLatestWorkflow(String name) { + public Optional getLatestWorkflow(String name) { return metadata.getLatest(name); } diff --git a/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java b/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java index ea4dd10307..8776ddc6ac 100644 --- a/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java +++ b/core/src/test/java/com/netflix/conductor/core/events/TestEventProcessor.java @@ -18,26 +18,9 @@ */ package com.netflix.conductor.core.events; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.UUID; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; - -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; +import com.google.common.util.concurrent.Uninterruptibles; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.util.concurrent.Uninterruptibles; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.events.EventHandler.Action; import com.netflix.conductor.common.metadata.events.EventHandler.Action.Type; @@ -50,8 +33,27 @@ import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + import rx.Observable; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + /** * @author Viren * @@ -117,7 +119,7 @@ public String answer(InvocationOnMock invocation) throws Throwable { WorkflowDef def = new WorkflowDef(); def.setVersion(1); def.setName(action.getStartWorkflow().getName()); - when(metadata.getWorkflowDef(any(), any())).thenReturn(def); + when(metadata.getWorkflowDef(any(), any())).thenReturn(Optional.of(def)); ActionProcessor ap = new ActionProcessor(executor, metadata); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index ad55514bb7..629bf12dde 100644 --- 
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java
index ad55514bb7..629bf12dde 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java
@@ -1,17 +1,14 @@
 /**
  * Copyright 2016 Netflix, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
  */
 /**
  *
@@ -42,6 +39,7 @@
 import com.netflix.conductor.core.execution.mapper.WaitTaskMapper;
 import com.netflix.conductor.core.execution.tasks.Join;
 import com.netflix.conductor.dao.MetadataDAO;
+
 import org.junit.Before;
 import org.junit.Test;
 
@@ -67,373 +65,375 @@
 */
 public class TestDeciderOutcomes {
 
-	private DeciderService deciderService;
+    private DeciderService deciderService;
 
-	private static ObjectMapper objectMapper = new ObjectMapper();
+    private static ObjectMapper objectMapper = new ObjectMapper();
 
-	static {
-		objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+    static {
+        objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
         objectMapper.configure(DeserializationFeature.FAIL_ON_IGNORED_PROPERTIES, false);
         objectMapper.configure(DeserializationFeature.FAIL_ON_NULL_FOR_PRIMITIVES, false);
         objectMapper.setSerializationInclusion(Include.NON_NULL);
         objectMapper.setSerializationInclusion(Include.NON_EMPTY);
-	}
-
-
-	@Before
-	public void init() throws Exception {
-
-		MetadataDAO metadataDAO = mock(MetadataDAO.class);
-		TaskDef taskDef = new TaskDef();
-		taskDef.setRetryCount(1);
-		taskDef.setName("mockTaskDef");
-		taskDef.setResponseTimeoutSeconds(0);
-		when(metadataDAO.getTaskDef(any())).thenReturn(taskDef);
-		ParametersUtils parametersUtils = new ParametersUtils();
-		Map<String, TaskMapper> taskMappers = new HashMap<>();
-		taskMappers.put("DECISION", new DecisionTaskMapper());
-		taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO));
-		taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper());
-		taskMappers.put("JOIN", new JoinTaskMapper());
-		taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper));
-		taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO));
-		taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO));
-		taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO));
-		taskMappers.put("EVENT", new EventTaskMapper(parametersUtils));
-		taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils));
-
-		this.deciderService = new DeciderService(metadataDAO, taskMappers);
-	}
-
-	@Test
-
public void testWorkflowWithNoTasks() throws Exception { - InputStream stream = TestDeciderOutcomes.class.getResourceAsStream("/conditional_flow.json"); - WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class); - assertNotNull(def); - - Workflow workflow = new Workflow(); - workflow.setWorkflowType(def.getName()); - workflow.setStartTime(0); - workflow.getInput().put("param1", "nested"); - workflow.getInput().put("param2", "one"); - - DeciderOutcome outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - assertFalse(outcome.isComplete); - assertTrue(outcome.tasksToBeUpdated.isEmpty()); - assertEquals(3, outcome.tasksToBeScheduled.size()); - System.out.println(outcome.tasksToBeScheduled); - - outcome.tasksToBeScheduled.forEach(t -> t.setStatus(Status.COMPLETED)); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - outcome = deciderService.decide(workflow, def); - assertFalse(outcome.isComplete); - assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName()); - System.out.println(outcome.tasksToBeScheduled); - } - - - @Test - public void testRetries() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask task = new WorkflowTask(); - task.setName("test_task"); - task.setType("USER_TASK"); - task.setTaskReferenceName("t0"); - task.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - task.getInputParameters().put("requestId", "${workflow.input.requestId}"); - - def.getTasks().add(task); - def.setSchemaVersion(2); - - Workflow workflow = new Workflow(); - workflow.getInput().put("requestId", 123); - workflow.setStartTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals(task.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - - String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); - - outcome.tasksToBeScheduled.get(0).setStatus(Status.FAILED); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - - outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - - assertEquals(1, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); - assertNotSame(task1Id, outcome.tasksToBeScheduled.get(0).getTaskId()); - assertEquals(outcome.tasksToBeScheduled.get(0).getTaskId(), outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getRetriedTaskId()); - assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); - - - WorkflowTask fork = new WorkflowTask(); - fork.setName("fork0"); - fork.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC); - fork.setTaskReferenceName("fork0"); - fork.setDynamicForkTasksInputParamName("forkedInputs"); - fork.setDynamicForkTasksParam("forks"); - fork.getInputParameters().put("forks", "${workflow.input.forks}"); - fork.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}"); - - WorkflowTask join = new WorkflowTask(); - join.setName("join0"); - join.setType("JOIN"); - 
join.setTaskReferenceName("join0"); - - def.getTasks().clear(); - def.getTasks().add(fork); - def.getTasks().add(join); - - List forks = new LinkedList<>(); - Map> forkedInputs = new HashMap<>(); - - for(int i = 0; i < 1; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("f" + i); - wft.setTaskReferenceName("f" + i); - wft.setWorkflowTaskType(Type.SIMPLE); - wft.getInputParameters().put("requestId", "${workflow.input.requestId}"); - wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - forks.add(wft); - Map input = new HashMap<>(); - input.put("k", "v"); - input.put("k1", 1); - forkedInputs.put(wft.getTaskReferenceName(), input); - } - workflow = new Workflow(); - workflow.getInput().put("requestId", 123); - workflow.setStartTime(System.currentTimeMillis()); - - workflow.getInput().put("forks", forks); - workflow.getInput().put("forkedInputs", forkedInputs); - - outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - assertEquals(3, outcome.tasksToBeScheduled.size()); - assertEquals(0, outcome.tasksToBeUpdated.size()); - - assertEquals("v", outcome.tasksToBeScheduled.get(1).getInputData().get("k")); - assertEquals(1, outcome.tasksToBeScheduled.get(1).getInputData().get("k1")); - assertEquals(outcome.tasksToBeScheduled.get(1).getTaskId(), outcome.tasksToBeScheduled.get(1).getInputData().get("taskId")); - System.out.println(outcome.tasksToBeScheduled.get(1).getInputData()); - task1Id = outcome.tasksToBeScheduled.get(1).getTaskId(); - - outcome.tasksToBeScheduled.get(1).setStatus(Status.FAILED); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - - outcome = deciderService.decide(workflow, def); - assertTrue(outcome.tasksToBeScheduled.stream().anyMatch(task1 -> task1.getReferenceTaskName().equals("f0"))); - Task task1 = outcome.tasksToBeScheduled.stream().filter(t -> t.getReferenceTaskName().equals("f0")).findFirst().get(); - assertEquals("v", task1.getInputData().get("k")); - assertEquals(1, task1.getInputData().get("k1")); - assertEquals(task1.getTaskId(), task1.getInputData().get("taskId")); - assertNotSame(task1Id, task1.getTaskId()); - assertEquals(task1Id, task1.getRetriedTaskId()); - System.out.println(task1.getInputData()); - - } - - @Test - public void testOptional() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask task1 = new WorkflowTask(); - task1.setName("task0"); - task1.setType("SIMPLE"); - task1.setTaskReferenceName("t0"); - task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - task1.setOptional(true); - - WorkflowTask task2 = new WorkflowTask(); - task2.setName("task1"); - task2.setType("SIMPLE"); - task2.setTaskReferenceName("t1"); - - def.getTasks().add(task1); - def.getTasks().add(task2); - def.setSchemaVersion(2); - - - Workflow workflow = new Workflow(); - workflow.setStartTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - - System.out.println("Schedule after starting: " + outcome.tasksToBeScheduled); - assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals(task1.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - System.out.println("TaskId of the scheduled task in input: " + outcome.tasksToBeScheduled.get(0).getInputData()); - String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); - assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); - - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - 
workflow.getTasks().get(0).setStatus(Status.FAILED); - - outcome = deciderService.decide(workflow, def); - - assertNotNull(outcome); - System.out.println("Schedule: " + outcome.tasksToBeScheduled); - System.out.println("Update: " + outcome.tasksToBeUpdated); - - assertEquals(1, outcome.tasksToBeUpdated.size()); - assertEquals(1, outcome.tasksToBeScheduled.size()); - - assertEquals(Task.Status.COMPLETED_WITH_ERRORS, workflow.getTasks().get(0).getStatus()); - assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); - assertEquals(task2.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - - } - - @Test - public void testOptionalWithDynamicFork() throws Exception { - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test"); - - WorkflowTask task1 = new WorkflowTask(); - task1.setName("fork0"); - task1.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC); - task1.setTaskReferenceName("fork0"); - task1.setDynamicForkTasksInputParamName("forkedInputs"); - task1.setDynamicForkTasksParam("forks"); - task1.getInputParameters().put("forks", "${workflow.input.forks}"); - task1.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}"); - - WorkflowTask task2 = new WorkflowTask(); - task2.setName("join0"); - task2.setType("JOIN"); - task2.setTaskReferenceName("join0"); - - workflowDef.getTasks().add(task1); - workflowDef.getTasks().add(task2); - workflowDef.setSchemaVersion(2); - - - Workflow workflow = new Workflow(); - List forkedTasks = new LinkedList<>(); - Map> forkedInputs = new HashMap<>(); - - for(int i = 0; i < 3; i++) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("f" + i); - workflowTask.setTaskReferenceName("f" + i); - workflowTask.setWorkflowTaskType(Type.SIMPLE); - workflowTask.setOptional(true); - forkedTasks.add(workflowTask); - - forkedInputs.put(workflowTask.getTaskReferenceName(), new HashMap<>()); - } - workflow.getInput().put("forks", forkedTasks); - workflow.getInput().put("forkedInputs", forkedInputs); - - - workflow.setStartTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow, workflowDef); - assertNotNull(outcome); - assertEquals(5, outcome.tasksToBeScheduled.size()); - assertEquals(0, outcome.tasksToBeUpdated.size()); - assertEquals(SystemTaskType.FORK.name(), outcome.tasksToBeScheduled.get(0).getTaskType()); - assertEquals(Task.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus()); - for(int i = 1; i < 4; i++) { - assertEquals(Task.Status.SCHEDULED, outcome.tasksToBeScheduled.get(i).getStatus()); - assertEquals("f"+ (i-1), outcome.tasksToBeScheduled.get(i).getTaskDefName()); - outcome.tasksToBeScheduled.get(i).setStatus(Status.FAILED); //let's mark them as failure - } - assertEquals(Task.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(4).getStatus()); - workflow.getTasks().clear(); - workflow.getTasks().addAll(outcome.tasksToBeScheduled); - - outcome = deciderService.decide(workflow, workflowDef); - assertNotNull(outcome); - assertEquals(SystemTaskType.JOIN.name(), outcome.tasksToBeScheduled.get(0).getTaskType()); - for(int i = 1; i < 4; i++) { - assertEquals(Task.Status.COMPLETED_WITH_ERRORS, outcome.tasksToBeUpdated.get(i).getStatus()); - assertEquals("f"+ (i-1), outcome.tasksToBeUpdated.get(i).getTaskDefName()); - } - assertEquals(Task.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(0).getStatus()); - new Join().execute(workflow, outcome.tasksToBeScheduled.get(0), null); - assertEquals(Task.Status.COMPLETED, 
outcome.tasksToBeScheduled.get(0).getStatus()); - - outcome.tasksToBeScheduled.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println); - outcome.tasksToBeUpdated.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println); - } - - @Test - public void testDecisionCases() { - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - - WorkflowTask even = new WorkflowTask(); - even.setName("even"); - even.setType("SIMPLE"); - even.setTaskReferenceName("even"); - - WorkflowTask odd = new WorkflowTask(); - odd.setName("odd"); - odd.setType("SIMPLE"); - odd.setTaskReferenceName("odd"); - - WorkflowTask defaultt = new WorkflowTask(); - defaultt.setName("defaultt"); - defaultt.setType("SIMPLE"); - defaultt.setTaskReferenceName("defaultt"); - - - WorkflowTask decide = new WorkflowTask(); - decide.setName("decide"); - decide.setWorkflowTaskType(Type.DECISION); - decide.setTaskReferenceName("d0"); - decide.getInputParameters().put("Id", "${workflow.input.Id}"); - decide.getInputParameters().put("location", "${workflow.input.location}"); - decide.setCaseExpression("if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0) || $.location == 'usa') 'even'; else 'odd'; "); - - decide.getDecisionCases().put("even", Arrays.asList(even)); - decide.getDecisionCases().put("odd", Arrays.asList(odd)); - decide.setDefaultCase(Arrays.asList(defaultt)); - - def.getTasks().add(decide); - def.setSchemaVersion(2); - - - Workflow workflow = new Workflow(); - workflow.setStartTime(System.currentTimeMillis()); - DeciderOutcome outcome = deciderService.decide(workflow, def); - assertNotNull(outcome); - - System.out.println("Schedule after starting: " + outcome.tasksToBeScheduled); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals(defaultt.getTaskReferenceName(), outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //default - System.out.println(outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - assertEquals(Arrays.asList("bad input"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - - workflow.getInput().put("Id", 9); - workflow.getInput().put("location", "usa"); - outcome = deciderService.decide(workflow, def); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals(even.getTaskReferenceName(), outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //even because of location == usa - assertEquals(Arrays.asList("even"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - - workflow.getInput().put("Id", 9); - workflow.getInput().put("location", "canada"); - outcome = deciderService.decide(workflow, def); - assertEquals(2, outcome.tasksToBeScheduled.size()); - assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); - assertEquals(odd.getTaskReferenceName(), outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //odd - assertEquals(Arrays.asList("odd"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); - } - + } + + @Before + public void init() throws Exception { + + MetadataDAO metadataDAO = mock(MetadataDAO.class); + TaskDef td = new TaskDef(); + td.setRetryCount(1); + when(metadataDAO.getTaskDef(any())).thenReturn(td); + ParametersUtils 
parametersUtils = new ParametersUtils(); + Map taskMappers = new HashMap<>(); + taskMappers.put("DECISION", new DecisionTaskMapper()); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); + taskMappers.put("JOIN", new JoinTaskMapper()); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); + taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); + + this.deciderService = new DeciderService(metadataDAO, taskMappers); + } + + @Test + public void testWorkflowWithNoTasks() throws Exception { + InputStream stream = TestDeciderOutcomes.class.getResourceAsStream("/conditional_flow.json"); + WorkflowDef def = objectMapper.readValue(stream, WorkflowDef.class); + assertNotNull(def); + + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.setStartTime(0); + workflow.getInput().put("param1", "nested"); + workflow.getInput().put("param2", "one"); + + DeciderOutcome outcome = deciderService.decide(workflow); + assertNotNull(outcome); + assertFalse(outcome.isComplete); + assertTrue(outcome.tasksToBeUpdated.isEmpty()); + assertEquals(3, outcome.tasksToBeScheduled.size()); + System.out.println(outcome.tasksToBeScheduled); + + outcome.tasksToBeScheduled.forEach(t -> t.setStatus(Status.COMPLETED)); + workflow.getTasks().addAll(outcome.tasksToBeScheduled); + outcome = deciderService.decide(workflow); + assertFalse(outcome.isComplete); + assertEquals(outcome.tasksToBeUpdated.toString(), 3, outcome.tasksToBeUpdated.size()); + assertEquals(1, outcome.tasksToBeScheduled.size()); + assertEquals("junit_task_3", outcome.tasksToBeScheduled.get(0).getTaskDefName()); + System.out.println(outcome.tasksToBeScheduled); + } + + + @Test + public void testRetries() { + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + + WorkflowTask task = new WorkflowTask(); + task.setName("test_task"); + task.setType("USER_TASK"); + task.setTaskReferenceName("t0"); + task.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); + task.getInputParameters().put("requestId", "${workflow.input.requestId}"); + + def.getTasks().add(task); + def.setSchemaVersion(2); + + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.getInput().put("requestId", 123); + workflow.setStartTime(System.currentTimeMillis()); + DeciderOutcome outcome = deciderService.decide(workflow); + assertNotNull(outcome); + + assertEquals(1, outcome.tasksToBeScheduled.size()); + assertEquals(task.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); + + String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); + assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); + assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); + + outcome.tasksToBeScheduled.get(0).setStatus(Status.FAILED); + workflow.getTasks().addAll(outcome.tasksToBeScheduled); + + outcome = deciderService.decide(workflow); + assertNotNull(outcome); + + assertEquals(1, outcome.tasksToBeUpdated.size()); + assertEquals(1, outcome.tasksToBeScheduled.size()); + 
assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId()); + assertNotSame(task1Id, outcome.tasksToBeScheduled.get(0).getTaskId()); + assertEquals(outcome.tasksToBeScheduled.get(0).getTaskId(), outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); + assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getRetriedTaskId()); + assertEquals(123, outcome.tasksToBeScheduled.get(0).getInputData().get("requestId")); + + + WorkflowTask fork = new WorkflowTask(); + fork.setName("fork0"); + fork.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC); + fork.setTaskReferenceName("fork0"); + fork.setDynamicForkTasksInputParamName("forkedInputs"); + fork.setDynamicForkTasksParam("forks"); + fork.getInputParameters().put("forks", "${workflow.input.forks}"); + fork.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}"); + + WorkflowTask join = new WorkflowTask(); + join.setName("join0"); + join.setType("JOIN"); + join.setTaskReferenceName("join0"); + + def.getTasks().clear(); + def.getTasks().add(fork); + def.getTasks().add(join); + + List forks = new LinkedList<>(); + Map> forkedInputs = new HashMap<>(); + + for (int i = 0; i < 1; i++) { + WorkflowTask wft = new WorkflowTask(); + wft.setName("f" + i); + wft.setTaskReferenceName("f" + i); + wft.setWorkflowTaskType(Type.SIMPLE); + wft.getInputParameters().put("requestId", "${workflow.input.requestId}"); + wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); + forks.add(wft); + Map input = new HashMap<>(); + input.put("k", "v"); + input.put("k1", 1); + forkedInputs.put(wft.getTaskReferenceName(), input); + } + workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.getInput().put("requestId", 123); + workflow.setStartTime(System.currentTimeMillis()); + + workflow.getInput().put("forks", forks); + workflow.getInput().put("forkedInputs", forkedInputs); + + outcome = deciderService.decide(workflow); + assertNotNull(outcome); + assertEquals(3, outcome.tasksToBeScheduled.size()); + assertEquals(0, outcome.tasksToBeUpdated.size()); + + assertEquals("v", outcome.tasksToBeScheduled.get(1).getInputData().get("k")); + assertEquals(1, outcome.tasksToBeScheduled.get(1).getInputData().get("k1")); + assertEquals(outcome.tasksToBeScheduled.get(1).getTaskId(), outcome.tasksToBeScheduled.get(1).getInputData().get("taskId")); + System.out.println(outcome.tasksToBeScheduled.get(1).getInputData()); + task1Id = outcome.tasksToBeScheduled.get(1).getTaskId(); + + outcome.tasksToBeScheduled.get(1).setStatus(Status.FAILED); + workflow.getTasks().addAll(outcome.tasksToBeScheduled); + + outcome = deciderService.decide(workflow); + assertTrue(outcome.tasksToBeScheduled.stream().anyMatch(task1 -> task1.getReferenceTaskName().equals("f0"))); + Task task1 = outcome.tasksToBeScheduled.stream().filter(t -> t.getReferenceTaskName().equals("f0")).findFirst().get(); + assertEquals("v", task1.getInputData().get("k")); + assertEquals(1, task1.getInputData().get("k1")); + assertEquals(task1.getTaskId(), task1.getInputData().get("taskId")); + assertNotSame(task1Id, task1.getTaskId()); + assertEquals(task1Id, task1.getRetriedTaskId()); + System.out.println(task1.getInputData()); + + } + + @Test + public void testOptional() { + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + + WorkflowTask task1 = new WorkflowTask(); + task1.setName("task0"); + task1.setType("SIMPLE"); + task1.setTaskReferenceName("t0"); + task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); + task1.setOptional(true); + + WorkflowTask task2 = new 
WorkflowTask();
+        task2.setName("task1");
+        task2.setType("SIMPLE");
+        task2.setTaskReferenceName("t1");
+
+        def.getTasks().add(task1);
+        def.getTasks().add(task2);
+        def.setSchemaVersion(2);
+
+
+        Workflow workflow = new Workflow();
+        workflow.setWorkflowDefinition(def);
+        workflow.setStartTime(System.currentTimeMillis());
+        DeciderOutcome outcome = deciderService.decide(workflow);
+        assertNotNull(outcome);
+
+        System.out.println("Schedule after starting: " + outcome.tasksToBeScheduled);
+        assertEquals(1, outcome.tasksToBeScheduled.size());
+        assertEquals(task1.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
+        System.out.println("TaskId of the scheduled task in input: " + outcome.tasksToBeScheduled.get(0).getInputData());
+        String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId();
+        assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId"));
+
+        workflow.getTasks().addAll(outcome.tasksToBeScheduled);
+        workflow.getTasks().get(0).setStatus(Status.FAILED);
+
+        outcome = deciderService.decide(workflow);
+
+        assertNotNull(outcome);
+        System.out.println("Schedule: " + outcome.tasksToBeScheduled);
+        System.out.println("Update: " + outcome.tasksToBeUpdated);
+
+        assertEquals(1, outcome.tasksToBeUpdated.size());
+        assertEquals(1, outcome.tasksToBeScheduled.size());
+
+        assertEquals(Task.Status.COMPLETED_WITH_ERRORS, workflow.getTasks().get(0).getStatus());
+        assertEquals(task1Id, outcome.tasksToBeUpdated.get(0).getTaskId());
+        assertEquals(task2.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
+
+    }
+
+    @Test
+    public void testOptionalWithDynamicFork() throws Exception {
+        WorkflowDef def = new WorkflowDef();
+        def.setName("test");
+
+        WorkflowTask task1 = new WorkflowTask();
+        task1.setName("fork0");
+        task1.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC);
+        task1.setTaskReferenceName("fork0");
+        task1.setDynamicForkTasksInputParamName("forkedInputs");
+        task1.setDynamicForkTasksParam("forks");
+        task1.getInputParameters().put("forks", "${workflow.input.forks}");
+        task1.getInputParameters().put("forkedInputs", "${workflow.input.forkedInputs}");
+
+        WorkflowTask task2 = new WorkflowTask();
+        task2.setName("join0");
+        task2.setType("JOIN");
+        task2.setTaskReferenceName("join0");
+
+        def.getTasks().add(task1);
+        def.getTasks().add(task2);
+        def.setSchemaVersion(2);
+
+
+        Workflow workflow = new Workflow();
+        workflow.setWorkflowDefinition(def);
+        List<WorkflowTask> forks = new LinkedList<>();
+        Map<String, Map<String, Object>> forkedInputs = new HashMap<>();
+
+        for (int i = 0; i < 3; i++) {
+            WorkflowTask wft = new WorkflowTask();
+            wft.setName("f" + i);
+            wft.setTaskReferenceName("f" + i);
+            wft.setWorkflowTaskType(Type.SIMPLE);
+            wft.setOptional(true);
+            forks.add(wft);
+
+            forkedInputs.put(wft.getTaskReferenceName(), new HashMap<>());
+        }
+        workflow.getInput().put("forks", forks);
+        workflow.getInput().put("forkedInputs", forkedInputs);
+
+
+        workflow.setStartTime(System.currentTimeMillis());
+        DeciderOutcome outcome = deciderService.decide(workflow);
+        assertNotNull(outcome);
+        assertEquals(5, outcome.tasksToBeScheduled.size());
+        assertEquals(0, outcome.tasksToBeUpdated.size());
+
+        assertEquals(SystemTaskType.FORK.name(), outcome.tasksToBeScheduled.get(0).getTaskType());
+        assertEquals(Task.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus());
+        for (int i = 1; i < 4; i++) {
+            assertEquals(Task.Status.SCHEDULED, outcome.tasksToBeScheduled.get(i).getStatus());
+            assertEquals("f" + (i - 1),
outcome.tasksToBeScheduled.get(i).getTaskDefName()); + outcome.tasksToBeScheduled.get(i).setStatus(Status.FAILED); //let's mark them as failure + } + assertEquals(Task.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(4).getStatus()); + workflow.getTasks().clear(); + workflow.getTasks().addAll(outcome.tasksToBeScheduled); + + outcome = deciderService.decide(workflow); + assertNotNull(outcome); + assertEquals(SystemTaskType.JOIN.name(), outcome.tasksToBeScheduled.get(0).getTaskType()); + for (int i = 1; i < 4; i++) { + assertEquals(Task.Status.COMPLETED_WITH_ERRORS, outcome.tasksToBeUpdated.get(i).getStatus()); + assertEquals("f" + (i - 1), outcome.tasksToBeUpdated.get(i).getTaskDefName()); + } + assertEquals(Task.Status.IN_PROGRESS, outcome.tasksToBeScheduled.get(0).getStatus()); + new Join().execute(workflow, outcome.tasksToBeScheduled.get(0), null); + assertEquals(Task.Status.COMPLETED, outcome.tasksToBeScheduled.get(0).getStatus()); + + outcome.tasksToBeScheduled.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println); + outcome.tasksToBeUpdated.stream().map(task -> task.getStatus() + ":" + task.getTaskType() + ":").forEach(System.out::println); + } + + @Test + public void testDecisionCases() { + WorkflowDef def = new WorkflowDef(); + def.setName("test"); + + WorkflowTask even = new WorkflowTask(); + even.setName("even"); + even.setType("SIMPLE"); + even.setTaskReferenceName("even"); + + WorkflowTask odd = new WorkflowTask(); + odd.setName("odd"); + odd.setType("SIMPLE"); + odd.setTaskReferenceName("odd"); + + WorkflowTask defaultt = new WorkflowTask(); + defaultt.setName("defaultt"); + defaultt.setType("SIMPLE"); + defaultt.setTaskReferenceName("defaultt"); + + + WorkflowTask decide = new WorkflowTask(); + decide.setName("decide"); + decide.setWorkflowTaskType(Type.DECISION); + decide.setTaskReferenceName("d0"); + decide.getInputParameters().put("Id", "${workflow.input.Id}"); + decide.getInputParameters().put("location", "${workflow.input.location}"); + decide.setCaseExpression("if ($.Id == null) 'bad input'; else if ( ($.Id != null && $.Id % 2 == 0) || $.location == 'usa') 'even'; else 'odd'; "); + + decide.getDecisionCases().put("even", Arrays.asList(even)); + decide.getDecisionCases().put("odd", Arrays.asList(odd)); + decide.setDefaultCase(Arrays.asList(defaultt)); + + def.getTasks().add(decide); + def.setSchemaVersion(2); + + + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.setStartTime(System.currentTimeMillis()); + DeciderOutcome outcome = deciderService.decide(workflow); + assertNotNull(outcome); + + System.out.println("Schedule after starting: " + outcome.tasksToBeScheduled); + assertEquals(2, outcome.tasksToBeScheduled.size()); + assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); + assertEquals(defaultt.getTaskReferenceName(), outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //default + System.out.println(outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); + assertEquals(Arrays.asList("bad input"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput")); + + workflow.getInput().put("Id", 9); + workflow.getInput().put("location", "usa"); + outcome = deciderService.decide(workflow); + assertEquals(2, outcome.tasksToBeScheduled.size()); + assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); + assertEquals(even.getTaskReferenceName(), 
outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //even because of location == usa
+        assertEquals(Arrays.asList("even"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput"));
+
+        workflow.getInput().put("Id", 9);
+        workflow.getInput().put("location", "canada");
+        outcome = deciderService.decide(workflow);
+        assertEquals(2, outcome.tasksToBeScheduled.size());
+        assertEquals(decide.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName());
+        assertEquals(odd.getTaskReferenceName(), outcome.tasksToBeScheduled.get(1).getReferenceTaskName()); //odd
+        assertEquals(Arrays.asList("odd"), outcome.tasksToBeScheduled.get(0).getOutputData().get("caseOutput"));
+    }
 }
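Note: the rewritten tests above all follow one calling convention: the WorkflowDef is attached to the Workflow up front, and DeciderService.decide takes the workflow alone. A minimal sketch of that convention, assuming a deciderService wired up as in the test's init(); the definition values are placeholders:

    import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
    import com.netflix.conductor.common.run.Workflow;
    import com.netflix.conductor.core.execution.DeciderService;
    import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome;

    class DecideExample {
        DeciderOutcome decideOnce(DeciderService deciderService) {
            WorkflowDef def = new WorkflowDef();
            def.setName("example");
            def.setSchemaVersion(2);

            Workflow workflow = new Workflow();
            workflow.setWorkflowDefinition(def);       // the definition travels with the instance
            workflow.setStartTime(System.currentTimeMillis());

            return deciderService.decide(workflow);    // no separate WorkflowDef argument
        }
    }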
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java
index cf40fabb6f..90ac876bcd 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java
@@ -1,17 +1,14 @@
 /**
  * Copyright 2016 Netflix, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at *
* http://www.apache.org/licenses/LICENSE-2.0 *
- * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** * @@ -30,6 +27,7 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper; @@ -42,12 +40,12 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.spectator.api.Counter; import com.netflix.spectator.api.DefaultRegistry; import com.netflix.spectator.api.Registry; import com.netflix.spectator.api.Spectator; + import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -60,6 +58,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -84,8 +83,6 @@ @SuppressWarnings("Duplicates") public class TestDeciderService { - private Workflow workflow; - private DeciderService deciderService; private ParametersUtils parametersUtils; @@ -104,9 +101,13 @@ public static void init() { public void setup() { MetadataDAO metadataDAO = mock(MetadataDAO.class); TaskDef taskDef = new TaskDef(); + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName("TestDeciderService"); + workflowDef.setVersion(1); + when(metadataDAO.getTaskDef(any())).thenReturn(taskDef); - when(metadataDAO.getLatest(any())).thenReturn(workflowDef); + when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef)); parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); @@ -121,44 +122,14 @@ public void setup() { taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); deciderService = new DeciderService(metadataDAO, taskMappers); - - workflow = new Workflow(); - workflow.getInput().put("requestId", "request id 001"); - workflow.getInput().put("hasAwards", true); - workflow.getInput().put("channelMapping", 5); - Map name = new HashMap<>(); - name.put("name", "The Who"); - name.put("year", 1970); - Map name2 = new HashMap<>(); - name2.put("name", "The Doors"); - name2.put("year", 1975); - - List names = new LinkedList<>(); - names.add(name); - names.add(name2); - - workflow.getOutput().put("name", name); - workflow.getOutput().put("names", names); - workflow.getOutput().put("awards", 200); - - Task task = new Task(); - 
task.setReferenceTaskName("task2"); - task.getOutputData().put("location", "http://location"); - task.setStatus(Status.COMPLETED); - - Task task2 = new Task(); - task2.setReferenceTaskName("task3"); - task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc"); - task2.setStatus(Status.SCHEDULED); - - workflow.getTasks().add(task); - workflow.getTasks().add(task2); } @Test public void testGetTaskInputV2() throws Exception { + Workflow workflow = createDefaultWorkflow(); + + workflow.getWorkflowDefinition().setSchemaVersion(2); - workflow.setSchemaVersion(2); Map ip = new HashMap<>(); ip.put("workflowInputParam", "${workflow.input.requestId}"); ip.put("taskOutputParam", "${task2.output.location}"); @@ -186,11 +157,12 @@ public void testGetTaskInputV2() throws Exception { assertNull(taskInput.get("nullValue")); assertEquals(workflow.getTasks().get(0).getStatus().name(), taskInput.get("task2Status")); //task2 and task3 are the tasks respectively System.out.println(taskInput); - workflow.setSchemaVersion(1); } @Test public void testGetTaskInputV2Partial() throws Exception { + Workflow workflow = createDefaultWorkflow(); + System.setProperty("EC2_INSTANCE", "i-123abcdef990"); Map wfi = new HashMap<>(); Map wfmap = new HashMap<>(); @@ -207,7 +179,7 @@ public void testGetTaskInputV2Partial() throws Exception { wfi.put(ref, io); }); - workflow.setSchemaVersion(2); + workflow.getWorkflowDefinition().setSchemaVersion(2); Map ip = new HashMap<>(); ip.put("workflowInputParam", "${workflow.input.requestId}"); @@ -256,7 +228,6 @@ public void testGetTaskInputV2Partial() throws Exception { assertNull(taskInput.get("taskOutputParam3")); assertNotNull(taskInput.get("partial")); assertEquals("http://location/something?host=i-123abcdef990", taskInput.get("partial")); - workflow.setSchemaVersion(1); } @SuppressWarnings("unchecked") @@ -281,14 +252,18 @@ public void testGetTaskInput() throws Exception { json.add(m2); ip.put("complexJson", json); + WorkflowDef def = new WorkflowDef(); + def.setName("testGetTaskInput"); + def.setSchemaVersion(2); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); workflow.getInput().put("requestId", "request id 001"); Task task = new Task(); task.setReferenceTaskName("task2"); task.getOutputData().put("location", "http://location"); task.getOutputData().put("isPersonActive", true); workflow.getTasks().add(task); - workflow.setSchemaVersion(2); Map taskInput = parametersUtils.getTaskInput(ip, workflow, null, null); System.out.println(taskInput.get("complexJson")); assertNotNull(taskInput); @@ -309,14 +284,18 @@ public void testGetTaskInputV1() throws Exception { ip.put("workflowInputParam", "workflow.input.requestId"); ip.put("taskOutputParam", "task2.output.location"); + WorkflowDef def = new WorkflowDef(); + def.setSchemaVersion(1); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.getInput().put("requestId", "request id 001"); Task task = new Task(); task.setReferenceTaskName("task2"); task.getOutputData().put("location", "http://location"); task.getOutputData().put("isPersonActive", true); workflow.getTasks().add(task); - workflow.setSchemaVersion(1); Map taskInput = parametersUtils.getTaskInput(ip, workflow, null, null); assertNotNull(taskInput); @@ -389,14 +368,13 @@ public void testCaseStatement() throws Exception { WorkflowDef def = createConditionalWF(); Workflow wf = new Workflow(); + wf.setWorkflowDefinition(def); wf.setCreateTime(new Long(0)); wf.setWorkflowId("a"); wf.setCorrelationId("b"); - 
wf.setWorkflowType(def.getName()); - wf.setVersion(def.getVersion()); wf.setStatus(WorkflowStatus.RUNNING); - DeciderOutcome outcome = deciderService.decide(wf, def); + DeciderOutcome outcome = deciderService.decide(wf); List scheduledTasks = outcome.tasksToBeScheduled; assertNotNull(scheduledTasks); assertEquals(2, scheduledTasks.size()); @@ -528,7 +506,13 @@ public void testConcurrentTaskInputCalc() throws InterruptedException { workflowInput.put("inputLocation", "baggins://inputlocation/" + x); workflowInput.put("sourceType", "MuxedSource"); workflowInput.put("channelMapping", x); + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName("testConcurrentTaskInputCalc"); + workflowDef.setVersion(1); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(workflowDef); workflow.setInput(workflowInput); Map taskInput = pu.getTaskInputV2(new HashMap<>(), workflow, null, def); @@ -572,8 +556,9 @@ public void testConcurrentTaskInputCalc() throws InterruptedException { @SuppressWarnings("unchecked") @Test public void testTaskRetry() throws Exception { + Workflow workflow = createDefaultWorkflow(); - workflow.setSchemaVersion(2); + workflow.getWorkflowDefinition().setSchemaVersion(2); Map ip = new HashMap<>(); ip.put("workflowInputParam", "${workflow.input.requestId}"); @@ -617,10 +602,7 @@ public void testFork() throws Exception { InputStream stream = TestDeciderOutcomes.class.getResourceAsStream("/test.json"); Workflow workflow = objectMapper.readValue(stream, Workflow.class); - InputStream defs = TestDeciderOutcomes.class.getResourceAsStream("/def.json"); - WorkflowDef def = objectMapper.readValue(defs, WorkflowDef.class); - - DeciderOutcome outcome = deciderService.decide(workflow, def); + DeciderOutcome outcome = deciderService.decide(workflow); assertFalse(outcome.isComplete); assertEquals(5, outcome.tasksToBeScheduled.size()); assertEquals(1, outcome.tasksToBeUpdated.size()); @@ -631,8 +613,7 @@ public void testDecideSuccessfulWorkflow() throws Exception { WorkflowDef workflowDef = createLinearWorkflow(); Workflow workflow = new Workflow(); - workflow.setWorkflowType(workflowDef.getName()); - workflow.setVersion(workflowDef.getVersion()); + workflow.setWorkflowDefinition(workflowDef); workflow.setStatus(WorkflowStatus.RUNNING); Task task1 = new Task(); @@ -645,7 +626,7 @@ public void testDecideSuccessfulWorkflow() throws Exception { workflow.getTasks().add(task1); - DeciderOutcome deciderOutcome = deciderService.decide(workflow, workflowDef); + DeciderOutcome deciderOutcome = deciderService.decide(workflow); assertNotNull(deciderOutcome); assertTrue(workflow.getTaskByRefName("s1").isExecuted()); assertFalse(workflow.getTaskByRefName("s1").isRetried()); @@ -665,7 +646,7 @@ public void testDecideSuccessfulWorkflow() throws Exception { task2.setStatus(Status.COMPLETED); workflow.getTasks().add(task2); - deciderOutcome = deciderService.decide(workflow, workflowDef); + deciderOutcome = deciderService.decide(workflow); assertNotNull(deciderOutcome); assertTrue(workflow.getTaskByRefName("s2").isExecuted()); assertFalse(workflow.getTaskByRefName("s2").isRetried()); @@ -681,8 +662,7 @@ public void testDecideFailedTask() throws Exception { WorkflowDef workflowDef = createLinearWorkflow(); Workflow workflow = new Workflow(); - workflow.setWorkflowType(workflowDef.getName()); - workflow.setVersion(workflowDef.getVersion()); + workflow.setWorkflowDefinition(workflowDef); workflow.setStatus(WorkflowStatus.RUNNING); Task task = new Task(); @@ -695,7 +675,7 @@ public void 
testDecideFailedTask() throws Exception {
 
         workflow.getTasks().add(task);
 
-        DeciderOutcome deciderOutcome = deciderService.decide(workflow, workflowDef);
+        DeciderOutcome deciderOutcome = deciderService.decide(workflow);
         assertNotNull(deciderOutcome);
         assertFalse(workflow.getTaskByRefName("s1").isExecuted());
         assertTrue(workflow.getTaskByRefName("s1").isRetried());
@@ -712,15 +692,14 @@ public void testGetTasksToBeScheduled() throws Exception {
         WorkflowDef workflowDef = createLinearWorkflow();
 
         Workflow workflow = new Workflow();
-        workflow.setWorkflowType(workflowDef.getName());
-        workflow.setVersion(workflowDef.getVersion());
+        workflow.setWorkflowDefinition(workflowDef);
         workflow.setStatus(WorkflowStatus.RUNNING);
 
         WorkflowTask workflowTask1 = new WorkflowTask();
         workflowTask1.setTaskReferenceName("s1");
         workflowTask1.setType(Type.SIMPLE.name());
 
-        List<Task> tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflowDef, workflow, workflowTask1, 0, null);
+        List<Task> tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask1, 0, null);
         assertNotNull(tasksToBeScheduled);
         assertEquals(1, tasksToBeScheduled.size());
         assertEquals("s1", tasksToBeScheduled.get(0).getReferenceTaskName());
@@ -728,7 +707,7 @@
         WorkflowTask workflowTask2 = new WorkflowTask();
         workflowTask2.setTaskReferenceName("s2");
         workflowTask2.setType(Type.SIMPLE.name());
-        tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflowDef, workflow, workflowTask2, 0, null);
+        tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask2, 0, null);
         assertNotNull(tasksToBeScheduled);
         assertEquals(1, tasksToBeScheduled.size());
         assertEquals("s2", tasksToBeScheduled.get(0).getReferenceTaskName());
@@ -848,6 +827,49 @@ private WorkflowDef createLinearWorkflow() {
         return workflowDef;
     }
 
+    private Workflow createDefaultWorkflow() {
+
+        WorkflowDef workflowDef = new WorkflowDef();
+        workflowDef.setName("TestDeciderService");
+        workflowDef.setVersion(1);
+
+        Workflow workflow = new Workflow();
+        workflow.setWorkflowDefinition(workflowDef);
+
+        workflow.getInput().put("requestId", "request id 001");
+        workflow.getInput().put("hasAwards", true);
+        workflow.getInput().put("channelMapping", 5);
+        Map<String, Object> name = new HashMap<>();
+        name.put("name", "The Who");
+        name.put("year", 1970);
+        Map<String, Object> name2 = new HashMap<>();
+        name2.put("name", "The Doors");
+        name2.put("year", 1975);
+
+        List<Object> names = new LinkedList<>();
+        names.add(name);
+        names.add(name2);
+
+        workflow.getOutput().put("name", name);
+        workflow.getOutput().put("names", names);
+        workflow.getOutput().put("awards", 200);
+
+        Task task = new Task();
+        task.setReferenceTaskName("task2");
+        task.getOutputData().put("location", "http://location");
+        task.setStatus(Status.COMPLETED);
+
+        Task task2 = new Task();
+        task2.setReferenceTaskName("task3");
+        task2.getOutputData().put("refId", "abcddef_1234_7890_aaffcc");
+        task2.setStatus(Status.SCHEDULED);
+
+        workflow.getTasks().add(task);
+        workflow.getTasks().add(task2);
+
+        return workflow;
+    }
+
     private WorkflowDef createNestedWorkflow() {
 
         WorkflowDef workflowDef = new WorkflowDef();
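Note: the setup() change in this file is the pattern for stubbing the DAO under the new contract: getTaskDef still yields a bare TaskDef, while getLatest must now be stubbed with an Optional. A compact restatement of that Mockito arrangement, distilled from the diff (the wrapper class is illustrative; the stubbed values follow the test):

    import static org.mockito.Matchers.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.Optional;

    import com.netflix.conductor.common.metadata.tasks.TaskDef;
    import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
    import com.netflix.conductor.dao.MetadataDAO;

    class OptionalDaoStubExample {
        MetadataDAO mockMetadataDAO() {
            MetadataDAO metadataDAO = mock(MetadataDAO.class);

            WorkflowDef workflowDef = new WorkflowDef();
            workflowDef.setName("TestDeciderService");
            workflowDef.setVersion(1);

            // Task definitions are still returned directly; workflow definitions are wrapped.
            when(metadataDAO.getTaskDef(any())).thenReturn(new TaskDef());
            when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef));
            return metadataDAO;
        }
    }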
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java
index a07799a3ae..1b0168978c 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java
@@ -21,6 +21,7 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.netflix.conductor.common.metadata.tasks.Task;
 import com.netflix.conductor.common.metadata.tasks.Task.Status;
+import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
 import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
 import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type;
 import com.netflix.conductor.common.run.Workflow;
@@ -221,9 +222,12 @@ public void start(Workflow workflow, Task task, WorkflowExecutor executor) throw
     @Test
     @SuppressWarnings("unchecked")
     public void testCompleteWorkflow() throws Exception {
+        WorkflowDef def = new WorkflowDef();
+        def.setName("test");
+
         Workflow workflow = new Workflow();
+        workflow.setWorkflowDefinition(def);
         workflow.setWorkflowId("1");
-        workflow.setWorkflowType("test");
         workflow.setStatus(Workflow.WorkflowStatus.RUNNING);
         workflow.setOwnerApp("junit_test");
         workflow.setStartTime(10L);
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java
index ff8c19e43e..0e12f8b966 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java
@@ -8,11 +8,11 @@
 import com.netflix.conductor.core.execution.DeciderService;
 import com.netflix.conductor.core.execution.ParametersUtils;
 import com.netflix.conductor.core.utils.IDGenerator;
+
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
-import org.mockito.Mockito;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -20,7 +20,7 @@
 import java.util.List;
 import java.util.Map;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -91,11 +91,14 @@ public void getMappedTasks() throws Exception {
         decisionCases.put("odd", Arrays.asList(task3));
         decisionTask.setDecisionCases(decisionCases);
 
         //Workflow instance
+        WorkflowDef workflowDef = new WorkflowDef();
+        workflowDef.setSchemaVersion(2);
+
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(workflowDef);
         Map<String, Object> workflowInput = new HashMap<>();
         workflowInput.put("Id", "22");
         workflowInstance.setInput(workflowInput);
-        workflowInstance.setSchemaVersion(2);
 
         Map<String, Object> body = new HashMap<>();
         body.put("input", taskDefinitionInput);
@@ -105,14 +108,13 @@
                 workflowInstance, null, null);
 
-        WorkflowDef workflowDef = new WorkflowDef();
         Task theTask = new Task();
         theTask.setReferenceTaskName("Foo");
         theTask.setTaskId(IDGenerator.generate());
 
-        when(deciderService.getTasksToBeScheduled(workflowDef, workflowInstance, task2, 0, null))
+        when(deciderService.getTasksToBeScheduled(workflowInstance, task2, 0, null))
                 .thenReturn(Arrays.asList(theTask));
 
-        TaskMapperContext taskMapperContext = new TaskMapperContext(workflowDef, workflowInstance, decisionTask,
+        TaskMapperContext taskMapperContext = new TaskMapperContext(workflowInstance, decisionTask,
                 input, 0, null, IDGenerator.generate(), deciderService);
 
         //When
@@ -141,6 +143,7 @@ public void getEvaluatedCaseValue() throws Exception {
         decisionTask.setDecisionCases(decisionCases);
 
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(new WorkflowDef());
         Map<String, Object> workflowInput = new HashMap<>();
         workflowInput.put("p1", "workflow.input.param1");
         workflowInput.put("p2", "workflow.input.param2");
@@ -177,12 +180,16 @@ public void getEvaluatedCaseValueUsingExpression() throws Exception {
         decisionCases.put("even", Arrays.asList(task2));
         decisionCases.put("odd", Arrays.asList(task3));
         decisionTask.setDecisionCases(decisionCases);
+
         //Workflow instance
+        WorkflowDef def = new WorkflowDef();
+        def.setSchemaVersion(2);
+
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(def);
         Map<String, Object> workflowInput = new HashMap<>();
         workflowInput.put("Id", "22");
         workflowInstance.setInput(workflowInput);
-        workflowInstance.setSchemaVersion(2);
 
         Map<String, Object> body = new HashMap<>();
         body.put("input", taskDefinitionInput);
@@ -219,12 +226,16 @@ public void getEvaluatedCaseValueException() {
         decisionCases.put("even", Arrays.asList(task2));
         decisionCases.put("odd", Arrays.asList(task3));
         decisionTask.setDecisionCases(decisionCases);
+
         //Workflow instance
+        WorkflowDef def = new WorkflowDef();
+        def.setSchemaVersion(2);
+
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(def);
         Map<String, Object> workflowInput = new HashMap<>();
         workflowInput.put(".Id", "22");
         workflowInstance.setInput(workflowInput);
-        workflowInstance.setSchemaVersion(2);
 
         Map<String, Object> body = new HashMap<>();
         body.put("input", taskDefinitionInput);
@@ -244,4 +255,4 @@
 
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java
index 1a4a1fcf58..245d7fd327 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java
@@ -57,7 +57,12 @@ public void getMappedTasks() throws Exception {
         when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(TaskDef.class), anyString())).thenReturn(taskInput);
 
         String taskId = IDGenerator.generate();
-        TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), workflowTask, taskInput, 0, null, taskId, null);
+
+        WorkflowDef wd = new WorkflowDef();
+        Workflow w = new Workflow();
+        w.setWorkflowDefinition(wd);
+
+        TaskMapperContext taskMapperContext = new TaskMapperContext(w, workflowTask, taskInput, 0, null, taskId, null);
 
         List<Task> mappedTasks = dynamicTaskMapper.getMappedTasks(taskMapperContext);
 
@@ -121,4 +126,4 @@ public void getDynamicTaskDefinitionNull() {
     }
 
-}
\ No newline at end of file
+}
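Note: the mapper tests all make the same mechanical change: TaskMapperContext no longer takes a leading WorkflowDef, and the mappers read the definition from the workflow itself. A minimal sketch of the new construction (the wrapper class and argument values are placeholders; the trailing null stands in for a DeciderService, which the simple mappers do not need):

    import java.util.HashMap;

    import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
    import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
    import com.netflix.conductor.common.run.Workflow;
    import com.netflix.conductor.core.execution.mapper.TaskMapperContext;
    import com.netflix.conductor.core.utils.IDGenerator;

    class MapperContextExample {
        TaskMapperContext newContext() {
            Workflow workflow = new Workflow();
            workflow.setWorkflowDefinition(new WorkflowDef()); // the context resolves the definition from here

            WorkflowTask taskToSchedule = new WorkflowTask();
            return new TaskMapperContext(
                    workflow,                  // previously (def, workflow, ...)
                    taskToSchedule,
                    new HashMap<>(),           // task input
                    0,                         // retry count
                    null,                      // retried task id
                    IDGenerator.generate(),    // task id
                    null);                     // decider service
        }
    }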
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java
index 2a5ed99e51..ce5398f3a7 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java
@@ -38,7 +38,11 @@ public void getMappedTasks() throws Exception {
         when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(TaskDef.class), anyString())).thenReturn(eventTaskInput);
 
-        TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToBeScheduled, null, 0, null, taskId, null);
+        WorkflowDef wd = new WorkflowDef();
+        Workflow w = new Workflow();
+        w.setWorkflowDefinition(wd);
+
+        TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToBeScheduled, null, 0, null, taskId, null);
 
         List<Task> mappedTasks = eventTaskMapper.getMappedTasks(taskMapperContext);
         assertEquals(1, mappedTasks.size());
@@ -48,4 +52,4 @@
     }
 
-}
\ No newline at end of file
+}
diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java
index 77e368136b..e4b1a69e61 100644
--- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java
+++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java
@@ -65,6 +65,7 @@ public void getMappedTasksException() throws Exception {
         def.setInputParameters(Arrays.asList("param1", "param2"));
 
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(def);
 
         WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
         dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name());
@@ -113,12 +114,12 @@ public void getMappedTasksException() throws Exception {
         Task simpleTask2 = new Task();
         simpleTask2.setReferenceTaskName("xdt2");
 
-        when(deciderService.getTasksToBeScheduled(def, workflowInstance, wt2, 0 )).thenReturn(Arrays.asList(simpleTask1));
-        when(deciderService.getTasksToBeScheduled(def, workflowInstance, wt3, 0 )).thenReturn(Arrays.asList(simpleTask2));
+        when(deciderService.getTasksToBeScheduled(workflowInstance, wt2, 0 )).thenReturn(Arrays.asList(simpleTask1));
+        when(deciderService.getTasksToBeScheduled(workflowInstance, wt3, 0 )).thenReturn(Arrays.asList(simpleTask2));
 
         String taskId = IDGenerator.generate();
-        TaskMapperContext taskMapperContext = new TaskMapperContext(def, workflowInstance, dynamicForkJoinToSchedule, null,0, null, taskId, deciderService);
+        TaskMapperContext taskMapperContext = new TaskMapperContext(workflowInstance, dynamicForkJoinToSchedule, null,0, null, taskId, deciderService);
 
         //then
         expectedException.expect(TerminateWorkflowException.class);
 
         forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext);
@@ -135,6 +136,7 @@ public void getMappedTasks() throws Exception {
         def.setInputParameters(Arrays.asList("param1", "param2"));
 
         Workflow workflowInstance = new Workflow();
+        workflowInstance.setWorkflowDefinition(def);
 
         WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask();
         dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name());
@@ -184,12 +186,12 @@ public void getMappedTasks() throws Exception {
         Task simpleTask2 = new Task();
         simpleTask2.setReferenceTaskName("xdt2");
 
-        when(deciderService.getTasksToBeScheduled(def, workflowInstance, wt2, 0 )).thenReturn(Arrays.asList(simpleTask1));
-        when(deciderService.getTasksToBeScheduled(def, workflowInstance, wt3, 0 )).thenReturn(Arrays.asList(simpleTask2));
+        when(deciderService.getTasksToBeScheduled(workflowInstance, wt2, 0 )).thenReturn(Arrays.asList(simpleTask1));
+        when(deciderService.getTasksToBeScheduled(workflowInstance, wt3, 0 )).thenReturn(Arrays.asList(simpleTask2));
 
         String taskId = IDGenerator.generate();
-        TaskMapperContext taskMapperContext = new TaskMapperContext(def, workflowInstance, dynamicForkJoinToSchedule, null,0, null, taskId, deciderService);
+        TaskMapperContext taskMapperContext = new TaskMapperContext(workflowInstance, dynamicForkJoinToSchedule, null,0, null, taskId,
deciderService); //then List mappedTasks = forkJoinDynamicTaskMapper.getMappedTasks(taskMapperContext); @@ -361,4 +363,4 @@ public void getDynamicForkTasksAndInputException() throws Exception { forkJoinDynamicTaskMapper.getDynamicForkTasksAndInput(dynamicForkJoinToSchedule, new Workflow(), "dynamicTasks"); } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java index d4be91ac3c..fd016e2b7b 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java @@ -88,6 +88,7 @@ public void getMappedTasks() throws Exception { def.getTasks().add(wft4); Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); Task task1 = new Task(); task1.setReferenceTaskName(wft1.getTaskReferenceName()); @@ -95,12 +96,12 @@ public void getMappedTasks() throws Exception { Task task3 = new Task(); task3.setReferenceTaskName(wft3.getTaskReferenceName()); - Mockito.when(deciderService.getTasksToBeScheduled(def, workflow, wft1,0)).thenReturn(Arrays.asList(task1)); - Mockito.when(deciderService.getTasksToBeScheduled(def, workflow, wft2,0)).thenReturn(Arrays.asList(task3)); + Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1,0)).thenReturn(Arrays.asList(task1)); + Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2,0)).thenReturn(Arrays.asList(task3)); String taskId = IDGenerator.generate(); - TaskMapperContext taskMapperContext = new TaskMapperContext(def,workflow, forkTask, null, 0 , null, taskId, deciderService); + TaskMapperContext taskMapperContext = new TaskMapperContext(workflow, forkTask, null, 0 , null, taskId, deciderService); List mappedTasks = forkJoinTaskMapper.getMappedTasks(taskMapperContext); assertEquals(3, mappedTasks.size()); @@ -160,6 +161,7 @@ public void getMappedTasksException() throws Exception { def.getTasks().add(wft4); Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); Task task1 = new Task(); task1.setReferenceTaskName(wft1.getTaskReferenceName()); @@ -167,16 +169,16 @@ public void getMappedTasksException() throws Exception { Task task3 = new Task(); task3.setReferenceTaskName(wft3.getTaskReferenceName()); - Mockito.when(deciderService.getTasksToBeScheduled(def, workflow, wft1,0)).thenReturn(Arrays.asList(task1)); - Mockito.when(deciderService.getTasksToBeScheduled(def, workflow, wft2,0)).thenReturn(Arrays.asList(task3)); + Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft1,0)).thenReturn(Arrays.asList(task1)); + Mockito.when(deciderService.getTasksToBeScheduled(workflow, wft2,0)).thenReturn(Arrays.asList(task3)); String taskId = IDGenerator.generate(); - TaskMapperContext taskMapperContext = new TaskMapperContext(def,workflow, forkTask, null, 0 , null, taskId, deciderService); + TaskMapperContext taskMapperContext = new TaskMapperContext(workflow, forkTask, null, 0 , null, taskId, deciderService); expectedException.expect(TerminateWorkflowException.class); expectedException.expectMessage("Dynamic join definition is not followed by a join task. 
Check the blueprint"); forkJoinTaskMapper.getMappedTasks(taskMapperContext); } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java index 6220faf3d8..e6ade3bf4e 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java @@ -25,7 +25,11 @@ public void getMappedTasks() throws Exception { String taskId = IDGenerator.generate(); - TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToSchedule, + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, null, 0, null, taskId, null); List mappedTasks = new JoinTaskMapper().getMappedTasks(taskMapperContext); @@ -34,4 +38,4 @@ public void getMappedTasks() throws Exception { assertEquals(SystemTaskType.JOIN.name(), mappedTasks.get(0).getTaskType()); } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java index d56291b70b..1e1b20a544 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java @@ -50,7 +50,11 @@ public void getMappedTasks() throws Exception { when(metadataDAO.getTaskDef("simple_task")).thenReturn(new TaskDef()); - TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); List mappedTasks = simpleTaskMapper.getMappedTasks(taskMapperContext); assertNotNull(mappedTasks); @@ -65,7 +69,12 @@ public void getMappedTasksException() throws Exception { taskToSchedule.setName("simple_task"); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); - TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); + + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); when(metadataDAO.getTaskDef("simple_task")).thenReturn(null); //then @@ -78,4 +87,4 @@ public void getMappedTasksException() throws Exception { } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java index 9b560f56ff..dbb0f52fa2 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java @@ -20,6 +20,7 @@ import java.util.HashMap; import java.util.List; import 
java.util.Map; +import java.util.Optional; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -52,6 +53,7 @@ public void getMappedTasks() throws Exception { //Given WorkflowDef workflowDef = new WorkflowDef(); Workflow workflowInstance = new Workflow(); + workflowInstance.setWorkflowDefinition(workflowDef); WorkflowTask taskToSchedule = new WorkflowTask(); SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); subWorkflowParams.setName("Foo"); @@ -66,7 +68,7 @@ public void getMappedTasks() throws Exception { .thenReturn(subWorkflowParamMap); //When - TaskMapperContext taskMapperContext = new TaskMapperContext(workflowDef, workflowInstance, taskToSchedule, + TaskMapperContext taskMapperContext = new TaskMapperContext(workflowInstance, taskToSchedule, taskInput, 0, null, IDGenerator.generate(), deciderService); List mappedTasks = subWorkflowTaskMapper.getMappedTasks(taskMapperContext); @@ -121,7 +123,7 @@ public void getSubworkflowVersionFromMeta() throws Exception { WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setName("FooWorkFlow"); workflowDef.setVersion(2); - when(metadataDAO.getLatest(any())).thenReturn(workflowDef); + when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef)); Integer version = subWorkflowTaskMapper.getSubWorkflowVersion(subWorkflowParamMap, "FooWorkFlow"); @@ -131,7 +133,7 @@ public void getSubworkflowVersionFromMeta() throws Exception { @Test public void getSubworkflowVersionFromMetaException() throws Exception { Map subWorkflowParamMap = new HashMap<>(); - when(metadataDAO.getLatest(any())).thenReturn(null); + when(metadataDAO.getLatest(any())).thenReturn(Optional.empty()); expectedException.expect(TerminateWorkflowException.class); expectedException.expectMessage(String.format("The Task %s defined as a sub-workflow has no workflow definition available ", "FooWorkFlow")); @@ -139,4 +141,4 @@ public void getSubworkflowVersionFromMetaException() throws Exception { subWorkflowTaskMapper.getSubWorkflowVersion(subWorkflowParamMap, "FooWorkFlow"); } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java index 8f4faaac45..c7f38a1d56 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java @@ -48,7 +48,12 @@ public void getMappedTasks() throws Exception { String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); when(metadataDAO.getTaskDef("user_task")).thenReturn(new TaskDef()); - TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); + + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); //when List mappedTasks = userDefinedTaskMapper.getMappedTasks(taskMapperContext); @@ -67,7 +72,12 @@ public void getMappedTasksException() throws Exception { String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); when(metadataDAO.getTaskDef("user_task")).thenReturn(null); - TaskMapperContext taskMapperContext = new TaskMapperContext(new 
WorkflowDef(), new Workflow(), taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); + + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); //then expectedException.expect(TerminateWorkflowException.class); @@ -77,4 +87,4 @@ public void getMappedTasksException() throws Exception { } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java index 4166ef6f47..6fcf7d9e17 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java @@ -27,7 +27,12 @@ public void getMappedTasks() throws Exception { String taskId = IDGenerator.generate(); ParametersUtils parametersUtils = new ParametersUtils(); - TaskMapperContext taskMapperContext = new TaskMapperContext(new WorkflowDef(), new Workflow(), taskToSchedule, new HashMap<>(), 0, null, taskId, null); + + WorkflowDef wd = new WorkflowDef(); + Workflow w = new Workflow(); + w.setWorkflowDefinition(wd); + + TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, null, taskId, null); WaitTaskMapper waitTaskMapper = new WaitTaskMapper(parametersUtils); //When List mappedTasks = waitTaskMapper.getMappedTasks(taskMapperContext); @@ -41,4 +46,4 @@ public void getMappedTasks() throws Exception { } -} \ No newline at end of file +} diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java index d4b3637954..48efcc5e54 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java @@ -1,46 +1,26 @@ /** * Copyright 2017 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. 
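
An aside on the SubWorkflowTaskMapperTest hunk above: MetadataDAO.getLatest now returns Optional&lt;WorkflowDef&gt; rather than a nullable value, which is why the stubs switched from workflowDef/null to Optional.of(workflowDef)/Optional.empty(). A sketch of the calling pattern this enables, not the mapper's actual code (helper name ours; the message matches the one asserted in getSubworkflowVersionFromMetaException):

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.core.execution.TerminateWorkflowException;
import com.netflix.conductor.dao.MetadataDAO;

class SubWorkflowVersion {
    // Absence of a definition becomes an explicit exception instead of a
    // null check scattered through the caller.
    static int latest(MetadataDAO metadataDAO, String name) {
        return metadataDAO.getLatest(name)
                .map(WorkflowDef::getVersion)
                .orElseThrow(() -> new TerminateWorkflowException(String.format(
                        "The Task %s defined as a sub-workflow has no workflow definition available ", name)));
    }
}
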
*/ /** - * + * */ package com.netflix.conductor.core.execution.tasks; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; - -import com.netflix.conductor.core.events.EventQueues; -import org.junit.Before; -import org.junit.Test; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.events.MockQueueProvider; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.events.queue.ObservableQueue; @@ -49,237 +29,261 @@ import com.netflix.conductor.core.execution.TestConfiguration; import com.netflix.conductor.dao.QueueDAO; +import org.junit.Before; +import org.junit.Test; +import org.mockito.invocation.InvocationOnMock; +import org.mockito.stubbing.Answer; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.mock; + /** * @author Viren * */ public class TestEvent { - @Before - public void setup() { - EventQueues.providers = new HashMap<>(); - EventQueues.providers.put("sqs", new MockQueueProvider("sqs")); - EventQueues.providers.put("conductor", new MockQueueProvider("conductor")); - } - - @Test - public void testEvent() { - System.setProperty("QUEUE_NAME", "queue_name_001"); - ParametersUtils pu = new ParametersUtils(); - String eventt = "queue_${QUEUE_NAME}"; - String event = pu.replace(eventt).toString(); - assertNotNull(event); - assertEquals("queue_queue_name_001", event); - - eventt = "queue_9"; - event = pu.replace(eventt).toString(); - assertNotNull(event); - assertEquals(eventt, event); - } - - @Test - public void testSinkParam() { - String sink = "sqs:queue_name"; - - Workflow workflow = new Workflow(); - workflow.setWorkflowType("wf0"); - - Task task1 = new Task(); - task1.setReferenceTaskName("t1"); - task1.getOutputData().put("q", "t1_queue"); - workflow.getTasks().add(task1); - - Task task2 = new Task(); - task2.setReferenceTaskName("t2"); - task2.getOutputData().put("q", "task2_queue"); - workflow.getTasks().add(task2); - - Task task = new Task(); - task.setReferenceTaskName("event"); - task.getInputData().put("sink", sink); - task.setTaskType(WorkflowTask.Type.EVENT.name()); - workflow.getTasks().add(task); - - Event event = new Event(); - ObservableQueue queue = event.getQueue(workflow, task); - assertNotNull(task.getReasonForIncompletion(), queue); - assertEquals("queue_name", queue.getName()); - assertEquals("sqs", queue.getType()); - - sink = "sqs:${t1.output.q}"; - task.getInputData().put("sink", sink); - 
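
A note on the TestEvent rewrite that begins here: the old tests set the type and version directly on the Workflow, while the rewritten ones carry both on an attached WorkflowDef, with getWorkflowType() derived from it. A minimal sketch of the new fixture (helper name ours):

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;

class EventFixtures {
    // Was: workflow.setWorkflowType("testWorkflow"); workflow.setVersion(2);
    // Now the name and version live on the definition, and the workflow
    // reports its type from the attached WorkflowDef.
    static Workflow workflowNamed(String name, int version) {
        WorkflowDef def = new WorkflowDef();
        def.setName(name);
        def.setVersion(version);

        Workflow workflow = new Workflow();
        workflow.setWorkflowDefinition(def);
        return workflow;
    }
}
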
queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("t1_queue", queue.getName()); - assertEquals("sqs", queue.getType()); - System.out.println(task.getOutputData().get("event_produced")); - - sink = "sqs:${t2.output.q}"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("task2_queue", queue.getName()); - assertEquals("sqs", queue.getType()); - System.out.println(task.getOutputData().get("event_produced")); - - sink = "conductor"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals(workflow.getWorkflowType() + ":" + task.getReferenceTaskName(), queue.getName()); - assertEquals("conductor", queue.getType()); - System.out.println(task.getOutputData().get("event_produced")); - - sink = "sqs:static_value"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNotNull(queue); - assertEquals("static_value", queue.getName()); - assertEquals("sqs", queue.getType()); - assertEquals(sink, task.getOutputData().get("event_produced")); - System.out.println(task.getOutputData().get("event_produced")); - - sink = "bad:queue"; - task.getInputData().put("sink", sink); - queue = event.getQueue(workflow, task); - assertNull(queue); - assertEquals(Task.Status.FAILED, task.getStatus()); - } - - @Test - public void test() throws Exception { - Event event = new Event(); - Workflow workflow = new Workflow(); - workflow.setWorkflowType("testWorkflow"); - workflow.setVersion(2); - - Task task = new Task(); - task.getInputData().put("sink", "conductor"); - task.setReferenceTaskName("task0"); - task.setTaskId("task_id_0"); - - QueueDAO dao = mock(QueueDAO.class); - String[] publishedQueue = new String[1]; - List publishedMessages = new LinkedList<>(); - - doAnswer(new Answer() { - - @SuppressWarnings("unchecked") - @Override - public Void answer(InvocationOnMock invocation) throws Throwable { - String queueName = invocation.getArgumentAt(0, String.class); - System.out.println(queueName); - publishedQueue[0] = queueName; - List messages = invocation.getArgumentAt(1, List.class); - publishedMessages.addAll(messages); - return null; - } - }).when(dao).push(any(), any()); - - doAnswer(new Answer>() { - - @Override - public List answer(InvocationOnMock invocation) throws Throwable { - String messageId = invocation.getArgumentAt(1, String.class); - if(publishedMessages.get(0).getId().equals(messageId)) { - publishedMessages.remove(0); - return Arrays.asList(messageId); - } - return null; - } - }).when(dao).remove(any(), any()); - - EventQueues.providers.put("conductor", new DynoEventQueueProvider(dao, new TestConfiguration())); - event.start(workflow, task, null); - - assertEquals(Task.Status.COMPLETED, task.getStatus()); - assertNotNull(task.getOutputData()); - assertEquals("conductor:" + workflow.getWorkflowType() + ":" + task.getReferenceTaskName(), task.getOutputData().get("event_produced")); - assertEquals(task.getOutputData().get("event_produced"), "conductor:" + publishedQueue[0]); - assertEquals(1, publishedMessages.size()); - assertEquals(task.getTaskId(), publishedMessages.get(0).getId()); - assertNotNull(publishedMessages.get(0).getPayload()); - - event.cancel(workflow, task, null); - assertTrue(publishedMessages.isEmpty()); - } - - - @Test - public void testFailures() throws Exception { - Event event = new Event(); - Workflow workflow = new Workflow(); - workflow.setWorkflowType("testWorkflow"); - 
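
The test() body being removed here (and re-added further down) stubs QueueDAO so that push() records published messages and remove() acknowledges them. A condensed sketch of that stubbing, assuming the Mockito 1.x API (getArgumentAt) the file already uses; here dao is the mock(QueueDAO.class) created in the test:

import com.netflix.conductor.core.events.queue.Message;
import com.netflix.conductor.dao.QueueDAO;

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;

class QueueStubs {
    // push(queue, messages) appends to a captured list; remove(queue, id)
    // acknowledges a captured message and returns its id, so the test can
    // assert one publish on Event.start and an empty list after Event.cancel.
    @SuppressWarnings("unchecked")
    static List<Message> capturePublishes(QueueDAO dao) {
        List<Message> published = new LinkedList<>();
        doAnswer(invocation -> {
            published.addAll(invocation.getArgumentAt(1, List.class));
            return null;
        }).when(dao).push(any(), any());
        doAnswer(invocation -> {
            String messageId = invocation.getArgumentAt(1, String.class);
            boolean removed = published.removeIf(m -> m.getId().equals(messageId));
            return removed ? Arrays.asList(messageId) : null;
        }).when(dao).remove(any(), any());
        return published;
    }
}
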
workflow.setVersion(2); - - Task task = new Task(); - task.setReferenceTaskName("task0"); - task.setTaskId("task_id_0"); - - event.start(workflow, task, null); - assertEquals(Task.Status.FAILED, task.getStatus()); - assertTrue(task.getReasonForIncompletion() != null); - System.out.println(task.getReasonForIncompletion()); - - task.getInputData().put("sink", "bad_sink"); - task.setStatus(Status.SCHEDULED); - - event.start(workflow, task, null); - assertEquals(Task.Status.FAILED, task.getStatus()); - assertTrue(task.getReasonForIncompletion() != null); - System.out.println(task.getReasonForIncompletion()); - - task.setStatus(Status.SCHEDULED); - task.setScheduledTime(System.currentTimeMillis()); - event.execute(workflow, task, null); - assertEquals(Task.Status.SCHEDULED, task.getStatus()); - - task.setScheduledTime(System.currentTimeMillis() - 610_000); - event.start(workflow, task, null); - assertEquals(Task.Status.FAILED, task.getStatus()); - } - - @Test - public void testDynamicSinks() { - - Event event = new Event(); - Workflow workflow = new Workflow(); - workflow.setWorkflowType("testWorkflow"); - workflow.setVersion(2); - - Task task = new Task(); - task.setReferenceTaskName("task0"); - task.setTaskId("task_id_0"); - task.setStatus(Status.IN_PROGRESS); - task.getInputData().put("sink", "conductor:some_arbitary_queue"); - - - ObservableQueue queue = event.getQueue(workflow, task); - assertEquals(Task.Status.IN_PROGRESS, task.getStatus()); - assertNotNull(queue); - assertEquals("testWorkflow:some_arbitary_queue", queue.getName()); - assertEquals("testWorkflow:some_arbitary_queue", queue.getURI()); - assertEquals("conductor", queue.getType()); - assertEquals("conductor:testWorkflow:some_arbitary_queue", task.getOutputData().get("event_produced")); - - task.getInputData().put("sink", "conductor"); - queue = event.getQueue(workflow, task); - assertEquals("not in progress: " + task.getReasonForIncompletion(), Task.Status.IN_PROGRESS, task.getStatus()); - assertNotNull(queue); - assertEquals("testWorkflow:task0", queue.getName()); - - task.getInputData().put("sink", "sqs:my_sqs_queue_name"); - queue = event.getQueue(workflow, task); - assertEquals("not in progress: " + task.getReasonForIncompletion(), Task.Status.IN_PROGRESS, task.getStatus()); - assertNotNull(queue); - assertEquals("my_sqs_queue_name", queue.getName()); - assertEquals("sqs", queue.getType()); - - task.getInputData().put("sink", "sns:my_sqs_queue_name"); - queue = event.getQueue(workflow, task); - assertEquals(Task.Status.FAILED, task.getStatus()); - - - } - + WorkflowDef testWorkflowDefinition; + + @Before + public void setup() { + EventQueues.providers = new HashMap<>(); + EventQueues.providers.put("sqs", new MockQueueProvider("sqs")); + EventQueues.providers.put("conductor", new MockQueueProvider("conductor")); + + testWorkflowDefinition = new WorkflowDef(); + testWorkflowDefinition.setName("testWorkflow"); + testWorkflowDefinition.setVersion(2); + } + + @Test + public void testEvent() { + System.setProperty("QUEUE_NAME", "queue_name_001"); + ParametersUtils pu = new ParametersUtils(); + String eventt = "queue_${QUEUE_NAME}"; + String event = pu.replace(eventt).toString(); + assertNotNull(event); + assertEquals("queue_queue_name_001", event); + + eventt = "queue_9"; + event = pu.replace(eventt).toString(); + assertNotNull(event); + assertEquals(eventt, event); + } + + @Test + public void testSinkParam() { + String sink = "sqs:queue_name"; + + WorkflowDef def = new WorkflowDef(); + def.setName("wf0"); + + Workflow workflow 
= new Workflow(); + workflow.setWorkflowDefinition(def); + + Task task1 = new Task(); + task1.setReferenceTaskName("t1"); + task1.getOutputData().put("q", "t1_queue"); + workflow.getTasks().add(task1); + + Task task2 = new Task(); + task2.setReferenceTaskName("t2"); + task2.getOutputData().put("q", "task2_queue"); + workflow.getTasks().add(task2); + + Task task = new Task(); + task.setReferenceTaskName("event"); + task.getInputData().put("sink", sink); + task.setTaskType(WorkflowTask.Type.EVENT.name()); + workflow.getTasks().add(task); + + Event event = new Event(); + ObservableQueue queue = event.getQueue(workflow, task); + assertNotNull(task.getReasonForIncompletion(), queue); + assertEquals("queue_name", queue.getName()); + assertEquals("sqs", queue.getType()); + + sink = "sqs:${t1.output.q}"; + task.getInputData().put("sink", sink); + queue = event.getQueue(workflow, task); + assertNotNull(queue); + assertEquals("t1_queue", queue.getName()); + assertEquals("sqs", queue.getType()); + System.out.println(task.getOutputData().get("event_produced")); + + sink = "sqs:${t2.output.q}"; + task.getInputData().put("sink", sink); + queue = event.getQueue(workflow, task); + assertNotNull(queue); + assertEquals("task2_queue", queue.getName()); + assertEquals("sqs", queue.getType()); + System.out.println(task.getOutputData().get("event_produced")); + + sink = "conductor"; + task.getInputData().put("sink", sink); + queue = event.getQueue(workflow, task); + assertNotNull(queue); + assertEquals(workflow.getWorkflowType() + ":" + task.getReferenceTaskName(), queue.getName()); + assertEquals("conductor", queue.getType()); + System.out.println(task.getOutputData().get("event_produced")); + + sink = "sqs:static_value"; + task.getInputData().put("sink", sink); + queue = event.getQueue(workflow, task); + assertNotNull(queue); + assertEquals("static_value", queue.getName()); + assertEquals("sqs", queue.getType()); + assertEquals(sink, task.getOutputData().get("event_produced")); + System.out.println(task.getOutputData().get("event_produced")); + + sink = "bad:queue"; + task.getInputData().put("sink", sink); + queue = event.getQueue(workflow, task); + assertNull(queue); + assertEquals(Task.Status.FAILED, task.getStatus()); + } + + @Test + public void test() throws Exception { + Event event = new Event(); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(testWorkflowDefinition); + + Task task = new Task(); + task.getInputData().put("sink", "conductor"); + task.setReferenceTaskName("task0"); + task.setTaskId("task_id_0"); + + QueueDAO dao = mock(QueueDAO.class); + String[] publishedQueue = new String[1]; + List publishedMessages = new LinkedList<>(); + + doAnswer(new Answer() { + + @SuppressWarnings("unchecked") + @Override + public Void answer(InvocationOnMock invocation) throws Throwable { + String queueName = invocation.getArgumentAt(0, String.class); + System.out.println(queueName); + publishedQueue[0] = queueName; + List messages = invocation.getArgumentAt(1, List.class); + publishedMessages.addAll(messages); + return null; + } + }).when(dao).push(any(), any()); + + doAnswer(new Answer>() { + + @Override + public List answer(InvocationOnMock invocation) throws Throwable { + String messageId = invocation.getArgumentAt(1, String.class); + if (publishedMessages.get(0).getId().equals(messageId)) { + publishedMessages.remove(0); + return Arrays.asList(messageId); + } + return null; + } + }).when(dao).remove(any(), any()); + + EventQueues.providers.put("conductor", new 
DynoEventQueueProvider(dao, new TestConfiguration())); + event.start(workflow, task, null); + + assertEquals(Task.Status.COMPLETED, task.getStatus()); + assertNotNull(task.getOutputData()); + assertEquals("conductor:" + workflow.getWorkflowType() + ":" + task.getReferenceTaskName(), task.getOutputData().get("event_produced")); + assertEquals(task.getOutputData().get("event_produced"), "conductor:" + publishedQueue[0]); + assertEquals(1, publishedMessages.size()); + assertEquals(task.getTaskId(), publishedMessages.get(0).getId()); + assertNotNull(publishedMessages.get(0).getPayload()); + + event.cancel(workflow, task, null); + assertTrue(publishedMessages.isEmpty()); + } + + + @Test + public void testFailures() throws Exception { + Event event = new Event(); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(testWorkflowDefinition); + + Task task = new Task(); + task.setReferenceTaskName("task0"); + task.setTaskId("task_id_0"); + + event.start(workflow, task, null); + assertEquals(Task.Status.FAILED, task.getStatus()); + assertTrue(task.getReasonForIncompletion() != null); + System.out.println(task.getReasonForIncompletion()); + + task.getInputData().put("sink", "bad_sink"); + task.setStatus(Status.SCHEDULED); + + event.start(workflow, task, null); + assertEquals(Task.Status.FAILED, task.getStatus()); + assertTrue(task.getReasonForIncompletion() != null); + System.out.println(task.getReasonForIncompletion()); + + task.setStatus(Status.SCHEDULED); + task.setScheduledTime(System.currentTimeMillis()); + event.execute(workflow, task, null); + assertEquals(Task.Status.SCHEDULED, task.getStatus()); + + task.setScheduledTime(System.currentTimeMillis() - 610_000); + event.start(workflow, task, null); + assertEquals(Task.Status.FAILED, task.getStatus()); + } + + @Test + public void testDynamicSinks() { + + Event event = new Event(); + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(testWorkflowDefinition); + + Task task = new Task(); + task.setReferenceTaskName("task0"); + task.setTaskId("task_id_0"); + task.setStatus(Status.IN_PROGRESS); + task.getInputData().put("sink", "conductor:some_arbitary_queue"); + + + ObservableQueue queue = event.getQueue(workflow, task); + assertEquals(Task.Status.IN_PROGRESS, task.getStatus()); + assertNotNull(queue); + assertEquals("testWorkflow:some_arbitary_queue", queue.getName()); + assertEquals("testWorkflow:some_arbitary_queue", queue.getURI()); + assertEquals("conductor", queue.getType()); + assertEquals("conductor:testWorkflow:some_arbitary_queue", task.getOutputData().get("event_produced")); + + task.getInputData().put("sink", "conductor"); + queue = event.getQueue(workflow, task); + assertEquals("not in progress: " + task.getReasonForIncompletion(), Task.Status.IN_PROGRESS, task.getStatus()); + assertNotNull(queue); + assertEquals("testWorkflow:task0", queue.getName()); + + task.getInputData().put("sink", "sqs:my_sqs_queue_name"); + queue = event.getQueue(workflow, task); + assertEquals("not in progress: " + task.getReasonForIncompletion(), Task.Status.IN_PROGRESS, task.getStatus()); + assertNotNull(queue); + assertEquals("my_sqs_queue_name", queue.getName()); + assertEquals("sqs", queue.getType()); + + task.getInputData().put("sink", "sns:my_sqs_queue_name"); + queue = event.getQueue(workflow, task); + assertEquals(Task.Status.FAILED, task.getStatus()); + + + } + } diff --git a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java new 
file mode 100644 index 0000000000..0b480b16c8 --- /dev/null +++ b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java @@ -0,0 +1,410 @@ +package com.netflix.conductor.dao; + +import com.netflix.conductor.common.metadata.tasks.PollData; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.core.execution.ApplicationException; + +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +import static junit.framework.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +public abstract class ExecutionDAOTest { + + abstract protected ExecutionDAO getExecutionDAO(); + + abstract protected MetadataDAO getMetadataDAO(); + + @Rule + public ExpectedException expected = ExpectedException.none(); + + @Test + public void testTaskExceedsLimit() throws Exception { + TaskDef def = new TaskDef(); + def.setName("task1"); + def.setConcurrentExecLimit(1); + getMetadataDAO().createTaskDef(def); + + List tasks = new LinkedList<>(); + for (int i = 0; i < 15; i++) { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId("t_" + i); + task.setWorkflowInstanceId("workflow_" + i); + task.setReferenceTaskName("task1"); + task.setTaskDefName("task1"); + tasks.add(task); + task.setStatus(Task.Status.SCHEDULED); + } + + getExecutionDAO().createTasks(tasks); + assertFalse(getExecutionDAO().exceedsInProgressLimit(tasks.get(0))); + tasks.get(0).setStatus(Task.Status.IN_PROGRESS); + getExecutionDAO().updateTask(tasks.get(0)); + + for (Task task : tasks) { + assertTrue(getExecutionDAO().exceedsInProgressLimit(task)); + } + } + + @Test + public void testCreateTaskException() throws Exception { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId("t1"); + task.setTaskDefName("task1"); + + expected.expect(ApplicationException.class); + expected.expectMessage("Workflow instance id cannot be null"); + getExecutionDAO().createTasks(Collections.singletonList(task)); + + task.setWorkflowInstanceId("wfid"); + expected.expect(ApplicationException.class); + expected.expectMessage("Task reference name cannot be null"); + getExecutionDAO().createTasks(Collections.singletonList(task)); + } + + @Test + public void testCreateTaskException2() throws Exception { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId("t1"); + task.setTaskDefName("task1"); + task.setWorkflowInstanceId("wfid"); + + expected.expect(ApplicationException.class); + expected.expectMessage("Task reference name cannot be null"); + getExecutionDAO().createTasks(Collections.singletonList(task)); + } + + @Test + public void testPollData() throws Exception { + getExecutionDAO().updateLastPoll("taskDef", null, "workerId1"); + PollData pd = getExecutionDAO().getPollData("taskDef", null); + assertNotNull(pd); + assertTrue(pd.getLastPollTime() > 0); + assertEquals(pd.getQueueName(), "taskDef"); + 
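
A note on the new ExecutionDAOTest starting here: it is an abstract contract test, so each persistence module (e.g. redis-persistence) can extend it and supply its own getExecutionDAO()/getMetadataDAO(). The poll-data expectations it pins down, restated as a compact illustrative sketch (class name ours, assert-style for brevity):

import com.netflix.conductor.common.metadata.tasks.PollData;
import com.netflix.conductor.dao.ExecutionDAO;

import java.util.List;

class PollDataContract {
    // updateLastPoll(queue, domain, worker) upserts one record per
    // (queue, domain); getPollData(queue, domain) fetches it or returns null;
    // getPollData(queue) lists the records across all domains.
    static void exercise(ExecutionDAO dao) {
        dao.updateLastPoll("taskDef", null, "workerId1");      // domain-less poll
        dao.updateLastPoll("taskDef", "domain1", "workerId1"); // domain-scoped poll

        PollData noDomain = dao.getPollData("taskDef", null);
        PollData domain1 = dao.getPollData("taskDef", "domain1");
        assert noDomain != null && domain1 != null;
        assert noDomain.getLastPollTime() > 0;

        List<PollData> all = dao.getPollData("taskDef");
        assert all.size() == 2;                              // both records listed
        assert dao.getPollData("taskDef", "domain2") == null; // unknown domain
    }
}
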
assertEquals(pd.getDomain(), null); + assertEquals(pd.getWorkerId(), "workerId1"); + + getExecutionDAO().updateLastPoll("taskDef", "domain1", "workerId1"); + pd = getExecutionDAO().getPollData("taskDef", "domain1"); + assertNotNull(pd); + assertTrue(pd.getLastPollTime() > 0); + assertEquals(pd.getQueueName(), "taskDef"); + assertEquals(pd.getDomain(), "domain1"); + assertEquals(pd.getWorkerId(), "workerId1"); + + List pData = getExecutionDAO().getPollData("taskDef"); + assertEquals(pData.size(), 2); + + pd = getExecutionDAO().getPollData("taskDef", "domain2"); + assertTrue(pd == null); + } + + @Test + public void testTaskCreateDups() throws Exception { + List tasks = new LinkedList<>(); + String workflowId = UUID.randomUUID().toString(); + + for (int i = 0; i < 3; i++) { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId(workflowId + "_t" + i); + task.setReferenceTaskName("t" + i); + task.setRetryCount(0); + task.setWorkflowInstanceId(workflowId); + task.setTaskDefName("task" + i); + task.setStatus(Task.Status.IN_PROGRESS); + tasks.add(task); + } + + //Let's insert a retried task + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId(workflowId + "_t" + 2); + task.setReferenceTaskName("t" + 2); + task.setRetryCount(1); + task.setWorkflowInstanceId(workflowId); + task.setTaskDefName("task" + 2); + task.setStatus(Task.Status.IN_PROGRESS); + tasks.add(task); + + //Duplicate task! + task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId(workflowId + "_t" + 1); + task.setReferenceTaskName("t" + 1); + task.setRetryCount(0); + task.setWorkflowInstanceId(workflowId); + task.setTaskDefName("task" + 1); + task.setStatus(Task.Status.IN_PROGRESS); + tasks.add(task); + + List created = getExecutionDAO().createTasks(tasks); + assertEquals(tasks.size() - 1, created.size()); //1 less + + Set srcIds = tasks.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount()).collect(Collectors.toSet()); + Set createdIds = created.stream().map(t -> t.getReferenceTaskName() + "." 
+ t.getRetryCount()).collect(Collectors.toSet()); + + assertEquals(srcIds, createdIds); + + List pending = getExecutionDAO().getPendingTasksByWorkflow("task0", workflowId); + assertNotNull(pending); + assertEquals(1, pending.size()); + assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0))); + + List found = getExecutionDAO().getTasks(tasks.get(0).getTaskDefName(), null, 1); + assertNotNull(found); + assertEquals(1, found.size()); + assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0))); + } + + @Test + public void testTaskOps() throws Exception { + List tasks = new LinkedList<>(); + String workflowId = UUID.randomUUID().toString(); + + for (int i = 0; i < 3; i++) { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId(workflowId + "_t" + i); + task.setReferenceTaskName("testTaskOps" + i); + task.setRetryCount(0); + task.setWorkflowInstanceId(workflowId); + task.setTaskDefName("testTaskOps" + i); + task.setStatus(Task.Status.IN_PROGRESS); + tasks.add(task); + } + + for (int i = 0; i < 3; i++) { + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId("x" + workflowId + "_t" + i); + task.setReferenceTaskName("testTaskOps" + i); + task.setRetryCount(0); + task.setWorkflowInstanceId("x" + workflowId); + task.setTaskDefName("testTaskOps" + i); + task.setStatus(Task.Status.IN_PROGRESS); + getExecutionDAO().createTasks(Arrays.asList(task)); + } + + + List created = getExecutionDAO().createTasks(tasks); + assertEquals(tasks.size(), created.size()); + + List pending = getExecutionDAO().getPendingTasksForTaskType(tasks.get(0).getTaskDefName()); + assertNotNull(pending); + assertEquals(2, pending.size()); + //Pending list can come in any order. finding the one we are looking for and then comparing + Task matching = pending.stream().filter(task -> task.getTaskId().equals(tasks.get(0).getTaskId())).findAny().get(); + assertTrue(EqualsBuilder.reflectionEquals(matching, tasks.get(0))); + + List update = new LinkedList<>(); + for (int i = 0; i < 3; i++) { + Task found = getExecutionDAO().getTask(workflowId + "_t" + i); + assertNotNull(found); + found.getOutputData().put("updated", true); + found.setStatus(Task.Status.COMPLETED); + update.add(found); + } + getExecutionDAO().updateTasks(update); + + List taskIds = tasks.stream().map(Task::getTaskId).collect(Collectors.toList()); + List found = getExecutionDAO().getTasks(taskIds); + assertEquals(taskIds.size(), found.size()); + found.forEach(task -> { + assertTrue(task.getOutputData().containsKey("updated")); + assertEquals(true, task.getOutputData().get("updated")); + getExecutionDAO().removeTask(task.getTaskId()); + }); + + found = getExecutionDAO().getTasks(taskIds); + assertTrue(found.isEmpty()); + } + + @Test + public void testPending() throws Exception { + WorkflowDef def = new WorkflowDef(); + def.setName("pending_count_test"); + + Workflow workflow = createTestWorkflow(); + workflow.setWorkflowDefinition(def); + + String idBase = workflow.getWorkflowId(); + generateWorkflows(workflow, idBase, 10); + + long count = getExecutionDAO().getPendingWorkflowCount(def.getName()); + assertEquals(10, count); + + for (int i = 0; i < 10; i++) { + getExecutionDAO().removeFromPendingWorkflow(def.getName(), "x" + i + idBase); + } + count = getExecutionDAO().getPendingWorkflowCount(def.getName()); + assertEquals(0, count); + } + + @Test + public void test() throws Exception { + Workflow workflow = createTestWorkflow(); + + String workflowId = 
getExecutionDAO().createWorkflow(workflow); + List created = getExecutionDAO().createTasks(workflow.getTasks()); + assertEquals(workflow.getTasks().size(), created.size()); + + Workflow workflowWithTasks = getExecutionDAO().getWorkflow(workflow.getWorkflowId(), true); + assertEquals(workflowWithTasks.getWorkflowId(), workflowId); + assertTrue(!workflowWithTasks.getTasks().isEmpty()); + + assertEquals(workflow.getWorkflowId(), workflowId); + Workflow found = getExecutionDAO().getWorkflow(workflowId, false); + assertTrue(found.getTasks().isEmpty()); + + workflow.getTasks().clear(); + assertEquals(workflow, found); + + workflow.getInput().put("updated", true); + getExecutionDAO().updateWorkflow(workflow); + found = getExecutionDAO().getWorkflow(workflowId); + assertNotNull(found); + assertTrue(found.getInput().containsKey("updated")); + assertEquals(true, found.getInput().get("updated")); + + List running = getExecutionDAO().getRunningWorkflowIds(workflow.getWorkflowName()); + assertNotNull(running); + assertTrue(running.isEmpty()); + + workflow.setStatus(Workflow.WorkflowStatus.RUNNING); + getExecutionDAO().updateWorkflow(workflow); + + running = getExecutionDAO().getRunningWorkflowIds(workflow.getWorkflowName()); + assertNotNull(running); + assertEquals(1, running.size()); + assertEquals(workflow.getWorkflowId(), running.get(0)); + + List pending = getExecutionDAO().getPendingWorkflowsByType(workflow.getWorkflowName()); + assertNotNull(pending); + assertEquals(1, pending.size()); + assertEquals(3, pending.get(0).getTasks().size()); + pending.get(0).getTasks().clear(); + assertEquals(workflow, pending.get(0)); + + workflow.setStatus(Workflow.WorkflowStatus.COMPLETED); + getExecutionDAO().updateWorkflow(workflow); + running = getExecutionDAO().getRunningWorkflowIds(workflow.getWorkflowName()); + assertNotNull(running); + assertTrue(running.isEmpty()); + + List bytime = getExecutionDAO().getWorkflowsByType(workflow.getWorkflowName(), System.currentTimeMillis(), System.currentTimeMillis() + 100); + assertNotNull(bytime); + assertTrue(bytime.isEmpty()); + + bytime = getExecutionDAO().getWorkflowsByType(workflow.getWorkflowName(), workflow.getCreateTime() - 10, workflow.getCreateTime() + 10); + assertNotNull(bytime); + assertEquals(1, bytime.size()); + } + + protected Workflow createTestWorkflow() { + WorkflowDef def = new WorkflowDef(); + def.setName("Junit Workflow"); + def.setVersion(3); + def.setSchemaVersion(2); + + Workflow workflow = new Workflow(); + workflow.setWorkflowDefinition(def); + workflow.setCorrelationId("correlationX"); + workflow.setCreatedBy("junit_tester"); + workflow.setEndTime(200L); + + Map input = new HashMap<>(); + input.put("param1", "param1 value"); + input.put("param2", 100); + workflow.setInput(input); + + Map output = new HashMap<>(); + output.put("ouput1", "output 1 value"); + output.put("op2", 300); + workflow.setOutput(output); + + workflow.setOwnerApp("workflow"); + workflow.setParentWorkflowId("parentWorkflowId"); + workflow.setParentWorkflowTaskId("parentWFTaskId"); + workflow.setReasonForIncompletion("missing recipe"); + workflow.setReRunFromWorkflowId("re-run from id1"); + workflow.setStartTime(90L); + workflow.setStatus(Workflow.WorkflowStatus.FAILED); + workflow.setWorkflowId("workflow0"); + + List tasks = new LinkedList<>(); + + Task task = new Task(); + task.setScheduledTime(1L); + task.setSeq(1); + task.setTaskId("t1"); + task.setReferenceTaskName("t1"); + task.setWorkflowInstanceId(workflow.getWorkflowId()); + task.setTaskDefName("task1"); + + Task 
task2 = new Task(); + task2.setScheduledTime(2L); + task2.setSeq(2); + task2.setTaskId("t2"); + task2.setReferenceTaskName("t2"); + task2.setWorkflowInstanceId(workflow.getWorkflowId()); + task2.setTaskDefName("task2"); + + Task task3 = new Task(); + task3.setScheduledTime(2L); + task3.setSeq(3); + task3.setTaskId("t3"); + task3.setReferenceTaskName("t3"); + task3.setWorkflowInstanceId(workflow.getWorkflowId()); + task3.setTaskDefName("task3"); + + tasks.add(task); + tasks.add(task2); + tasks.add(task3); + + workflow.setTasks(tasks); + + workflow.setUpdatedBy("junit_tester"); + workflow.setUpdateTime(800L); + + return workflow; + } + + protected void generateWorkflows(Workflow base, String baseId, int count) { + for (int i = 0; i < count; i++) { + base.setWorkflowId("x" + i + baseId); + base.setCorrelationId("corr001"); + base.setStatus(Workflow.WorkflowStatus.RUNNING); + getExecutionDAO().createWorkflow(base); + } + } +} diff --git a/core/src/test/resources/def.json b/core/src/test/resources/def.json deleted file mode 100644 index 393807aaa8..0000000000 --- a/core/src/test/resources/def.json +++ /dev/null @@ -1 +0,0 @@ -{"createTime":1477681181098,"updateTime":1502738273998,"name":"performance_test_1","description":"performance_test_1","version":1,"tasks":[{"name":"perf_task_1","taskReferenceName":"perf_task_1","inputParameters":{"mod":"${workflow.input.mod}","oddEven":"${workflow.input.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"dyntask","taskReferenceName":"perf_task_2","inputParameters":{"taskToExecute":"${workflow.input.task2Name}"},"type":"DYNAMIC","dynamicTaskNameParam":"taskToExecute","startDelay":0},{"name":"perf_task_3","taskReferenceName":"perf_task_3","inputParameters":{"mod":"${perf_task_2.output.mod}","oddEven":"${perf_task_2.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"get_from_es","taskReferenceName":"get_es_1","type":"HTTP","startDelay":0},{"name":"oddEvenDecision","taskReferenceName":"oddEvenDecision","inputParameters":{"oddEven":"${perf_task_3.output.oddEven}"},"type":"DECISION","caseValueParam":"oddEven","decisionCases":{"0":[{"name":"perf_task_4","taskReferenceName":"perf_task_4","inputParameters":{"mod":"${perf_task_3.output.mod}","oddEven":"${perf_task_3.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"dynamic_fanout","taskReferenceName":"fanout1","inputParameters":{"dynamicTasks":"${perf_task_4.output.dynamicTasks}","input":"${perf_task_4.output.inputs}"},"type":"FORK_JOIN_DYNAMIC","dynamicForkTasksParam":"dynamicTasks","dynamicForkTasksInputParamName":"input","startDelay":0},{"name":"dynamic_join","taskReferenceName":"join1","type":"JOIN","startDelay":0},{"name":"perf_task_5","taskReferenceName":"perf_task_5","inputParameters":{"mod":"${perf_task_4.output.mod}","oddEven":"${perf_task_4.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_6","taskReferenceName":"perf_task_6","inputParameters":{"mod":"${perf_task_5.output.mod}","oddEven":"${perf_task_5.output.oddEven}"},"type":"SIMPLE","startDelay":0}],"1":[{"name":"perf_task_7","taskReferenceName":"perf_task_7","inputParameters":{"mod":"${perf_task_3.output.mod}","oddEven":"${perf_task_3.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_8","taskReferenceName":"perf_task_8","inputParameters":{"mod":"${perf_task_7.output.mod}","oddEven":"${perf_task_7.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_9","taskReferenceName":"perf_task_9","inputParameters":{"mod":"${perf_task_8.output.mod}","oddEven":"${perf_task_8.output.oddEven}"},"ty
pe":"SIMPLE","startDelay":0},{"name":"modDecision","taskReferenceName":"modDecision","inputParameters":{"mod":"${perf_task_8.output.mod}"},"type":"DECISION","caseValueParam":"mod","decisionCases":{"0":[{"name":"perf_task_12","taskReferenceName":"perf_task_12","inputParameters":{"mod":"${perf_task_9.output.mod}","oddEven":"${perf_task_9.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_13","taskReferenceName":"perf_task_13","inputParameters":{"mod":"${perf_task_12.output.mod}","oddEven":"${perf_task_12.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"sub_workflow_x","taskReferenceName":"wf1","inputParameters":{"mod":"${perf_task_12.output.mod}","oddEven":"${perf_task_12.output.oddEven}"},"type":"SUB_WORKFLOW","startDelay":0,"subWorkflowParam":{"name":"sub_flow_1","version":1}}],"1":[{"name":"perf_task_15","taskReferenceName":"perf_task_15","inputParameters":{"mod":"${perf_task_9.output.mod}","oddEven":"${perf_task_9.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_16","taskReferenceName":"perf_task_16","inputParameters":{"mod":"${perf_task_15.output.mod}","oddEven":"${perf_task_15.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"sub_workflow_x","taskReferenceName":"wf2","inputParameters":{"mod":"${perf_task_12.output.mod}","oddEven":"${perf_task_12.output.oddEven}"},"type":"SUB_WORKFLOW","startDelay":0,"subWorkflowParam":{"name":"sub_flow_1","version":1}}],"4":[{"name":"perf_task_18","taskReferenceName":"perf_task_18","inputParameters":{"mod":"${perf_task_9.output.mod}","oddEven":"${perf_task_9.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_19","taskReferenceName":"perf_task_19","inputParameters":{"mod":"${perf_task_18.output.mod}","oddEven":"${perf_task_18.output.oddEven}"},"type":"SIMPLE","startDelay":0}],"5":[{"name":"perf_task_21","taskReferenceName":"perf_task_21","inputParameters":{"mod":"${perf_task_9.output.mod}","oddEven":"${perf_task_9.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"sub_workflow_x","taskReferenceName":"wf3","inputParameters":{"mod":"${perf_task_12.output.mod}","oddEven":"${perf_task_12.output.oddEven}"},"type":"SUB_WORKFLOW","startDelay":0,"subWorkflowParam":{"name":"sub_flow_1","version":1}},{"name":"perf_task_22","taskReferenceName":"perf_task_22","inputParameters":{"mod":"${perf_task_21.output.mod}","oddEven":"${perf_task_21.output.oddEven}"},"type":"SIMPLE","startDelay":0}]},"defaultCase":[{"name":"perf_task_24","taskReferenceName":"perf_task_24","inputParameters":{"mod":"${perf_task_9.output.mod}","oddEven":"${perf_task_9.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"sub_workflow_x","taskReferenceName":"wf4","inputParameters":{"mod":"${perf_task_12.output.mod}","oddEven":"${perf_task_12.output.oddEven}"},"type":"SUB_WORKFLOW","startDelay":0,"subWorkflowParam":{"name":"sub_flow_1","version":1}},{"name":"perf_task_25","taskReferenceName":"perf_task_25","inputParameters":{"mod":"${perf_task_24.output.mod}","oddEven":"${perf_task_24.output.oddEven}"},"type":"SIMPLE","startDelay":0}],"startDelay":0}]},"startDelay":0},{"name":"perf_task_28","taskReferenceName":"perf_task_28","inputParameters":{"mod":"${perf_task_3.output.mod}","oddEven":"${perf_task_3.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_29","taskReferenceName":"perf_task_29","inputParameters":{"mod":"${perf_task_28.output.mod}","oddEven":"${perf_task_28.output.oddEven}"},"type":"SIMPLE","startDelay":0},{"name":"perf_task_30","taskReferenceName":"perf_task_30","inputParameters":{"m
od":"${perf_task_29.output.mod}","oddEven":"${perf_task_29.output.oddEven}"},"type":"SIMPLE","startDelay":0}],"schemaVersion":2} \ No newline at end of file diff --git a/core/src/test/resources/test.json b/core/src/test/resources/test.json index 523a73f11e..e2c1a8b5da 100644 --- a/core/src/test/resources/test.json +++ b/core/src/test/resources/test.json @@ -1,941 +1,1277 @@ { - "ownerApp": "cpeworkflowtests", - "createTime": 1505587453961, - "updateTime": 1505588471071, - "status": "RUNNING", - "endTime": 0, - "workflowId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "tasks": [ - { - "taskType": "perf_task_1", - "status": "COMPLETED", - "inputData": { - "mod": "0", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_1", - "retryCount": 0, - "seq": 1, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_1", - "scheduledTime": 1505587453972, - "startTime": 1505587455481, - "endTime": 1505587455539, - "updateTime": 1505587455539, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "3a54e268-0054-4eab-aea2-e54d1b89896c", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "5", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_1", - "taskReferenceName": "perf_task_1", - "inputParameters": { - "mod": "${workflow.input.mod}", - "oddEven": "${workflow.input.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 1509, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_10", - "status": "COMPLETED", - "inputData": { - "taskToExecute": "perf_task_10" - }, - 
"referenceTaskName": "perf_task_2", - "retryCount": 0, - "seq": 2, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_10", - "scheduledTime": 1505587455517, - "startTime": 1505587457017, - "endTime": 1505587457075, - "updateTime": 1505587457075, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "3731c3ee-f918-42b7-8bb3-fb016fc0ecae", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "1", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_10", - "taskReferenceName": "perf_task_2", - "inputParameters": { - "taskToExecute": "${workflow.input.task2Name}" - }, - "type": "DYNAMIC", - "dynamicTaskNameParam": "taskToExecute", - "startDelay": 0 - }, - "queueWaitTime": 1500, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_3", - "status": "COMPLETED", - "inputData": { - "mod": "1", - "oddEven": "1" - }, - "referenceTaskName": "perf_task_3", - "retryCount": 0, - "seq": 3, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_3", - "scheduledTime": 1505587457064, - "startTime": 1505587459498, - "endTime": 1505587459560, - "updateTime": 1505587459560, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "738370d6-596f-4ae5-95bf-ca635c7f10dd", - "callbackAfterSeconds": 0, - "outputData": { - "mod": "6", - "oddEven": "0", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - 
"oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_3", - "taskReferenceName": "perf_task_3", - "inputParameters": { - "mod": "${perf_task_2.output.mod}", - "oddEven": "${perf_task_2.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 2434, - "taskStatus": "COMPLETED" - }, - { - "taskType": "HTTP", - "status": "COMPLETED", - "inputData": { - "http_request": { - "uri": "/wfe_perf/workflow/_search?q=status:RUNNING&size=0&beta", - "method": "GET", - "vipAddress": "es_cpe_wfe.us-east-1.cloud.netflix.com" - } - }, - "referenceTaskName": "get_es_1", - "retryCount": 0, - "seq": 4, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "get_from_es", - "scheduledTime": 1505587459547, - "startTime": 1505587459996, - "endTime": 1505587460250, - "updateTime": 1505587460250, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "64b49d62-1dfb-4290-94d4-971b4d033f33", - "callbackAfterSeconds": 0, - "workerId": "i-04c53d07aba5b5e9c", - "outputData": { - "response": { - "headers": { - "Content-Length": [ - "121" - ], - "Content-Type": [ - "application/json; charset=UTF-8" - ] - }, - "reasonPhrase": "OK", - "body": { - "took": 1, - "timed_out": false, - "_shards": { - "total": 6, - "successful": 6, - "failed": 0 - }, - "hits": { - "total": 1, - "max_score": 0.0, - "hits": [] - } - }, - "statusCode": 200 - } - }, - "workflowTask": { - "name": "get_from_es", - "taskReferenceName": "get_es_1", - "type": "HTTP", - "startDelay": 0 - }, - "queueWaitTime": 449, - "taskStatus": "COMPLETED" - }, - { - "taskType": "DECISION", - "status": "COMPLETED", - "inputData": { - "hasChildren": "true", - "case": "0" - }, - 
"referenceTaskName": "oddEvenDecision", - "retryCount": 0, - "seq": 5, - "correlationId": "1505587453950", - "pollCount": 0, - "taskDefName": "DECISION", - "scheduledTime": 1505587460216, - "startTime": 1505587460241, - "endTime": 1505587460274, - "updateTime": 1505587460274, - "startDelayInSeconds": 0, - "retried": false, - "executed": true, - "callbackFromWorker": true, - "responseTimeoutSeconds": 0, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "5a596a36-09eb-4a11-a952-01ab5a7c362f", - "callbackAfterSeconds": 0, - "outputData": { - "caseOutput": [ - "0" - ] - }, - "workflowTask": { - "name": "oddEvenDecision", - "taskReferenceName": "oddEvenDecision", - "inputParameters": { - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "DECISION", - "caseValueParam": "oddEven", - "decisionCases": { - "0": [ - { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "dynamic_fanout", - "taskReferenceName": "fanout1", - "inputParameters": { - "dynamicTasks": "${perf_task_4.output.dynamicTasks}", - "input": "${perf_task_4.output.inputs}" - }, - "type": "FORK_JOIN_DYNAMIC", - "dynamicForkTasksParam": "dynamicTasks", - "dynamicForkTasksInputParamName": "input", - "startDelay": 0 - }, - { - "name": "dynamic_join", - "taskReferenceName": "join1", - "type": "JOIN", - "startDelay": 0 - }, - { - "name": "perf_task_5", - "taskReferenceName": "perf_task_5", - "inputParameters": { - "mod": "${perf_task_4.output.mod}", - "oddEven": "${perf_task_4.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_6", - "taskReferenceName": "perf_task_6", - "inputParameters": { - "mod": "${perf_task_5.output.mod}", - "oddEven": "${perf_task_5.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "1": [ - { - "name": "perf_task_7", - "taskReferenceName": "perf_task_7", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_8", - "taskReferenceName": "perf_task_8", - "inputParameters": { - "mod": "${perf_task_7.output.mod}", - "oddEven": "${perf_task_7.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_9", - "taskReferenceName": "perf_task_9", - "inputParameters": { - "mod": "${perf_task_8.output.mod}", - "oddEven": "${perf_task_8.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "modDecision", - "taskReferenceName": "modDecision", - "inputParameters": { - "mod": "${perf_task_8.output.mod}" - }, - "type": "DECISION", - "caseValueParam": "mod", - "decisionCases": { - "0": [ - { - "name": "perf_task_12", - "taskReferenceName": "perf_task_12", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_13", - "taskReferenceName": "perf_task_13", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf1", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - 
"version": 1 - } - } - ], - "1": [ - { - "name": "perf_task_15", - "taskReferenceName": "perf_task_15", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_16", - "taskReferenceName": "perf_task_16", - "inputParameters": { - "mod": "${perf_task_15.output.mod}", - "oddEven": "${perf_task_15.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf2", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - } - ], - "4": [ - { - "name": "perf_task_18", - "taskReferenceName": "perf_task_18", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "perf_task_19", - "taskReferenceName": "perf_task_19", - "inputParameters": { - "mod": "${perf_task_18.output.mod}", - "oddEven": "${perf_task_18.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "5": [ - { - "name": "perf_task_21", - "taskReferenceName": "perf_task_21", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf3", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_22", - "taskReferenceName": "perf_task_22", - "inputParameters": { - "mod": "${perf_task_21.output.mod}", - "oddEven": "${perf_task_21.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ] - }, - "defaultCase": [ - { - "name": "perf_task_24", - "taskReferenceName": "perf_task_24", - "inputParameters": { - "mod": "${perf_task_9.output.mod}", - "oddEven": "${perf_task_9.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - { - "name": "sub_workflow_x", - "taskReferenceName": "wf4", - "inputParameters": { - "mod": "${perf_task_12.output.mod}", - "oddEven": "${perf_task_12.output.oddEven}" - }, - "type": "SUB_WORKFLOW", - "startDelay": 0, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": 1 - } - }, - { - "name": "perf_task_25", - "taskReferenceName": "perf_task_25", - "inputParameters": { - "mod": "${perf_task_24.output.mod}", - "oddEven": "${perf_task_24.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - } - ], - "startDelay": 0 - } - ] - }, - "startDelay": 0 - }, - "queueWaitTime": 25, - "taskStatus": "COMPLETED" - }, - { - "taskType": "perf_task_4", - "status": "COMPLETED", - "inputData": { - "mod": "6", - "oddEven": "0" - }, - "referenceTaskName": "perf_task_4", - "retryCount": 0, - "seq": 6, - "correlationId": "1505587453950", - "pollCount": 1, - "taskDefName": "perf_task_4", - "scheduledTime": 1505587460234, - "startTime": 1505587463699, - "endTime": 1505587463718, - "updateTime": 1505587463718, - "startDelayInSeconds": 0, - "retried": false, - "executed": false, - "callbackFromWorker": true, - "responseTimeoutSeconds": 3600, - "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", - "taskId": "1bf3da08-9d16-4f8a-98c3-4a6efee0e03a", - "callbackAfterSeconds": 0, - "outputData": { 
- "mod": "9", - "oddEven": "1", - "inputs": { - "subflow_0": { - "mod": 4, - "oddEven": 0 - }, - "subflow_4": { - "mod": 4, - "oddEven": 0 - }, - "subflow_2": { - "mod": 4, - "oddEven": 0 - } - }, - "dynamicTasks": [ - { - "name": null, - "taskReferenceName": "subflow_0", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_2", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - }, - { - "name": null, - "taskReferenceName": "subflow_4", - "description": null, - "inputParameters": null, - "type": "SUB_WORKFLOW", - "dynamicTaskNameParam": null, - "caseValueParam": null, - "caseExpression": null, - "decisionCases": { - - }, - "dynamicForkJoinTasksParam": null, - "dynamicForkTasksParam": null, - "dynamicForkTasksInputParamName": null, - "defaultCase": [], - "forkTasks": [], - "startDelay": 0, - "joinOn": [], - "sink": null, - "optional": null, - "subWorkflowParam": { - "name": "sub_flow_1", - "version": null - } - } - ], - "attempt": 1 - }, - "workflowTask": { - "name": "perf_task_4", - "taskReferenceName": "perf_task_4", - "inputParameters": { - "mod": "${perf_task_3.output.mod}", - "oddEven": "${perf_task_3.output.oddEven}" - }, - "type": "SIMPLE", - "startDelay": 0 - }, - "queueWaitTime": 3465, - "taskStatus": "COMPLETED" - } - ], - "input": { - "mod": "0", - "oddEven": "0", - "task2Name": "perf_task_10" - }, - "workflowType": "performance_test_1", - "version": 1, - "correlationId": "1505587453950", - "schemaVersion": 2, - "taskToDomain": { - "*": "beta" - }, - "startTime": 1505587453961 -} \ No newline at end of file + "ownerApp": "cpeworkflowtests", + "createTime": 1505587453961, + "updateTime": 1505588471071, + "status": "RUNNING", + "endTime": 0, + "workflowId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "tasks": [ + { + "taskType": "perf_task_1", + "status": "COMPLETED", + "inputData": { + "mod": "0", + "oddEven": "0" + }, + "referenceTaskName": "perf_task_1", + "retryCount": 0, + "seq": 1, + "correlationId": "1505587453950", + "pollCount": 1, + "taskDefName": "perf_task_1", + "scheduledTime": 1505587453972, + "startTime": 1505587455481, + "endTime": 1505587455539, + "updateTime": 1505587455539, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "3a54e268-0054-4eab-aea2-e54d1b89896c", + "callbackAfterSeconds": 0, + "outputData": { + "mod": "5", + "oddEven": "1", + "inputs": { + "subflow_0": { + "mod": 4, + "oddEven": 0 + }, + "subflow_4": { + "mod": 4, + "oddEven": 0 + }, + "subflow_2": { + "mod": 4, + "oddEven": 0 + } + }, + 
"dynamicTasks": [ + { + "name": null, + "taskReferenceName": "subflow_0", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_2", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_4", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + } + ], + "attempt": 1 + }, + "workflowTask": { + "name": "perf_task_1", + "taskReferenceName": "perf_task_1", + "inputParameters": { + "mod": "${workflow.input.mod}", + "oddEven": "${workflow.input.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + "queueWaitTime": 1509, + "taskStatus": "COMPLETED" + }, + { + "taskType": "perf_task_10", + "status": "COMPLETED", + "inputData": { + "taskToExecute": "perf_task_10" + }, + "referenceTaskName": "perf_task_2", + "retryCount": 0, + "seq": 2, + "correlationId": "1505587453950", + "pollCount": 1, + "taskDefName": "perf_task_10", + "scheduledTime": 1505587455517, + "startTime": 1505587457017, + "endTime": 1505587457075, + "updateTime": 1505587457075, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "3731c3ee-f918-42b7-8bb3-fb016fc0ecae", + "callbackAfterSeconds": 0, + "outputData": { + "mod": "1", + "oddEven": "1", + "inputs": { + "subflow_0": { + "mod": 4, + "oddEven": 0 + }, + "subflow_4": { + "mod": 4, + "oddEven": 0 + }, + "subflow_2": { + "mod": 4, + "oddEven": 0 + } + }, + "dynamicTasks": [ + { + "name": null, + "taskReferenceName": "subflow_0", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_2", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": 
null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_4", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + } + ], + "attempt": 1 + }, + "workflowTask": { + "name": "perf_task_10", + "taskReferenceName": "perf_task_2", + "inputParameters": { + "taskToExecute": "${workflow.input.task2Name}" + }, + "type": "DYNAMIC", + "dynamicTaskNameParam": "taskToExecute", + "startDelay": 0 + }, + "queueWaitTime": 1500, + "taskStatus": "COMPLETED" + }, + { + "taskType": "perf_task_3", + "status": "COMPLETED", + "inputData": { + "mod": "1", + "oddEven": "1" + }, + "referenceTaskName": "perf_task_3", + "retryCount": 0, + "seq": 3, + "correlationId": "1505587453950", + "pollCount": 1, + "taskDefName": "perf_task_3", + "scheduledTime": 1505587457064, + "startTime": 1505587459498, + "endTime": 1505587459560, + "updateTime": 1505587459560, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "738370d6-596f-4ae5-95bf-ca635c7f10dd", + "callbackAfterSeconds": 0, + "outputData": { + "mod": "6", + "oddEven": "0", + "inputs": { + "subflow_0": { + "mod": 4, + "oddEven": 0 + }, + "subflow_4": { + "mod": 4, + "oddEven": 0 + }, + "subflow_2": { + "mod": 4, + "oddEven": 0 + } + }, + "dynamicTasks": [ + { + "name": null, + "taskReferenceName": "subflow_0", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_2", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_4", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + 
"dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + } + ], + "attempt": 1 + }, + "workflowTask": { + "name": "perf_task_3", + "taskReferenceName": "perf_task_3", + "inputParameters": { + "mod": "${perf_task_2.output.mod}", + "oddEven": "${perf_task_2.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + "queueWaitTime": 2434, + "taskStatus": "COMPLETED" + }, + { + "taskType": "HTTP", + "status": "COMPLETED", + "inputData": { + "http_request": { + "uri": "/wfe_perf/workflow/_search?q=status:RUNNING&size=0&beta", + "method": "GET", + "vipAddress": "es_cpe_wfe.us-east-1.cloud.netflix.com" + } + }, + "referenceTaskName": "get_es_1", + "retryCount": 0, + "seq": 4, + "correlationId": "1505587453950", + "pollCount": 1, + "taskDefName": "get_from_es", + "scheduledTime": 1505587459547, + "startTime": 1505587459996, + "endTime": 1505587460250, + "updateTime": 1505587460250, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "64b49d62-1dfb-4290-94d4-971b4d033f33", + "callbackAfterSeconds": 0, + "workerId": "i-04c53d07aba5b5e9c", + "outputData": { + "response": { + "headers": { + "Content-Length": [ + "121" + ], + "Content-Type": [ + "application/json; charset=UTF-8" + ] + }, + "reasonPhrase": "OK", + "body": { + "took": 1, + "timed_out": false, + "_shards": { + "total": 6, + "successful": 6, + "failed": 0 + }, + "hits": { + "total": 1, + "max_score": 0.0, + "hits": [] + } + }, + "statusCode": 200 + } + }, + "workflowTask": { + "name": "get_from_es", + "taskReferenceName": "get_es_1", + "type": "HTTP", + "startDelay": 0 + }, + "queueWaitTime": 449, + "taskStatus": "COMPLETED" + }, + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "0" + }, + "referenceTaskName": "oddEvenDecision", + "retryCount": 0, + "seq": 5, + "correlationId": "1505587453950", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1505587460216, + "startTime": 1505587460241, + "endTime": 1505587460274, + "updateTime": 1505587460274, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "5a596a36-09eb-4a11-a952-01ab5a7c362f", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "0" + ] + }, + "workflowTask": { + "name": "oddEvenDecision", + "taskReferenceName": "oddEvenDecision", + "inputParameters": { + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "DECISION", + "caseValueParam": "oddEven", + "decisionCases": { + "0": [ + { + "name": "perf_task_4", + "taskReferenceName": "perf_task_4", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "dynamic_fanout", + "taskReferenceName": "fanout1", + "inputParameters": { + "dynamicTasks": "${perf_task_4.output.dynamicTasks}", + "input": "${perf_task_4.output.inputs}" + }, + "type": "FORK_JOIN_DYNAMIC", + "dynamicForkTasksParam": "dynamicTasks", + "dynamicForkTasksInputParamName": "input", + "startDelay": 0 + }, + { + "name": "dynamic_join", + "taskReferenceName": "join1", + "type": "JOIN", + "startDelay": 0 
+ }, + { + "name": "perf_task_5", + "taskReferenceName": "perf_task_5", + "inputParameters": { + "mod": "${perf_task_4.output.mod}", + "oddEven": "${perf_task_4.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_6", + "taskReferenceName": "perf_task_6", + "inputParameters": { + "mod": "${perf_task_5.output.mod}", + "oddEven": "${perf_task_5.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "1": [ + { + "name": "perf_task_7", + "taskReferenceName": "perf_task_7", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_8", + "taskReferenceName": "perf_task_8", + "inputParameters": { + "mod": "${perf_task_7.output.mod}", + "oddEven": "${perf_task_7.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_9", + "taskReferenceName": "perf_task_9", + "inputParameters": { + "mod": "${perf_task_8.output.mod}", + "oddEven": "${perf_task_8.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "modDecision", + "taskReferenceName": "modDecision", + "inputParameters": { + "mod": "${perf_task_8.output.mod}" + }, + "type": "DECISION", + "caseValueParam": "mod", + "decisionCases": { + "0": [ + { + "name": "perf_task_12", + "taskReferenceName": "perf_task_12", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_13", + "taskReferenceName": "perf_task_13", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf1", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + "1": [ + { + "name": "perf_task_15", + "taskReferenceName": "perf_task_15", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_16", + "taskReferenceName": "perf_task_16", + "inputParameters": { + "mod": "${perf_task_15.output.mod}", + "oddEven": "${perf_task_15.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf2", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + "4": [ + { + "name": "perf_task_18", + "taskReferenceName": "perf_task_18", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_19", + "taskReferenceName": "perf_task_19", + "inputParameters": { + "mod": "${perf_task_18.output.mod}", + "oddEven": "${perf_task_18.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "5": [ + { + "name": "perf_task_21", + "taskReferenceName": "perf_task_21", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": 
"sub_workflow_x", + "taskReferenceName": "wf3", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + }, + { + "name": "perf_task_22", + "taskReferenceName": "perf_task_22", + "inputParameters": { + "mod": "${perf_task_21.output.mod}", + "oddEven": "${perf_task_21.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ] + }, + "defaultCase": [ + { + "name": "perf_task_24", + "taskReferenceName": "perf_task_24", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf4", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + }, + { + "name": "perf_task_25", + "taskReferenceName": "perf_task_25", + "inputParameters": { + "mod": "${perf_task_24.output.mod}", + "oddEven": "${perf_task_24.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "startDelay": 0 + } + ] + }, + "startDelay": 0 + }, + "queueWaitTime": 25, + "taskStatus": "COMPLETED" + }, + { + "taskType": "perf_task_4", + "status": "COMPLETED", + "inputData": { + "mod": "6", + "oddEven": "0" + }, + "referenceTaskName": "perf_task_4", + "retryCount": 0, + "seq": 6, + "correlationId": "1505587453950", + "pollCount": 1, + "taskDefName": "perf_task_4", + "scheduledTime": 1505587460234, + "startTime": 1505587463699, + "endTime": 1505587463718, + "updateTime": 1505587463718, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "46e2d0d7-0809-40f2-9f22-bed9d41f6613", + "taskId": "1bf3da08-9d16-4f8a-98c3-4a6efee0e03a", + "callbackAfterSeconds": 0, + "outputData": { + "mod": "9", + "oddEven": "1", + "inputs": { + "subflow_0": { + "mod": 4, + "oddEven": 0 + }, + "subflow_4": { + "mod": 4, + "oddEven": 0 + }, + "subflow_2": { + "mod": 4, + "oddEven": 0 + } + }, + "dynamicTasks": [ + { + "name": null, + "taskReferenceName": "subflow_0", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_2", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + }, + { + "name": null, + "taskReferenceName": "subflow_4", + "description": null, + "inputParameters": null, + "type": "SUB_WORKFLOW", + "dynamicTaskNameParam": null, + "caseValueParam": 
null, + "caseExpression": null, + "decisionCases": { + }, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "sink": null, + "optional": null, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": null + } + } + ], + "attempt": 1 + }, + "workflowTask": { + "name": "perf_task_4", + "taskReferenceName": "perf_task_4", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + "queueWaitTime": 3465, + "taskStatus": "COMPLETED" + } + ], + "input": { + "mod": "0", + "oddEven": "0", + "task2Name": "perf_task_10" + }, + "workflowType": "performance_test_1", + "version": 1, + "correlationId": "1505587453950", + "schemaVersion": 2, + "taskToDomain": { + "*": "beta" + }, + "startTime": 1505587453961, + "workflowDefinition": { + "createTime": 1477681181098, + "updateTime": 1502738273998, + "name": "performance_test_1", + "description": "performance_test_1", + "version": 1, + "tasks": [ + { + "name": "perf_task_1", + "taskReferenceName": "perf_task_1", + "inputParameters": { + "mod": "${workflow.input.mod}", + "oddEven": "${workflow.input.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "dyntask", + "taskReferenceName": "perf_task_2", + "inputParameters": { + "taskToExecute": "${workflow.input.task2Name}" + }, + "type": "DYNAMIC", + "dynamicTaskNameParam": "taskToExecute", + "startDelay": 0 + }, + { + "name": "perf_task_3", + "taskReferenceName": "perf_task_3", + "inputParameters": { + "mod": "${perf_task_2.output.mod}", + "oddEven": "${perf_task_2.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "get_from_es", + "taskReferenceName": "get_es_1", + "type": "HTTP", + "startDelay": 0 + }, + { + "name": "oddEvenDecision", + "taskReferenceName": "oddEvenDecision", + "inputParameters": { + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "DECISION", + "caseValueParam": "oddEven", + "decisionCases": { + "0": [ + { + "name": "perf_task_4", + "taskReferenceName": "perf_task_4", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "dynamic_fanout", + "taskReferenceName": "fanout1", + "inputParameters": { + "dynamicTasks": "${perf_task_4.output.dynamicTasks}", + "input": "${perf_task_4.output.inputs}" + }, + "type": "FORK_JOIN_DYNAMIC", + "dynamicForkTasksParam": "dynamicTasks", + "dynamicForkTasksInputParamName": "input", + "startDelay": 0 + }, + { + "name": "dynamic_join", + "taskReferenceName": "join1", + "type": "JOIN", + "startDelay": 0 + }, + { + "name": "perf_task_5", + "taskReferenceName": "perf_task_5", + "inputParameters": { + "mod": "${perf_task_4.output.mod}", + "oddEven": "${perf_task_4.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_6", + "taskReferenceName": "perf_task_6", + "inputParameters": { + "mod": "${perf_task_5.output.mod}", + "oddEven": "${perf_task_5.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "1": [ + { + "name": "perf_task_7", + "taskReferenceName": "perf_task_7", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_8", + "taskReferenceName": "perf_task_8", + "inputParameters": { + 
"mod": "${perf_task_7.output.mod}", + "oddEven": "${perf_task_7.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_9", + "taskReferenceName": "perf_task_9", + "inputParameters": { + "mod": "${perf_task_8.output.mod}", + "oddEven": "${perf_task_8.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "modDecision", + "taskReferenceName": "modDecision", + "inputParameters": { + "mod": "${perf_task_8.output.mod}" + }, + "type": "DECISION", + "caseValueParam": "mod", + "decisionCases": { + "0": [ + { + "name": "perf_task_12", + "taskReferenceName": "perf_task_12", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_13", + "taskReferenceName": "perf_task_13", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf1", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + "1": [ + { + "name": "perf_task_15", + "taskReferenceName": "perf_task_15", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_16", + "taskReferenceName": "perf_task_16", + "inputParameters": { + "mod": "${perf_task_15.output.mod}", + "oddEven": "${perf_task_15.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf2", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + "4": [ + { + "name": "perf_task_18", + "taskReferenceName": "perf_task_18", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_19", + "taskReferenceName": "perf_task_19", + "inputParameters": { + "mod": "${perf_task_18.output.mod}", + "oddEven": "${perf_task_18.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "5": [ + { + "name": "perf_task_21", + "taskReferenceName": "perf_task_21", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf3", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + }, + { + "name": "perf_task_22", + "taskReferenceName": "perf_task_22", + "inputParameters": { + "mod": "${perf_task_21.output.mod}", + "oddEven": "${perf_task_21.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ] + }, + "defaultCase": [ + { + "name": "perf_task_24", + "taskReferenceName": "perf_task_24", + "inputParameters": { + "mod": "${perf_task_9.output.mod}", + "oddEven": "${perf_task_9.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "sub_workflow_x", + 
"taskReferenceName": "wf4", + "inputParameters": { + "mod": "${perf_task_12.output.mod}", + "oddEven": "${perf_task_12.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + }, + { + "name": "perf_task_25", + "taskReferenceName": "perf_task_25", + "inputParameters": { + "mod": "${perf_task_24.output.mod}", + "oddEven": "${perf_task_24.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "startDelay": 0 + } + ] + }, + "startDelay": 0 + }, + { + "name": "perf_task_28", + "taskReferenceName": "perf_task_28", + "inputParameters": { + "mod": "${perf_task_3.output.mod}", + "oddEven": "${perf_task_3.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_29", + "taskReferenceName": "perf_task_29", + "inputParameters": { + "mod": "${perf_task_28.output.mod}", + "oddEven": "${perf_task_28.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + }, + { + "name": "perf_task_30", + "taskReferenceName": "perf_task_30", + "inputParameters": { + "mod": "${perf_task_29.output.mod}", + "oddEven": "${perf_task_29.output.oddEven}" + }, + "type": "SIMPLE", + "startDelay": 0 + } + ], + "schemaVersion": 2 + } +} diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 87a373793b..fad1bc8181 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -62,6 +62,7 @@ default String getEmbeddedHomePath() { default int getEmbeddedPort() { return getIntProperty(EMBEDDED_PORT_PROPERTY_NAME, EMBEDDED_PORT_DEFAULT_VALUE); + } default String getEmbeddedClusterName() { diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java index 216439bfb5..58989988e9 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/MetadataServiceImpl.java @@ -5,18 +5,22 @@ import com.netflix.conductor.grpc.MetadataServiceGrpc; import com.netflix.conductor.grpc.MetadataServicePb; import com.netflix.conductor.grpc.ProtoMapper; +import com.netflix.conductor.grpc.WorkflowServicePb; import com.netflix.conductor.proto.TaskDefPb; import com.netflix.conductor.proto.WorkflowDefPb; import com.netflix.conductor.service.MetadataService; -import io.grpc.Status; -import io.grpc.stub.StreamObserver; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; import java.util.List; import java.util.stream.Collectors; +import javax.inject.Inject; + +import io.grpc.Status; +import io.grpc.stub.StreamObserver; + public class MetadataServiceImpl extends MetadataServiceGrpc.MetadataServiceImplBase { private static final Logger LOGGER = LoggerFactory.getLogger(MetadataServiceImpl.class); private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; @@ -48,21 +52,24 @@ public void updateWorkflows(MetadataServicePb.UpdateWorkflowsRequest req, Stream } @Override - public void getWorkflow(MetadataServicePb.GetWorkflowRequest req, StreamObserver response) { - WorkflowDef def = service.getWorkflowDef(req.getName(), 
diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/MetadataResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/MetadataResource.java
index dbcc6e81c5..ec733e0884 100644
--- a/jersey/src/main/java/com/netflix/conductor/server/resources/MetadataResource.java
+++ b/jersey/src/main/java/com/netflix/conductor/server/resources/MetadataResource.java
@@ -15,6 +15,11 @@
  */
 package com.netflix.conductor.server.resources;
 
+import com.netflix.conductor.common.metadata.tasks.TaskDef;
+import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
+import com.netflix.conductor.core.execution.ApplicationException;
+import com.netflix.conductor.service.MetadataService;
+
 import java.util.List;
 
 import javax.inject.Inject;
@@ -29,95 +34,95 @@ import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 
-import com.netflix.conductor.common.metadata.tasks.TaskDef;
-import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
-import com.netflix.conductor.service.MetadataService;
-
 import io.swagger.annotations.Api;
 import io.swagger.annotations.ApiOperation;
 
 /**
  * @author Viren
- *
 */
-@Api(value="/metadata", produces=MediaType.APPLICATION_JSON, consumes=MediaType.APPLICATION_JSON, tags="Metadata Management")
+@Api(value = "/metadata", produces = MediaType.APPLICATION_JSON, consumes = MediaType.APPLICATION_JSON, tags = "Metadata Management")
 @Path("/metadata")
 @Produces({MediaType.APPLICATION_JSON})
 @Consumes({MediaType.APPLICATION_JSON})
 public class MetadataResource {
-    private MetadataService service;
-
-    @Inject
-    public MetadataResource(MetadataService service) {
-        this.service = service;
-    }
-
-    @POST
-    @Path("/workflow")
-    @ApiOperation("Create a new workflow definition")
-    public void create(WorkflowDef def) throws Exception{
-        service.registerWorkflowDef(def);
-    }
-
-    @PUT
-    @Path("/workflow")
-    @ApiOperation("Create or update workflow definition")
-    public void update(List<WorkflowDef> defs) throws Exception{
-        service.updateWorkflowDef(defs);
-    }
-
-    @GET
-    @ApiOperation("Retrieves workflow definition along with blueprint")
-    @Path("/workflow/{name}")
-    public WorkflowDef get(@PathParam("name") String name, @QueryParam("version") Integer version) throws Exception {
-        return service.getWorkflowDef(name, version);
-    }
-
-    @GET
-    @ApiOperation("Retrieves all workflow definition along with blueprint")
-    @Path("/workflow")
-    public List<WorkflowDef> getAll() throws Exception {
-        return service.getWorkflowDefs();
-    }
-
-    @POST
-    @Path("/taskdefs")
-    @ApiOperation("Create new task definition(s)")
-    public void registerTaskDef(List<TaskDef> taskDefs) throws Exception {
-        service.registerTaskDef(taskDefs);
-    }
-
-    @PUT
-    @Path("/taskdefs")
-    @ApiOperation("Update an existing task")
-    public void registerTaskDef(TaskDef taskDef) throws Exception {
-        service.updateTaskDef(taskDef);
-    }
-
-    @GET
-    @Path("/taskdefs")
-    @ApiOperation("Gets all task definition")
-    @Consumes({MediaType.WILDCARD})
-    public List<TaskDef> getTaskDefs() throws Exception{
-        return service.getTaskDefs();
-    }
-
-    @GET
-    @Path("/taskdefs/{tasktype}")
-    @ApiOperation("Gets the task definition")
-    @Consumes({MediaType.WILDCARD})
-    public TaskDef getTaskDef(@PathParam("tasktype") String taskType) throws Exception {
-        return service.getTaskDef(taskType);
-    }
-
-    @DELETE
-    @Path("/taskdefs/{tasktype}")
-    @ApiOperation("Remove a task definition")
-    public void unregisterTaskDef(@PathParam("tasktype") String taskType){
-        service.unregisterTaskDef(taskType);
-    }
-
-
+    private MetadataService service;
+
+    @Inject
+    public MetadataResource(MetadataService service) {
+        this.service = service;
+    }
+
+    @POST
+    @Path("/workflow")
+    @ApiOperation("Create a new workflow definition")
+    public void create(WorkflowDef def) {
+        service.registerWorkflowDef(def);
+    }
+
+    @PUT
+    @Path("/workflow")
+    @ApiOperation("Create or update workflow definition")
+    public void update(List<WorkflowDef> defs) {
+        service.updateWorkflowDef(defs);
+    }
+
+    @GET
+    @ApiOperation("Retrieves workflow definition along with blueprint")
+    @Path("/workflow/{name}")
+    public WorkflowDef get(@PathParam("name") String name, @QueryParam("version") Integer version) {
+        return service.getWorkflowDef(name, version).orElseThrow(() ->
+            new ApplicationException(
+                ApplicationException.Code.NOT_FOUND,
+                String.format("No such workflow for name=%s, version=%s", name, version)
+            )
+        );
+    }
+
+    @GET
+    @ApiOperation("Retrieves all workflow definitions along with blueprint")
+    @Path("/workflow")
+    public List<WorkflowDef> getAll() {
+        return service.getWorkflowDefs();
+    }
+
+    @POST
+    @Path("/taskdefs")
+    @ApiOperation("Create new task definition(s)")
+    public void registerTaskDef(List<TaskDef> taskDefs) {
+        service.registerTaskDef(taskDefs);
+    }
+
+    @PUT
+    @Path("/taskdefs")
+    @ApiOperation("Update an existing task")
+    public void registerTaskDef(TaskDef taskDef) {
+        service.updateTaskDef(taskDef);
+    }
+
+    @GET
+    @Path("/taskdefs")
+    @ApiOperation("Gets all task definitions")
+    @Consumes({MediaType.WILDCARD})
+    public List<TaskDef> getTaskDefs() {
+        return service.getTaskDefs();
+    }
+
+    @GET
+    @Path("/taskdefs/{tasktype}")
+    @ApiOperation("Gets the task definition")
+    @Consumes({MediaType.WILDCARD})
+    public TaskDef getTaskDef(@PathParam("tasktype") String taskType) {
+        return service.getTaskDef(taskType);
+    }
+
+    @DELETE
+    @Path("/taskdefs/{tasktype}")
+    @ApiOperation("Remove a task definition")
+    public void unregisterTaskDef(@PathParam("tasktype") String taskType) {
+        service.unregisterTaskDef(taskType);
+    }
+
+
 }
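The MetadataResource rewrite above turns the Optional returned by MetadataService.getWorkflowDef into an ApplicationException with Code.NOT_FOUND via orElseThrow. The following is a minimal sketch of that pattern; NotFoundException and the in-memory map are hypothetical stand-ins for ApplicationException and the metadata store.

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

// Sketch only: NotFoundException stands in for ApplicationException with
// Code.NOT_FOUND, and DEFS stands in for MetadataService's backing store.
public class OrElseThrowSketch {

    static class NotFoundException extends RuntimeException {
        NotFoundException(String message) {
            super(message);
        }
    }

    private static final Map<String, String> DEFS = new HashMap<>();
    static {
        DEFS.put("performance_test_1", "definition-v1");
    }

    // Stand-in for service.getWorkflowDef(name, version): empty when unknown.
    static Optional<String> getWorkflowDef(String name, Integer version) {
        return Optional.ofNullable(DEFS.get(name));
    }

    // Mirrors MetadataResource.get(): the empty Optional becomes a typed
    // exception instead of a null return value.
    static String get(String name, Integer version) {
        return getWorkflowDef(name, version).orElseThrow(() ->
                new NotFoundException(
                        String.format("No such workflow for name=%s, version=%s", name, version)));
    }

    public static void main(String[] args) {
        System.out.println(get("performance_test_1", 1)); // prints definition-v1
        try {
            get("missing", 1);
        } catch (NotFoundException e) {
            System.out.println("not found: " + e.getMessage()); // miss fails loudly
        }
    }
}

The benefit over returning null is that a missing definition fails loudly with a typed error, which the JAX-RS layer can then translate into an HTTP 404 for the caller.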
@ApiOperation("Retrieves all workflow definition along with blueprint") - @Path("/workflow") - public List getAll() throws Exception { - return service.getWorkflowDefs(); - } - - @POST - @Path("/taskdefs") - @ApiOperation("Create new task definition(s)") - public void registerTaskDef(List taskDefs) throws Exception { - service.registerTaskDef(taskDefs); - } - - @PUT - @Path("/taskdefs") - @ApiOperation("Update an existing task") - public void registerTaskDef(TaskDef taskDef) throws Exception { - service.updateTaskDef(taskDef); - } - - @GET - @Path("/taskdefs") - @ApiOperation("Gets all task definition") - @Consumes({MediaType.WILDCARD}) - public List getTaskDefs() throws Exception{ - return service.getTaskDefs(); - } - - @GET - @Path("/taskdefs/{tasktype}") - @ApiOperation("Gets the task definition") - @Consumes({MediaType.WILDCARD}) - public TaskDef getTaskDef(@PathParam("tasktype") String taskType) throws Exception { - return service.getTaskDef(taskType); - } - - @DELETE - @Path("/taskdefs/{tasktype}") - @ApiOperation("Remove a task definition") - public void unregisterTaskDef(@PathParam("tasktype") String taskType){ - service.unregisterTaskDef(taskType); - } - - + private MetadataService service; + + @Inject + public MetadataResource(MetadataService service) { + this.service = service; + } + + @POST + @Path("/workflow") + @ApiOperation("Create a new workflow definition") + public void create(WorkflowDef def) { + service.registerWorkflowDef(def); + } + + @PUT + @Path("/workflow") + @ApiOperation("Create or update workflow definition") + public void update(List defs) { + service.updateWorkflowDef(defs); + } + + @GET + @ApiOperation("Retrieves workflow definition along with blueprint") + @Path("/workflow/{name}") + public WorkflowDef get(@PathParam("name") String name, @QueryParam("version") Integer version) { + return service.getWorkflowDef(name, version).orElseThrow(() -> + new ApplicationException( + ApplicationException.Code.NOT_FOUND, + String.format("No such workflow for name=%s, version=%s", name, version) + ) + ); + } + + @GET + @ApiOperation("Retrieves all workflow definition along with blueprint") + @Path("/workflow") + public List getAll() { + return service.getWorkflowDefs(); + } + + @POST + @Path("/taskdefs") + @ApiOperation("Create new task definition(s)") + public void registerTaskDef(List taskDefs) { + service.registerTaskDef(taskDefs); + } + + @PUT + @Path("/taskdefs") + @ApiOperation("Update an existing task") + public void registerTaskDef(TaskDef taskDef) { + service.updateTaskDef(taskDef); + } + + @GET + @Path("/taskdefs") + @ApiOperation("Gets all task definition") + @Consumes({MediaType.WILDCARD}) + public List getTaskDefs() { + return service.getTaskDefs(); + } + + @GET + @Path("/taskdefs/{tasktype}") + @ApiOperation("Gets the task definition") + @Consumes({MediaType.WILDCARD}) + public TaskDef getTaskDef(@PathParam("tasktype") String taskType) { + return service.getTaskDef(taskType); + } + + @DELETE + @Path("/taskdefs/{tasktype}") + @ApiOperation("Remove a task definition") + public void unregisterTaskDef(@PathParam("tasktype") String taskType) { + service.unregisterTaskDef(taskType); + } + + } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index 4d68fefe93..767759c857 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ 
@@ -1,17 +1,14 @@
 /**
  * Copyright 2016 Netflix, Inc.
  *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
  *
- * http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
+ * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under the License.
  */
 /**
  *
  *
@@ -21,7 +18,6 @@ import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;
 import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest;
 import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
-import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
 import com.netflix.conductor.common.run.SearchResult;
 import com.netflix.conductor.common.run.Workflow;
 import com.netflix.conductor.common.run.WorkflowSummary;
@@ -31,8 +27,12 @@ import com.netflix.conductor.core.execution.WorkflowExecutor;
 import com.netflix.conductor.service.ExecutionService;
 import com.netflix.conductor.service.MetadataService;
-import io.swagger.annotations.Api;
-import io.swagger.annotations.ApiOperation;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 import javax.inject.Inject;
 import javax.inject.Singleton;
@@ -47,218 +47,214 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+
+import io.swagger.annotations.Api;
+import io.swagger.annotations.ApiOperation;
 
 /**
  * @author Viren
- *
 */
-@Api(value="/workflow", produces=MediaType.APPLICATION_JSON, consumes=MediaType.APPLICATION_JSON, tags="Workflow Management")
+@Api(value = "/workflow", produces = MediaType.APPLICATION_JSON, consumes = MediaType.APPLICATION_JSON, tags = "Workflow Management")
 @Path("/workflow")
 @Produces({MediaType.APPLICATION_JSON})
 @Consumes({MediaType.APPLICATION_JSON})
 @Singleton
 public class WorkflowResource {
-    private WorkflowExecutor executor;
-
-    private ExecutionService service;
-
-    private MetadataService metadata;
-
-    private int maxSearchSize;
-
-    @Inject
-    public WorkflowResource(WorkflowExecutor executor, ExecutionService service, MetadataService metadata, Configuration config) {
-        this.executor = executor;
-        this.service = service;
-        this.metadata = metadata;
-        this.maxSearchSize = config.getIntProperty("workflow.max.search.size", 5_000);
-    }
-
-    @POST
-    @Produces({ MediaType.TEXT_PLAIN })
-    @ApiOperation("Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain")
-    public String startWorkflow (StartWorkflowRequest request) throws Exception {
-        WorkflowDef def = metadata.getWorkflowDef(request.getName(), request.getVersion());
-        if(def == null){
-            throw new ApplicationException(Code.NOT_FOUND, "No such workflow found by name=" + request.getName() + ", version=" + request.getVersion());
-        }
-        return executor.startWorkflow(def.getName(), def.getVersion(), request.getCorrelationId(), request.getInput(), null, request.getTaskToDomain());
-    }
-
-    @POST
-    @Path("/{name}")
-    @Produces({ MediaType.TEXT_PLAIN })
-    @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking")
-    public String startWorkflow (
-        @PathParam("name") String name, @QueryParam("version") Integer version,
-        @QueryParam("correlationId") String correlationId, Map<String, Object> input) throws Exception {
-
-        WorkflowDef def = metadata.getWorkflowDef(name, version);
-        if(def == null){
-            throw new ApplicationException(Code.NOT_FOUND, "No such workflow found by name=" + name + ", version=" + version);
-        }
-        return executor.startWorkflow(def.getName(), def.getVersion(), correlationId, input, null);
-    }
-
-    @GET
-    @Path("/{name}/correlated/{correlationId}")
-    @ApiOperation("Lists workflows for the given correlation id")
-    @Consumes(MediaType.WILDCARD)
-    public List<Workflow> getWorkflows(@PathParam("name") String name, @PathParam("correlationId") String correlationId,
-        @QueryParam("includeClosed") @DefaultValue("false") boolean includeClosed,
-        @QueryParam("includeTasks") @DefaultValue("false") boolean includeTasks) throws Exception {
-        return service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks);
-    }
-
-    @POST
-    @Path("/{name}/correlated")
-    @ApiOperation("Lists workflows for the given correlation id list")
-    @Consumes(MediaType.WILDCARD)
-    public Map<String, List<Workflow>> getWorkflows(@PathParam("name") String name,
-        @QueryParam("includeClosed") @DefaultValue("false") boolean includeClosed,
-        @QueryParam("includeTasks") @DefaultValue("false") boolean includeTasks, List<String> correlationIds) throws Exception {
-        Map<String, List<Workflow>> workflows = new HashMap<>();
-        for(String correlationId : correlationIds) {
-            List<Workflow> ws = service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks);
-            workflows.put(correlationId, ws);
-        }
-        return workflows;
-    }
-
-    @GET
-    @Path("/{workflowId}")
-    @ApiOperation("Gets the workflow by workflow id")
-    @Consumes(MediaType.WILDCARD)
-    public Workflow getExecutionStatus(
-        @PathParam("workflowId") String workflowId,
-        @QueryParam("includeTasks") @DefaultValue("true") boolean includeTasks) throws Exception {
-        return service.getExecutionStatus(workflowId, includeTasks);
-    }
-
-    @DELETE
-    @Path("/{workflowId}/remove")
-    @ApiOperation("Removes the workflow from the system")
-    @Consumes(MediaType.WILDCARD)
-    public void delete(@PathParam("workflowId") String workflowId,
-        @QueryParam("archiveWorkflow") @DefaultValue("true") boolean archiveWorkflow) throws Exception {
-        service.removeWorkflow(workflowId, archiveWorkflow);
-    }
-
-    @GET
-    @Path("/running/{name}")
-    @ApiOperation("Retrieve all the running workflows")
-    @Consumes(MediaType.WILDCARD)
-    public List<String> getRunningWorkflow(@PathParam("name") String workflowName, @QueryParam("version") @DefaultValue("1") Integer version,
-        @QueryParam("startTime") Long startTime, @QueryParam("endTime") Long endTime) throws Exception {
-        if(startTime != null && endTime != null){
-            return executor.getWorkflows(workflowName, version, startTime, endTime);
-        } else {
-            return executor.getRunningWorkflowIds(workflowName);
-        }
-    }
-
-    @PUT
-    @Path("/decide/{workflowId}")
-    @ApiOperation("Starts the decision task for a workflow")
-    @Consumes(MediaType.WILDCARD)
-    public void decide(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.decide(workflowId);
-    }
-
-    @PUT
-    @Path("/{workflowId}/pause")
-    @ApiOperation("Pauses the workflow")
-    @Consumes(MediaType.WILDCARD)
-    public void pauseWorkflow(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.pauseWorkflow(workflowId);
-    }
-
-    @PUT
-    @Path("/{workflowId}/resume")
-    @ApiOperation("Resumes the workflow")
-    @Consumes(MediaType.WILDCARD)
-    public void resumeWorkflow(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.resumeWorkflow(workflowId);
-    }
-
-    @PUT
-    @Path("/{workflowId}/skiptask/{taskReferenceName}")
-    @ApiOperation("Skips a given task from a current running workflow")
-    @Consumes(MediaType.WILDCARD)
-    public void skipTaskFromWorkflow(@PathParam("workflowId") String workflowId, @PathParam("taskReferenceName") String taskReferenceName,
-        SkipTaskRequest skipTaskRequest) throws Exception {
-        executor.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest);
-    }
-
-    @POST
-    @Path("/{workflowId}/rerun")
-    @ApiOperation("Reruns the workflow from a specific task")
-    @Consumes(MediaType.APPLICATION_JSON)
-    @Produces({MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
-    public String rerun(@PathParam("workflowId") String workflowId, RerunWorkflowRequest request) throws Exception {
-        request.setReRunFromWorkflowId(workflowId);
-        return executor.rerun(request);
-    }
-
-    @POST
-    @Path("/{workflowId}/restart")
-    @ApiOperation("Restarts a completed workflow")
-    @Consumes(MediaType.WILDCARD)
-    public void restart(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.rewind(workflowId);
-    }
-
-    @POST
-    @Path("/{workflowId}/retry")
-    @ApiOperation("Retries the last failed task")
-    @Consumes(MediaType.WILDCARD)
-    public void retry(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.retry(workflowId);
-    }
-
-    @POST
-    @Path("/{workflowId}/resetcallbacks")
-    @ApiOperation("Resets callback times of all in_progress tasks to 0")
-    @Consumes(MediaType.WILDCARD)
-    public void reset(@PathParam("workflowId") String workflowId) throws Exception {
-        executor.resetCallbacksForInProgressTasks(workflowId);
-    }
-
-    @DELETE
-    @Path("/{workflowId}")
-    @ApiOperation("Terminate workflow execution")
-    @Consumes(MediaType.WILDCARD)
-    public void terminate(@PathParam("workflowId") String workflowId, @QueryParam("reason") String reason) throws Exception {
-        executor.terminateWorkflow(workflowId, reason);
-    }
-
-    @ApiOperation(value="Search for workflows based on payload and other parameters", notes="use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC")
If order is not specified, defaults to ASC")
-    @GET
-    @Consumes(MediaType.WILDCARD)
-    @Produces(MediaType.APPLICATION_JSON)
-    @Path("/search")
+    private WorkflowExecutor executor;
+
+    private ExecutionService service;
+
+    private MetadataService metadata;
+
+    private int maxSearchSize;
+
+    @Inject
+    public WorkflowResource(WorkflowExecutor executor, ExecutionService service, MetadataService metadata, Configuration config) {
+        this.executor = executor;
+        this.service = service;
+        this.metadata = metadata;
+        this.maxSearchSize = config.getIntProperty("workflow.max.search.size", 5_000);
+    }
+
+    @POST
+    @Produces({MediaType.TEXT_PLAIN})
+    @ApiOperation("Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain")
+    public String startWorkflow(StartWorkflowRequest request) {
+        return executor.startWorkflow(
+                request.getName(),
+                request.getVersion(),
+                request.getCorrelationId(),
+                request.getInput(),
+                null,
+                request.getTaskToDomain()
+        );
+    }
+
+    @POST
+    @Path("/{name}")
+    @Produces({MediaType.TEXT_PLAIN})
+    @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking")
+    public String startWorkflow(
+            @PathParam("name") String name, @QueryParam("version") Integer version,
+            @QueryParam("correlationId") String correlationId, Map<String, Object> input) {
+
+        return executor.startWorkflow(name, version, correlationId, input, null);
+    }
+
+    @GET
+    @Path("/{name}/correlated/{correlationId}")
+    @ApiOperation("Lists workflows for the given correlation id")
+    @Consumes(MediaType.WILDCARD)
+    public List<Workflow> getWorkflows(@PathParam("name") String name, @PathParam("correlationId") String correlationId,
+            @QueryParam("includeClosed") @DefaultValue("false") boolean includeClosed,
+            @QueryParam("includeTasks") @DefaultValue("false") boolean includeTasks) {
+        return service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks);
+    }
+
+    @POST
+    @Path("/{name}/correlated")
+    @ApiOperation("Lists workflows for the given correlation id list")
+    @Consumes(MediaType.WILDCARD)
+    public Map<String, List<Workflow>> getWorkflows(@PathParam("name") String name,
+            @QueryParam("includeClosed") @DefaultValue("false") boolean includeClosed,
+            @QueryParam("includeTasks") @DefaultValue("false") boolean includeTasks, List<String> correlationIds) {
+        Map<String, List<Workflow>> workflows = new HashMap<>();
+        for (String correlationId : correlationIds) {
+            List<Workflow> ws = service.getWorkflowInstances(name, correlationId, includeClosed, includeTasks);
+            workflows.put(correlationId, ws);
+        }
+        return workflows;
+    }
+
+    @GET
+    @Path("/{workflowId}")
+    @ApiOperation("Gets the workflow by workflow id")
+    @Consumes(MediaType.WILDCARD)
+    public Workflow getExecutionStatus(
+            @PathParam("workflowId") String workflowId,
+            @QueryParam("includeTasks") @DefaultValue("true") boolean includeTasks) throws Exception {
+        return service.getExecutionStatus(workflowId, includeTasks);
+    }
+
+    @DELETE
+    @Path("/{workflowId}/remove")
+    @ApiOperation("Removes the workflow from the system")
+    @Consumes(MediaType.WILDCARD)
+    public void delete(@PathParam("workflowId") String workflowId,
+            @QueryParam("archiveWorkflow") @DefaultValue("true") boolean archiveWorkflow) throws Exception {
+        service.removeWorkflow(workflowId, archiveWorkflow);
+    }
+
+    @GET
+    @Path("/running/{name}")
+    @ApiOperation("Retrieve all the running workflows")
+    @Consumes(MediaType.WILDCARD)
+    public List<String> getRunningWorkflow(@PathParam("name") String workflowName, @QueryParam("version") @DefaultValue("1") Integer version,
+            @QueryParam("startTime") Long startTime, @QueryParam("endTime") Long endTime) {
+        if (startTime != null && endTime != null) {
+            return executor.getWorkflows(workflowName, version, startTime, endTime);
+        } else {
+            return executor.getRunningWorkflowIds(workflowName);
+        }
+    }
+
+    @PUT
+    @Path("/decide/{workflowId}")
+    @ApiOperation("Starts the decision task for a workflow")
+    @Consumes(MediaType.WILDCARD)
+    public void decide(@PathParam("workflowId") String workflowId) {
+        executor.decide(workflowId);
+    }
+
+    @PUT
+    @Path("/{workflowId}/pause")
+    @ApiOperation("Pauses the workflow")
+    @Consumes(MediaType.WILDCARD)
+    public void pauseWorkflow(@PathParam("workflowId") String workflowId) {
+        executor.pauseWorkflow(workflowId);
+    }
+
+    @PUT
+    @Path("/{workflowId}/resume")
+    @ApiOperation("Resumes the workflow")
+    @Consumes(MediaType.WILDCARD)
+    public void resumeWorkflow(@PathParam("workflowId") String workflowId) {
+        executor.resumeWorkflow(workflowId);
+    }
+
+    @PUT
+    @Path("/{workflowId}/skiptask/{taskReferenceName}")
+    @ApiOperation("Skips a given task from a current running workflow")
+    @Consumes(MediaType.WILDCARD)
+    public void skipTaskFromWorkflow(@PathParam("workflowId") String workflowId, @PathParam("taskReferenceName") String taskReferenceName,
+            SkipTaskRequest skipTaskRequest) {
+        executor.skipTaskFromWorkflow(workflowId, taskReferenceName, skipTaskRequest);
+    }
+
+    @POST
+    @Path("/{workflowId}/rerun")
+    @ApiOperation("Reruns the workflow from a specific task")
+    @Consumes(MediaType.APPLICATION_JSON)
+    @Produces({MediaType.TEXT_PLAIN, MediaType.APPLICATION_JSON})
+    public String rerun(@PathParam("workflowId") String workflowId, RerunWorkflowRequest request) {
+        request.setReRunFromWorkflowId(workflowId);
+        return executor.rerun(request);
+    }
+
+    @POST
+    @Path("/{workflowId}/restart")
+    @ApiOperation("Restarts a completed workflow")
+    @Consumes(MediaType.WILDCARD)
+    public void restart(@PathParam("workflowId") String workflowId) {
+        executor.rewind(workflowId);
+    }
+
+    @POST
+    @Path("/{workflowId}/retry")
+    @ApiOperation("Retries the last failed task")
+    @Consumes(MediaType.WILDCARD)
+    public void retry(@PathParam("workflowId") String workflowId) {
+        executor.retry(workflowId);
+    }
+
+    @POST
+    @Path("/{workflowId}/resetcallbacks")
+    @ApiOperation("Resets callback times of all in_progress tasks to 0")
+    @Consumes(MediaType.WILDCARD)
+    public void reset(@PathParam("workflowId") String workflowId) {
+        executor.resetCallbacksForInProgressTasks(workflowId);
+    }
+
+    @DELETE
+    @Path("/{workflowId}")
+    @ApiOperation("Terminate workflow execution")
+    @Consumes(MediaType.WILDCARD)
+    public void terminate(@PathParam("workflowId") String workflowId, @QueryParam("reason") String reason) throws Exception {
+        executor.terminateWorkflow(workflowId, reason);
+    }
+
+    @ApiOperation(value = "Search for workflows based on payload and other parameters", notes = "use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC")
+    @GET
+    @Consumes(MediaType.WILDCARD)
+    @Produces(MediaType.APPLICATION_JSON)
+    @Path("/search")
     public SearchResult<WorkflowSummary> search(
-            @QueryParam("start") @DefaultValue("0") int start,
-            @QueryParam("size") @DefaultValue("100") int size,
-            @QueryParam("sort") String sort,
-            @QueryParam("freeText") @DefaultValue("*") String freeText,
-            @QueryParam("query") String query
-    ){
-
-        if(size > maxSearchSize) {
-            throw new ApplicationException(Code.INVALID_INPUT, "Cannot return more than " + maxSearchSize + " workflows. Please use pagination");
-        }
-        return service.search(query, freeText, start, size, convert(sort));
-    }
-
-    @ApiOperation(value = "Search for workflows based on task parameters", notes="use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC")
+            @QueryParam("start") @DefaultValue("0") int start,
+            @QueryParam("size") @DefaultValue("100") int size,
+            @QueryParam("sort") String sort,
+            @QueryParam("freeText") @DefaultValue("*") String freeText,
+            @QueryParam("query") String query
+    ) {
+
+        if (size > maxSearchSize) {
+            throw new ApplicationException(Code.INVALID_INPUT, "Cannot return more than " + maxSearchSize + " workflows. Please use pagination");
+        }
+        return service.search(query, freeText, start, size, convert(sort));
+    }
+
+    @ApiOperation(value = "Search for workflows based on task parameters", notes = "use sort options as sort=<field>:ASC|DESC e.g. sort=name&sort=workflowId:DESC. If order is not specified, defaults to ASC")
     @GET
     @Consumes(MediaType.WILDCARD)
     @Produces(MediaType.APPLICATION_JSON)
@@ -273,11 +269,11 @@ public SearchResult<WorkflowSummary> searchWorkflowsByTasks(
         return service.searchWorkflowByTasks(query, freeText, start, size, convert(sort));
     }
 
-    private List<String> convert(String sortStr) {
-        List<String> list = new ArrayList<String>();
-        if(sortStr != null && sortStr.length() != 0){
-            list = Arrays.asList(sortStr.split("\\|"));
-        }
-        return list;
-    }
+    private List<String> convert(String sortStr) {
+        List<String> list = new ArrayList<String>();
+        if (sortStr != null && sortStr.length() != 0) {
+            list = Arrays.asList(sortStr.split("\\|"));
+        }
+        return list;
+    }
 }
diff --git a/mysql-persistence/build.gradle b/mysql-persistence/build.gradle
index 5982268562..f7c5cdbe65 100644
--- a/mysql-persistence/build.gradle
+++ b/mysql-persistence/build.gradle
@@ -1,19 +1,20 @@
 dependencies {
-    compile project(':conductor-core')
-    compile "com.google.inject:guice:${revGuice}"
+    compile project(':conductor-core')
+    compile "com.google.inject:guice:${revGuice}"
 
-    compile "commons-io:commons-io:${revCommonsIo}"
-    compile "mysql:mysql-connector-java:${revMySqlConnector}"
-    compile "com.zaxxer:HikariCP:${revHikariCP}"
-    compile "org.flywaydb:flyway-core:${revFlywayCore}"
+    compile "commons-io:commons-io:${revCommonsIo}"
+    compile "mysql:mysql-connector-java:${revMySqlConnector}"
+    compile "com.zaxxer:HikariCP:${revHikariCP}"
+    compile "org.flywaydb:flyway-core:${revFlywayCore}"
 
-    testCompile "ch.vorburger.mariaDB4j:mariaDB4j:${revMariaDB4j}"
-    //TODO Change the below deps to use the same version as one in versionsOfDependencies.gradle
-    testCompile 'ch.qos.logback:logback-core:1.2.3'
-    testCompile 'ch.qos.logback:logback-classic:1.2.3'
+    testCompile project(':conductor-core').sourceSets.test.output
+    testCompile "ch.vorburger.mariaDB4j:mariaDB4j:${revMariaDB4j}"
+    //TODO Change the below deps to use the same version as one in versionsOfDependencies.gradle
+    testCompile 'ch.qos.logback:logback-core:1.2.3'
+    testCompile 'ch.qos.logback:logback-classic:1.2.3'
 }
 
 test {
-    //the MySQL unit tests must run within the same JVM to share the same embedded DB
-    maxParallelForks = 1
+    //the MySQL unit tests must run within the same JVM to share the same embedded DB
+    maxParallelForks = 1
 }
diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAO.java
index 1233920e96..e9a967bc37 100644
--- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAO.java
+++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAO.java
@@ -33,7 +33,7 @@ public MySQLMetadataDAO(ObjectMapper om, DataSource dataSource, Configuration co
         int cacheRefreshTime = config.getIntProperty(PROP_TASKDEF_CACHE_REFRESH, DEFAULT_TASKDEF_CACHE_REFRESH_SECONDS);
         Executors.newSingleThreadScheduledExecutor()
-            .scheduleWithFixedDelay(this::refreshTaskDefs, cacheRefreshTime, cacheRefreshTime, TimeUnit.SECONDS);
+                .scheduleWithFixedDelay(this::refreshTaskDefs, cacheRefreshTime, cacheRefreshTime, TimeUnit.SECONDS);
     }
 
     @Override
@@ -95,7 +95,7 @@ public void create(WorkflowDef def) {
         withTransaction(tx -> {
             if (workflowExists(tx, def)) {
                 throw new ApplicationException(ApplicationException.Code.CONFLICT,
-                    "Workflow with " + def.key() + " already exists!");
+                        "Workflow with " + def.key() + " already exists!");
             }
 
             insertOrUpdateWorkflowDef(tx, def);
@@ -111,20 +111,24 @@ public void update(WorkflowDef def) {
     @Override
-    public WorkflowDef getLatest(String name) {
+    public Optional<WorkflowDef> getLatest(String name) {
         final String GET_LATEST_WORKFLOW_DEF_QUERY = "SELECT json_data FROM meta_workflow_def WHERE NAME = ? AND " +
-            "version = latest_version";
+                "version = latest_version";
 
-        return queryWithTransaction(GET_LATEST_WORKFLOW_DEF_QUERY,
-            q -> q.addParameter(name).executeAndFetchFirst(WorkflowDef.class));
+        return Optional.ofNullable(
+                queryWithTransaction(GET_LATEST_WORKFLOW_DEF_QUERY,
+                        q -> q.addParameter(name).executeAndFetchFirst(WorkflowDef.class))
+        );
     }
 
     @Override
-    public WorkflowDef get(String name, int version) {
+    public Optional<WorkflowDef> get(String name, int version) {
         final String GET_WORKFLOW_DEF_QUERY = "SELECT json_data FROM meta_workflow_def WHERE NAME = ? AND version = ?";
-        return queryWithTransaction(GET_WORKFLOW_DEF_QUERY, q -> q.addParameter(name)
-            .addParameter(version)
-            .executeAndFetchFirst(WorkflowDef.class));
+        return Optional.ofNullable(
+                queryWithTransaction(GET_WORKFLOW_DEF_QUERY, q -> q.addParameter(name)
+                        .addParameter(version)
+                        .executeAndFetchFirst(WorkflowDef.class))
+        );
     }
 
     @Override
@@ -143,7 +147,7 @@ public List<WorkflowDef> getAll() {
     @Override
     public List<WorkflowDef> getAllLatest() {
         final String GET_ALL_LATEST_WORKFLOW_DEF_QUERY = "SELECT json_data FROM meta_workflow_def WHERE version = " +
-            "latest_version";
+                "latest_version";
 
         return queryWithTransaction(GET_ALL_LATEST_WORKFLOW_DEF_QUERY, q -> q.executeAndFetch(WorkflowDef.class));
     }
@@ -151,10 +155,10 @@ public List<WorkflowDef> getAllLatest() {
     @Override
     public List<WorkflowDef> getAllVersions(String name) {
        final String GET_ALL_VERSIONS_WORKFLOW_DEF_QUERY = "SELECT json_data FROM meta_workflow_def WHERE name = ? 
" + - "ORDER BY version"; + "ORDER BY version"; return queryWithTransaction(GET_ALL_VERSIONS_WORKFLOW_DEF_QUERY, - q -> q.addParameter(name).executeAndFetch(WorkflowDef.class)); + q -> q.addParameter(name).executeAndFetch(WorkflowDef.class)); } @Override @@ -162,19 +166,19 @@ public void addEventHandler(EventHandler eventHandler) { Preconditions.checkNotNull(eventHandler.getName(), "EventHandler name cannot be null"); final String INSERT_EVENT_HANDLER_QUERY = "INSERT INTO meta_event_handler (name, event, active, json_data) " + - "VALUES (?, ?, ?, ?)"; + "VALUES (?, ?, ?, ?)"; withTransaction(tx -> { if (getEventHandler(tx, eventHandler.getName()) != null) { throw new ApplicationException(ApplicationException.Code.CONFLICT, - "EventHandler with name " + eventHandler.getName() + " already exists!"); + "EventHandler with name " + eventHandler.getName() + " already exists!"); } execute(tx, INSERT_EVENT_HANDLER_QUERY, q -> q.addParameter(eventHandler.getName()) - .addParameter(eventHandler.getEvent()) - .addParameter(eventHandler.isActive()) - .addJsonParameter(eventHandler) - .executeUpdate()); + .addParameter(eventHandler.getEvent()) + .addParameter(eventHandler.isActive()) + .addJsonParameter(eventHandler) + .executeUpdate()); }); } @@ -184,22 +188,22 @@ public void updateEventHandler(EventHandler eventHandler) { //@formatter:off final String UPDATE_EVENT_HANDLER_QUERY = "UPDATE meta_event_handler SET " + - "event = ?, active = ?, json_data = ?, " + - "modified_on = CURRENT_TIMESTAMP WHERE name = ?"; + "event = ?, active = ?, json_data = ?, " + + "modified_on = CURRENT_TIMESTAMP WHERE name = ?"; //@formatter:on withTransaction(tx -> { EventHandler existing = getEventHandler(tx, eventHandler.getName()); if (existing == null) { throw new ApplicationException(ApplicationException.Code.NOT_FOUND, - "EventHandler with name " + eventHandler.getName() + " not found!"); + "EventHandler with name " + eventHandler.getName() + " not found!"); } execute(tx, UPDATE_EVENT_HANDLER_QUERY, q -> q.addParameter(eventHandler.getEvent()) - .addParameter(eventHandler.isActive()) - .addJsonParameter(eventHandler) - .addParameter(eventHandler.getName()) - .executeUpdate()); + .addParameter(eventHandler.isActive()) + .addJsonParameter(eventHandler) + .addParameter(eventHandler.getName()) + .executeUpdate()); }); } @@ -211,7 +215,7 @@ public void removeEventHandlerStatus(String name) { EventHandler existing = getEventHandler(tx, name); if (existing == null) { throw new ApplicationException(ApplicationException.Code.NOT_FOUND, - "EventHandler with name " + name + " not found!"); + "EventHandler with name " + name + " not found!"); } execute(tx, DELETE_EVENT_HANDLER_QUERY, q -> q.addParameter(name).executeDelete()); @@ -244,8 +248,8 @@ public List getEventHandlersForEvent(String event, boolean activeO } /** - * Use {@link Preconditions} to check for required {@link TaskDef} fields, throwing a Runtime exception - * if validations fail. + * Use {@link Preconditions} to check for required {@link TaskDef} fields, throwing a Runtime exception if + * validations fail. * * @param taskDef The {@code TaskDef} to check. */ @@ -255,8 +259,8 @@ private void validate(TaskDef taskDef) { } /** - * Use {@link Preconditions} to check for required {@link WorkflowDef} fields, throwing a Runtime exception - * if validations fail. + * Use {@link Preconditions} to check for required {@link WorkflowDef} fields, throwing a Runtime exception if + * validations fail. * * @param def The {@code WorkflowDef} to check. 
*/ @@ -276,7 +280,7 @@ private EventHandler getEventHandler(Connection connection, String name) { final String READ_ONE_EVENT_HANDLER_QUERY = "SELECT json_data FROM meta_event_handler WHERE name = ?"; return query(connection, READ_ONE_EVENT_HANDLER_QUERY, - q -> q.addParameter(name).executeAndFetchFirst(EventHandler.class)); + q -> q.addParameter(name).executeAndFetchFirst(EventHandler.class)); } /** @@ -288,10 +292,10 @@ private EventHandler getEventHandler(Connection connection, String name) { */ private Boolean workflowExists(Connection connection, WorkflowDef def) { final String CHECK_WORKFLOW_DEF_EXISTS_QUERY = "SELECT COUNT(*) FROM meta_workflow_def WHERE name = ? AND " + - "version = ?"; + "version = ?"; return query(connection, CHECK_WORKFLOW_DEF_EXISTS_QUERY, - q -> q.addParameter(def.getName()).addParameter(def.getVersion()).exists()); + q -> q.addParameter(def.getName()).addParameter(def.getVersion()).exists()); } /** @@ -303,7 +307,7 @@ private Boolean workflowExists(Connection connection, WorkflowDef def) { */ private Optional getLatestVersion(Connection tx, WorkflowDef def) { final String GET_LATEST_WORKFLOW_DEF_VERSION = "SELECT max(version) AS version FROM meta_workflow_def WHERE " + - "name = ?"; + "name = ?"; Integer val = query(tx, GET_LATEST_WORKFLOW_DEF_VERSION, q -> { q.addParameter(def.getName()); @@ -327,7 +331,7 @@ private Optional getLatestVersion(Connection tx, WorkflowDef def) { */ private void updateLatestVersion(Connection tx, WorkflowDef def) { final String UPDATE_WORKFLOW_DEF_LATEST_VERSION_QUERY = "UPDATE meta_workflow_def SET latest_version = ? " + - "WHERE name = ?"; + "WHERE name = ?"; execute(tx, UPDATE_WORKFLOW_DEF_LATEST_VERSION_QUERY, q -> q.addParameter(def.getVersion()).addParameter(def.getName()).executeUpdate()); @@ -335,26 +339,26 @@ private void updateLatestVersion(Connection tx, WorkflowDef def) { private void insertOrUpdateWorkflowDef(Connection tx, WorkflowDef def) { final String INSERT_WORKFLOW_DEF_QUERY = "INSERT INTO meta_workflow_def (name, version, json_data) VALUES (?," + - " ?, ?)"; + " ?, ?)"; Optional version = getLatestVersion(tx, def); if (!version.isPresent() || version.get() < def.getVersion()) { execute(tx, INSERT_WORKFLOW_DEF_QUERY, q -> q.addParameter(def.getName()) - .addParameter(def.getVersion()) - .addJsonParameter(def) - .executeUpdate()); + .addParameter(def.getVersion()) + .addJsonParameter(def) + .executeUpdate()); } else { //@formatter:off final String UPDATE_WORKFLOW_DEF_QUERY = - "UPDATE meta_workflow_def " + - "SET json_data = ?, modified_on = CURRENT_TIMESTAMP " + - "WHERE name = ? AND version = ?"; + "UPDATE meta_workflow_def " + + "SET json_data = ?, modified_on = CURRENT_TIMESTAMP " + + "WHERE name = ? 
AND version = ?"; //@formatter:on execute(tx, UPDATE_WORKFLOW_DEF_QUERY, q -> q.addJsonParameter(def) - .addParameter(def.getName()) - .addParameter(def.getVersion()) - .executeUpdate()); + .addParameter(def.getName()) + .addParameter(def.getVersion()) + .executeUpdate()); } updateLatestVersion(tx, def); @@ -406,7 +410,7 @@ private TaskDef getTaskDefFromDB(String name) { final String READ_ONE_TASKDEF_QUERY = "SELECT json_data FROM meta_task_def WHERE name = ?"; return queryWithTransaction(READ_ONE_TASKDEF_QUERY, - q -> q.addParameter(name).executeAndFetchFirst(TaskDef.class)); + q -> q.addParameter(name).executeAndFetchFirst(TaskDef.class)); } private String insertOrUpdateTaskDef(TaskDef taskDef) { diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java similarity index 78% rename from mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java rename to mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java index 62a4823901..af1a86e362 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLBaseDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java @@ -1,9 +1,9 @@ package com.netflix.conductor.dao.mysql; import com.fasterxml.jackson.databind.ObjectMapper; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.common.utils.JsonMapperProvider; import com.zaxxer.hikari.HikariDataSource; import org.flywaydb.core.Flyway; @@ -18,23 +18,23 @@ import javax.sql.DataSource; -import ch.vorburger.mariadb4j.DB; - @SuppressWarnings("Duplicates") -public class MySQLBaseDAOTest { - protected final Logger logger = LoggerFactory.getLogger(getClass()); - protected final DataSource dataSource; - protected final TestConfiguration testConfiguration = new TestConfiguration(); - protected final ObjectMapper objectMapper = new JsonMapperProvider().get(); - protected final DB db = EmbeddedDatabase.INSTANCE.getDB(); +public class MySQLDAOTestUtil { + private static final Logger logger = LoggerFactory.getLogger(MySQLDAOTestUtil.class); + private final DataSource dataSource; + private final TestConfiguration testConfiguration = new TestConfiguration(); + private final ObjectMapper objectMapper = new JsonMapperProvider().get(); static AtomicBoolean migrated = new AtomicBoolean(false); - MySQLBaseDAOTest() { + MySQLDAOTestUtil() { testConfiguration.setProperty("jdbc.url", "jdbc:mysql://localhost:33307/conductor"); testConfiguration.setProperty("jdbc.username", "root"); testConfiguration.setProperty("jdbc.password", ""); + // Ensure the DB starts + EmbeddedDatabase.INSTANCE.getDB(); + this.dataSource = getDataSource(testConfiguration); } @@ -61,7 +61,7 @@ private synchronized static void flywayMigrate(DataSource dataSource) { return; } - synchronized (MySQLBaseDAOTest.class) { + synchronized (MySQLDAOTestUtil.class) { Flyway flyway = new Flyway(); flyway.setDataSource(dataSource); flyway.setPlaceholderReplacement(false); @@ -70,7 +70,19 @@ private synchronized static void flywayMigrate(DataSource dataSource) { } } - protected void resetAllData() { + public DataSource getDataSource() { + return dataSource; + } + + public TestConfiguration getTestConfiguration() { + return testConfiguration; + } + + public 
ObjectMapper getObjectMapper() { + return objectMapper; + } + + public void resetAllData() { logger.info("Resetting data for test"); try (Connection connection = dataSource.getConnection()) { try(ResultSet rs = connection.prepareStatement("SHOW TABLES").executeQuery(); diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java index bf749f7529..3445494598 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java @@ -1,451 +1,69 @@ package com.netflix.conductor.dao.mysql; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.mockito.Mockito.mock; - -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.concurrent.Callable; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.stream.Collectors; - -import org.apache.commons.lang3.builder.EqualsBuilder; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; - -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.core.execution.ApplicationException; +import com.netflix.conductor.dao.ExecutionDAO; +import com.netflix.conductor.dao.ExecutionDAOTest; import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.dao.MetadataDAO; -@SuppressWarnings("Duplicates") -public class MySQLExecutionDAOTest extends MySQLBaseDAOTest { - - private MySQLMetadataDAO metadata; - private MySQLExecutionDAO dao; - - @Before - public void setup() throws Exception { - metadata = new MySQLMetadataDAO(objectMapper, dataSource, testConfiguration); - dao = new MySQLExecutionDAO(mock(IndexDAO.class), metadata, objectMapper, dataSource); - resetAllData(); - } - - @Rule - public ExpectedException expected = ExpectedException.none(); - - @Test - public void testTaskExceedsLimit() throws Exception { - TaskDef def = new TaskDef(); - def.setName("task1"); - def.setConcurrentExecLimit(1); - metadata.createTaskDef(def); - - List tasks = new LinkedList<>(); - for(int i = 0; i < 15; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t_" + i); - task.setWorkflowInstanceId("workflow_" + i); - task.setReferenceTaskName("task1"); - task.setTaskDefName("task1"); - tasks.add(task); - task.setStatus(Status.SCHEDULED); - } - - dao.createTasks(tasks); - assertFalse(dao.exceedsInProgressLimit(tasks.get(0))); - tasks.get(0).setStatus(Status.IN_PROGRESS); - dao.updateTask(tasks.get(0)); - - for(Task task 
: tasks) { - assertTrue(dao.exceedsInProgressLimit(task)); - } - } +import org.junit.Before; +import org.junit.Test; - @Test - public void testCreateTaskException() throws Exception { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setTaskDefName("task1"); +import java.util.List; - expected.expect(ApplicationException.class); - expected.expectMessage("Workflow instance id cannot be null"); - dao.createTasks(Collections.singletonList(task)); +import static junit.framework.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.mock; - task.setWorkflowInstanceId("wfid"); - expected.expect(ApplicationException.class); - expected.expectMessage("Task reference name cannot be null"); - dao.createTasks(Collections.singletonList(task)); - } +@SuppressWarnings("Duplicates") +public class MySQLExecutionDAOTest extends ExecutionDAOTest { + + private final MySQLDAOTestUtil testMySQL = new MySQLDAOTestUtil(); + private MySQLMetadataDAO metadata; + private MySQLExecutionDAO dao; + + @Before + public void setup() throws Exception { + metadata = new MySQLMetadataDAO( + testMySQL.getObjectMapper(), + testMySQL.getDataSource(), + testMySQL.getTestConfiguration() + ); + dao = new MySQLExecutionDAO( + mock(IndexDAO.class), + metadata, + testMySQL.getObjectMapper(), + testMySQL.getDataSource() + ); + testMySQL.resetAllData(); + } - @Test - public void testCreateTaskException2() throws Exception { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setTaskDefName("task1"); - task.setWorkflowInstanceId("wfid"); + @Test + public void testPendingByCorrelationId() throws Exception { - expected.expect(ApplicationException.class); - expected.expectMessage("Task reference name cannot be null"); - dao.createTasks(Collections.singletonList(task)); - } + WorkflowDef def = new WorkflowDef(); + def.setName("pending_count_correlation_jtest"); - @Test - public void testPollData() throws Exception { - dao.updateLastPoll("taskDef", null, "workerId1"); - PollData pd = dao.getPollData("taskDef", null); - assertNotNull(pd); - assertTrue(pd.getLastPollTime() > 0); - assertEquals(pd.getQueueName(), "taskDef"); - assertEquals(pd.getDomain(), null); - assertEquals(pd.getWorkerId(), "workerId1"); + Workflow workflow = createTestWorkflow(); + workflow.setWorkflowDefinition(def); - dao.updateLastPoll("taskDef", "domain1", "workerId1"); - pd = dao.getPollData("taskDef", "domain1"); - assertNotNull(pd); - assertTrue(pd.getLastPollTime() > 0); - assertEquals(pd.getQueueName(), "taskDef"); - assertEquals(pd.getDomain(), "domain1"); - assertEquals(pd.getWorkerId(), "workerId1"); + String idBase = workflow.getWorkflowId(); + generateWorkflows(workflow, idBase, 10); - List pData = dao.getPollData("taskDef"); - assertEquals(pData.size(), 2); - pd = dao.getPollData("taskDef", "domain2"); - assertTrue(pd == null); - } - - @Test - public void testWith5THreads() throws InterruptedException, ExecutionException { - testPollDataWithParallelThreads(5); + List bycorrelationId = getExecutionDAO().getWorkflowsByCorrelationId("corr001", true); + assertNotNull(bycorrelationId); + assertEquals(10, bycorrelationId.size()); } - - - private void testPollDataWithParallelThreads(final int threadCount) throws InterruptedException, ExecutionException { - Callable task = new Callable() { - @Override - public PollData call() { - dao.updateLastPoll("taskDef", null, "workerId1"); - return dao.getPollData("taskDef", 
null); - } - }; - List> tasks = Collections.nCopies(threadCount, task); - - ExecutorService executorService = Executors.newFixedThreadPool(threadCount); - List> futures = executorService.invokeAll(tasks); - List resultList = new ArrayList(futures.size()); - // Check for exceptions - for (Future future : futures) { - // Throws an exception if an exception was thrown by the task. - PollData pollData = future.get(); - System.out.println(pollData); - if(pollData !=null) - resultList.add(future.get().getQueueName()); - } - // Validate the IDs - Assert.assertEquals(threadCount, futures.size()); - List expectedList = new ArrayList(threadCount); - for (long i = 1; i <= threadCount; i++) { - expectedList.add("taskDef"); - } - Collections.sort(resultList); - Assert.assertEquals(expectedList, resultList); + @Override + public ExecutionDAO getExecutionDAO() { + return dao; } - @Test - public void testTaskCreateDups() throws Exception { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("t" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + i); - task.setStatus(Status.IN_PROGRESS); - tasks.add(task); - } - - //Let's insert a retried task - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 2); - task.setReferenceTaskName("t" + 2); - task.setRetryCount(1); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + 2); - task.setStatus(Status.IN_PROGRESS); - tasks.add(task); - - //Duplicate task! - task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 1); - task.setReferenceTaskName("t" + 1); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + 1); - task.setStatus(Status.IN_PROGRESS); - tasks.add(task); - - List created = dao.createTasks(tasks); - assertEquals(tasks.size()-1, created.size()); //1 less - - Set srcIds = tasks.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount()).collect(Collectors.toSet()); - Set createdIds = created.stream().map(t -> t.getReferenceTaskName() + "." 
+ t.getRetryCount()).collect(Collectors.toSet()); - - assertEquals(srcIds, createdIds); - - List pending = dao.getPendingTasksByWorkflow("task0", workflowId); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0))); - - List found = dao.getTasks(tasks.get(0).getTaskDefName(), null, 1); - assertNotNull(found); - assertEquals(1, found.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0))); - } - - @Test - public void testTaskOps() throws Exception { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(Status.IN_PROGRESS); - tasks.add(task); - } - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("x" + workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId("x" + workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(Status.IN_PROGRESS); - dao.createTasks(Arrays.asList(task)); - } - - - List created = dao.createTasks(tasks); - assertEquals(tasks.size(), created.size()); - - List pending = dao.getPendingTasksForTaskType(tasks.get(0).getTaskDefName()); - assertNotNull(pending); - assertEquals(2, pending.size()); - //Pending list can come in any order. finding the one we are looking for and then comparing - Task matching = pending.stream().filter(task -> task.getTaskId().equals(tasks.get(0).getTaskId())).findAny().get(); - assertTrue(EqualsBuilder.reflectionEquals(matching, tasks.get(0))); - - List update = new LinkedList<>(); - for(int i = 0; i < 3; i++) { - Task found = dao.getTask(workflowId + "_t" + i); - assertNotNull(found); - found.getOutputData().put("updated", true); - found.setStatus(Status.COMPLETED); - update.add(found); - } - dao.updateTasks(update); - - List taskIds = tasks.stream().map(Task::getTaskId).collect(Collectors.toList()); - List found = dao.getTasks(taskIds); - assertEquals(taskIds.size(), found.size()); - found.forEach(task -> { - assertTrue(task.getOutputData().containsKey("updated")); - assertEquals(true, task.getOutputData().get("updated")); - dao.removeTask(task.getTaskId()); - }); - - found = dao.getTasks(taskIds); - assertTrue(found.isEmpty()); - } - - @Test - public void test() throws Exception { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("correlationX"); - workflow.setCreatedBy("junit_tester"); - workflow.setEndTime(200L); - - Map input = new HashMap<>(); - input.put("param1", "param1 value"); - input.put("param2", 100); - workflow.setInput(input); - - Map output = new HashMap<>(); - output.put("ouput1", "output 1 value"); - output.put("op2", 300); - workflow.setOutput(output); - - workflow.setOwnerApp("workflow"); - workflow.setParentWorkflowId("parentWorkflowId"); - workflow.setParentWorkflowTaskId("parentWFTaskId"); - workflow.setReasonForIncompletion("missing recipe"); - workflow.setReRunFromWorkflowId("re-run from id1"); - workflow.setSchemaVersion(2); - workflow.setStartTime(90L); - workflow.setStatus(WorkflowStatus.FAILED); - workflow.setWorkflowId("workflow0"); - - List tasks = new LinkedList<>(); - - Task task = new 
Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setReferenceTaskName("t1"); - task.setWorkflowInstanceId(workflow.getWorkflowId()); - task.setTaskDefName("task1"); - - Task task2 = new Task(); - task2.setScheduledTime(2L); - task2.setSeq(2); - task2.setTaskId("t2"); - task2.setReferenceTaskName("t2"); - task2.setWorkflowInstanceId(workflow.getWorkflowId()); - task2.setTaskDefName("task2"); - - Task task3 = new Task(); - task3.setScheduledTime(2L); - task3.setSeq(3); - task3.setTaskId("t3"); - task3.setReferenceTaskName("t3"); - task3.setWorkflowInstanceId(workflow.getWorkflowId()); - task3.setTaskDefName("task3"); - - tasks.add(task); - tasks.add(task2); - tasks.add(task3); - - workflow.setTasks(tasks); - - workflow.setUpdatedBy("junit_tester"); - workflow.setUpdateTime(800L); - workflow.setVersion(3); - //workflow.setWorkflowId("wf0001"); - workflow.setWorkflowType("Junit Workflow"); - - String workflowId = dao.createWorkflow(workflow); - List created = dao.createTasks(tasks); - assertEquals(tasks.size(), created.size()); - - Workflow workflowWithTasks = dao.getWorkflow(workflow.getWorkflowId(), true); - assertEquals(workflowWithTasks.getWorkflowId(), workflowId); - assertTrue(!workflowWithTasks.getTasks().isEmpty()); - - assertEquals(workflow.getWorkflowId(), workflowId); - Workflow found = dao.getWorkflow(workflowId, false); - assertTrue(found.getTasks().isEmpty()); - - workflow.getTasks().clear(); - assertTrue(EqualsBuilder.reflectionEquals(workflow, found)); - - workflow.getInput().put("updated", true); - dao.updateWorkflow(workflow); - found = dao.getWorkflow(workflowId); - assertNotNull(found); - assertTrue(found.getInput().containsKey("updated")); - assertEquals(true, found.getInput().get("updated")); - - List running = dao.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - workflow.setStatus(WorkflowStatus.RUNNING); - dao.updateWorkflow(workflow); - - running = dao.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertEquals(1, running.size()); - assertEquals(workflow.getWorkflowId(), running.get(0)); - - List pending = dao.getPendingWorkflowsByType(workflow.getWorkflowType()); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertEquals(3, pending.get(0).getTasks().size()); - pending.get(0).getTasks().clear(); - assertTrue(EqualsBuilder.reflectionEquals(workflow, pending.get(0))); - - workflow.setStatus(WorkflowStatus.COMPLETED); - dao.updateWorkflow(workflow); - running = dao.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - List bytime = dao.getWorkflowsByType(workflow.getWorkflowType(), System.currentTimeMillis(), System.currentTimeMillis()+100); - assertNotNull(bytime); - assertTrue(bytime.isEmpty()); - - bytime = dao.getWorkflowsByType(workflow.getWorkflowType(), workflow.getCreateTime() - 10, workflow.getCreateTime() + 10); - assertNotNull(bytime); - assertEquals(1, bytime.size()); - - String workflowName = "pending_count_test"; - String idBase = workflow.getWorkflowId(); - for(int i = 0; i < 10; i++) { - workflow.setWorkflowId("x" + i + idBase); - workflow.setCorrelationId("corr001"); - workflow.setStatus(WorkflowStatus.RUNNING); - workflow.setWorkflowType(workflowName); - dao.createWorkflow(workflow); - } - - List bycorrelationId = dao.getWorkflowsByCorrelationId("corr001", true); - assertNotNull(bycorrelationId); - assertEquals(10, bycorrelationId.size()); - - 
long count = dao.getPendingWorkflowCount(workflowName); - assertEquals(10, count); - - for(int i = 0; i < 10; i++) { - dao.removeFromPendingWorkflow(workflowName, "x" + i + idBase); - } - count = dao.getPendingWorkflowCount(workflowName); - assertEquals(0, count); - } + @Override + public MetadataDAO getMetadataDAO() { + return metadata; + } } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java index c015e412ab..b9df9ce0fe 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java @@ -1,18 +1,9 @@ package com.netflix.conductor.dao.mysql; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.core.execution.ApplicationException; -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; import org.apache.commons.lang3.builder.EqualsBuilder; import org.junit.Before; @@ -20,16 +11,27 @@ import org.junit.runner.RunWith; import org.junit.runners.JUnit4; +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + @SuppressWarnings("Duplicates") @RunWith(JUnit4.class) -public class MySQLMetadataDAOTest extends MySQLBaseDAOTest { +public class MySQLMetadataDAOTest { + private final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); private MySQLMetadataDAO dao; @Before public void setup() throws Exception { - dao = new MySQLMetadataDAO(objectMapper, dataSource, testConfiguration); - resetAllData(); + dao = new MySQLMetadataDAO(testUtil.getObjectMapper(), testUtil.getDataSource(), testUtil.getTestConfiguration()); + testUtil.resetAllData(); } @Test(expected=NullPointerException.class) @@ -68,7 +70,7 @@ public void testWorkflowDefOperations() throws Exception { assertEquals("test", all.get(0).getName()); assertEquals(1, all.get(0).getVersion()); - WorkflowDef found = dao.get("test", 1); + WorkflowDef found = dao.get("test", 1).get(); assertTrue(EqualsBuilder.reflectionEquals(def, found)); def.setVersion(2); @@ -80,7 +82,7 @@ public void testWorkflowDefOperations() throws Exception { assertEquals("test", all.get(0).getName()); assertEquals(1, all.get(0).getVersion()); - found = dao.getLatest(def.getName()); + found = dao.getLatest(def.getName()).get(); assertEquals(def.getName(), found.getName()); assertEquals(def.getVersion(), found.getVersion()); assertEquals(2, found.getVersion()); @@ -101,7 +103,7 @@ public void testWorkflowDefOperations() throws Exception { def.setDescription("updated"); dao.update(def); - found = dao.get(def.getName(), def.getVersion()); + found = dao.get(def.getName(), def.getVersion()).get(); assertEquals(def.getDescription(), found.getDescription()); List allnames = dao.findAll(); diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java 
b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java index afb4930f47..5e597c9863 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java @@ -1,19 +1,9 @@ package com.netflix.conductor.dao.mysql; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - import com.google.common.collect.ImmutableList; + import com.netflix.conductor.core.events.queue.Message; -import java.sql.Connection; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; + import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -21,17 +11,31 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.sql.Connection; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + @SuppressWarnings("Duplicates") -public class MySQLQueueDAOTest extends MySQLBaseDAOTest { +public class MySQLQueueDAOTest { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLQueueDAOTest.class); + private final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); private MySQLQueueDAO dao; @Before public void setup() throws Exception { - dao = new MySQLQueueDAO(objectMapper, dataSource); - resetAllData(); + dao = new MySQLQueueDAO(testUtil.getObjectMapper(), testUtil.getDataSource()); + testUtil.resetAllData(); } @Rule @@ -145,9 +149,9 @@ public void pollMessagesTest() { // Assert that our un-popped messages match our expected size final long expectedSize = totalSize - firstPollSize - secondPollSize; - try(Connection c = dataSource.getConnection()) { + try(Connection c = testUtil.getDataSource().getConnection()) { String UNPOPPED = "SELECT COUNT(*) FROM queue_message WHERE queue_name = ? AND popped = false"; - try(Query q = new Query(objectMapper, c, UNPOPPED)) { + try(Query q = new Query(testUtil.getObjectMapper(), c, UNPOPPED)) { long count = q.addParameter(queueName).executeCount(); assertEquals("Remaining queue size mismatch", expectedSize, count); } @@ -224,9 +228,9 @@ else if(i == 6 || i == 7){ // Assert that our un-popped messages match our expected size final long expectedSize = totalSize - firstPollSize - secondPollSize; - try(Connection c = dataSource.getConnection()) { + try(Connection c = testUtil.getDataSource().getConnection()) { String UNPOPPED = "SELECT COUNT(*) FROM queue_message WHERE queue_name = ? 
AND popped = false"; - try(Query q = new Query(objectMapper, c, UNPOPPED)) { + try(Query q = new Query(testUtil.getObjectMapper(), c, UNPOPPED)) { long count = q.addParameter(queueName).executeCount(); assertEquals("Remaining queue size mismatch", expectedSize, count); } diff --git a/redis-persistence/build.gradle b/redis-persistence/build.gradle index e6452d660f..920b695ed0 100644 --- a/redis-persistence/build.gradle +++ b/redis-persistence/build.gradle @@ -9,4 +9,6 @@ dependencies { //In memory compile "org.rarefiedredis.redis:redis-java:${revRarefiedRedis}" + + testCompile project(':conductor-core').sourceSets.test.output } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index f7f381834a..be78b548a1 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -15,10 +15,11 @@ */ package com.netflix.conductor.dao.dynomite; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.inject.Singleton; + +import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.tasks.PollData; @@ -37,12 +38,12 @@ import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dyno.DynoProxy; import com.netflix.conductor.metrics.Monitors; + import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; @@ -57,6 +58,8 @@ import java.util.Set; import java.util.stream.Collectors; +import javax.inject.Inject; + @Singleton @Trace public class RedisExecutionDAO extends BaseDynoDAO implements ExecutionDAO { @@ -146,11 +149,7 @@ public List createTasks(List tasks) { List created = new LinkedList(); for (Task task : tasks) { - - Preconditions.checkNotNull(task, "task object cannot be null"); - Preconditions.checkNotNull(task.getTaskId(), "Task id cannot be null"); - Preconditions.checkNotNull(task.getWorkflowInstanceId(), "Workflow instance id cannot be null"); - Preconditions.checkNotNull(task.getReferenceTaskName(), "Task reference name cannot be null"); + validate(task); recordRedisDaoRequests("createTask", task.getTaskType(), task.getWorkflowType()); @@ -719,4 +718,20 @@ public List getPollData(String taskDefName) { } return pollData; } + + /** + * + * @param task + * @throws ApplicationException + */ + private void validate(Task task) { + try { + Preconditions.checkNotNull(task, "task object cannot be null"); + Preconditions.checkNotNull(task.getTaskId(), "Task id cannot be null"); + Preconditions.checkNotNull(task.getWorkflowInstanceId(), "Workflow instance id cannot be null"); + Preconditions.checkNotNull(task.getReferenceTaskName(), "Task reference name cannot be null"); + } catch (NullPointerException npe){ + throw new ApplicationException(Code.INVALID_INPUT, npe.getMessage(), npe); + } + } } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java 
b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java index f127096923..ef34dac9b9 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAO.java @@ -15,21 +15,10 @@ */ package com.netflix.conductor.dao.dynomite; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import javax.inject.Inject; - -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.inject.Singleton; + +import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -41,6 +30,19 @@ import com.netflix.conductor.dyno.DynoProxy; import com.netflix.conductor.metrics.Monitors; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.Executors; +import java.util.concurrent.TimeUnit; + +import javax.inject.Inject; + @Singleton @Trace public class RedisMetadataDAO extends BaseDynoDAO implements MetadataDAO { @@ -170,7 +172,7 @@ public void update(WorkflowDef def) { } @Override - public WorkflowDef getLatest(String name) { + public Optional getLatest(String name) { Preconditions.checkNotNull(name, "WorkflowDef name cannot be null"); WorkflowDef def = null; @@ -181,7 +183,7 @@ public WorkflowDef getLatest(String name) { recordRedisDaoPayloadSize("getWorkflowDef", workflowDefJsonString.length(), "n/a", def.getName()); } - return def; + return Optional.ofNullable(def); } public List getAllVersions(String name) { @@ -205,7 +207,7 @@ public List getAllVersions(String name) { } @Override - public WorkflowDef get(String name, int version) { + public Optional get(String name, int version) { Preconditions.checkNotNull(name, "WorkflowDef name cannot be null"); WorkflowDef def = null; @@ -215,7 +217,7 @@ public WorkflowDef get(String name, int version) { def = readValue(workflowDefJsonString, WorkflowDef.class); recordRedisDaoPayloadSize("getWorkflowDef", workflowDefJsonString.length(), "n/a", name); } - return def; + return Optional.ofNullable(def); } @Override diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java index 37ffa60fd9..8cdb9f6497 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java @@ -1,17 +1,14 @@ /** * Copyright 2016 Netflix, Inc. * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. */ /** * @@ -19,466 +16,103 @@ package com.netflix.conductor.dao.dynomite; import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; -import com.netflix.conductor.common.utils.JsonMapperProvider; +import com.netflix.conductor.dao.ExecutionDAO; +import com.netflix.conductor.dao.ExecutionDAOTest; import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.conductor.dyno.DynoProxy; -import org.apache.commons.lang.builder.EqualsBuilder; import org.junit.Before; -import org.junit.Rule; import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; -import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; import redis.clients.jedis.JedisCommands; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; /** * @author Viren * */ @RunWith(MockitoJUnitRunner.class) -public class RedisExecutionDAOTest { +public class RedisExecutionDAOTest extends ExecutionDAOTest { - private RedisMetadataDAO metadataDAO; + private RedisMetadataDAO metadataDAO; - private RedisExecutionDAO executionDAO; + private RedisExecutionDAO executionDAO; @Mock private IndexDAO indexDAO; private static ObjectMapper objectMapper = new JsonMapperProvider().get(); - @SuppressWarnings("unchecked") - @Before - public void init() throws Exception { - Configuration config = new TestConfiguration(); - JedisCommands jedisMock = new JedisMock(); - DynoProxy dynoClient = new DynoProxy(jedisMock); - - metadataDAO = new RedisMetadataDAO(dynoClient, objectMapper, config); - executionDAO = new RedisExecutionDAO(dynoClient, objectMapper, indexDAO, metadataDAO, config); - - // Ignore indexing in Redis tests. 
- doNothing().when(indexDAO).indexTask(any(Task.class)); - } - - @Rule - public ExpectedException expected = ExpectedException.none(); - - @Test - public void testTaskExceedsLimit() throws Exception { - - TaskDef def = new TaskDef(); - def.setName("task1"); - def.setConcurrentExecLimit(1); - metadataDAO.createTaskDef(def); - - List tasks = new LinkedList<>(); - for(int i = 0; i < 15; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t_" + i); - task.setWorkflowInstanceId("workflow_" + i); - task.setReferenceTaskName("task1"); - task.setTaskDefName("task1"); - tasks.add(task); - task.setStatus(Status.SCHEDULED); - } - - executionDAO.createTasks(tasks); - assertFalse(executionDAO.exceedsInProgressLimit(tasks.get(0))); - tasks.get(0).setStatus(Status.IN_PROGRESS); - executionDAO.updateTask(tasks.get(0)); - - for(Task task : tasks) { - assertTrue(executionDAO.exceedsInProgressLimit(task)); - } - - } - @Test - public void testCreateTaskException() throws Exception { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setTaskDefName("task1"); - expected.expect(NullPointerException.class); - expected.expectMessage("Workflow instance id cannot be null"); - executionDAO.createTasks(Arrays.asList(task)); - - task.setWorkflowInstanceId("wfid"); - expected.expect(NullPointerException.class); - expected.expectMessage("Task reference name cannot be nullss"); - executionDAO.createTasks(Arrays.asList(task)); - - } - - @Test - public void testCreateTaskException2() throws Exception { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setTaskDefName("task1"); - task.setWorkflowInstanceId("wfid"); - expected.expect(NullPointerException.class); - expected.expectMessage("Task reference name cannot be null"); - executionDAO.createTasks(Arrays.asList(task)); - } - - @Test - public void testPollData() throws Exception { - executionDAO.updateLastPoll("taskDef", null, "workerId1"); - PollData pd = executionDAO.getPollData("taskDef", null); - assertNotNull(pd); - assertTrue(pd.getLastPollTime() > 0); - assertEquals(pd.getQueueName(), "taskDef"); - assertEquals(pd.getDomain(), null); - assertEquals(pd.getWorkerId(), "workerId1"); - - executionDAO.updateLastPoll("taskDef", "domain1", "workerId1"); - pd = executionDAO.getPollData("taskDef", "domain1"); - assertNotNull(pd); - assertTrue(pd.getLastPollTime() > 0); - assertEquals(pd.getQueueName(), "taskDef"); - assertEquals(pd.getDomain(), "domain1"); - assertEquals(pd.getWorkerId(), "workerId1"); - - List pData = executionDAO.getPollData("taskDef"); - assertEquals(pData.size(), 2); - - pd = executionDAO.getPollData("taskDef", "domain2"); - assertTrue(pd == null); - } - - @Test - public void testTaskCreateDups() throws Exception { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("t" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + i); - task.setStatus(Task.Status.IN_PROGRESS); - tasks.add(task); - } - - //Let's insert a retried task - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 2); - task.setReferenceTaskName("t" + 2); - task.setRetryCount(1); - task.setWorkflowInstanceId(workflowId); - 
task.setTaskDefName("task" + 2); - task.setStatus(Task.Status.IN_PROGRESS); - tasks.add(task); - - //Duplicate task! - task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + 1); - task.setReferenceTaskName("t" + 1); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("task" + 1); - task.setStatus(Task.Status.IN_PROGRESS); - tasks.add(task); - - List created = executionDAO.createTasks(tasks); - assertEquals(tasks.size()-1, created.size()); //1 less - - Set srcIds = tasks.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount()).collect(Collectors.toSet()); - Set createdIds = created.stream().map(t -> t.getReferenceTaskName() + "." + t.getRetryCount()).collect(Collectors.toSet()); - - assertEquals(srcIds, createdIds); - - List pending = executionDAO.getPendingTasksByWorkflow("task0", workflowId); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), pending.get(0))); - - List found = executionDAO.getTasks(tasks.get(0).getTaskDefName(), null, 1); - assertNotNull(found); - assertEquals(1, found.size()); - assertTrue(EqualsBuilder.reflectionEquals(tasks.get(0), found.get(0))); - } - - @Test - public void testTaskOps() throws Exception { - List tasks = new LinkedList<>(); - String workflowId = UUID.randomUUID().toString(); - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId(workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId(workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(Task.Status.IN_PROGRESS); - tasks.add(task); - } - - for(int i = 0; i < 3; i++) { - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("x" + workflowId + "_t" + i); - task.setReferenceTaskName("testTaskOps" + i); - task.setRetryCount(0); - task.setWorkflowInstanceId("x" + workflowId); - task.setTaskDefName("testTaskOps" + i); - task.setStatus(Task.Status.IN_PROGRESS); - executionDAO.createTasks(Arrays.asList(task)); - } - - - List created = executionDAO.createTasks(tasks); - assertEquals(tasks.size(), created.size()); - - List pending = executionDAO.getPendingTasksForTaskType(tasks.get(0).getTaskDefName()); - assertNotNull(pending); - assertEquals(2, pending.size()); - //Pending list can come in any order. 
finding the one we are looking for and then comparing - Task matching = pending.stream().filter(task -> task.getTaskId().equals(tasks.get(0).getTaskId())).findAny().get(); - assertTrue(EqualsBuilder.reflectionEquals(matching, tasks.get(0))); - - List update = new LinkedList<>(); - for(int i = 0; i < 3; i++) { - Task found = executionDAO.getTask(workflowId + "_t" + i); - assertNotNull(found); - found.getOutputData().put("updated", true); - found.setStatus(Task.Status.COMPLETED); - update.add(found); - } - executionDAO.updateTasks(update); - - List taskIds = tasks.stream().map(Task::getTaskId).collect(Collectors.toList()); - List found = executionDAO.getTasks(taskIds); - assertEquals(taskIds.size(), found.size()); - found.forEach(task -> { - assertTrue(task.getOutputData().containsKey("updated")); - assertEquals(true, task.getOutputData().get("updated")); - executionDAO.removeTask(task.getTaskId()); - }); - - found = executionDAO.getTasks(taskIds); - assertTrue(found.isEmpty()); - } - - @Test - public void test() throws Exception { - Workflow workflow = new Workflow(); - workflow.setCorrelationId("correlationX"); - workflow.setCreatedBy("junit_tester"); - workflow.setEndTime(200L); - - Map input = new HashMap<>(); - input.put("param1", "param1 value"); - input.put("param2", 100); - workflow.setInput(input); - - Map output = new HashMap<>(); - output.put("ouput1", "output 1 value"); - output.put("op2", 300); - workflow.setOutput(output); - - workflow.setOwnerApp("workflow"); - workflow.setParentWorkflowId("parentWorkflowId"); - workflow.setParentWorkflowTaskId("parentWFTaskId"); - workflow.setReasonForIncompletion("missing recipe"); - workflow.setReRunFromWorkflowId("re-run from id1"); - workflow.setSchemaVersion(2); - workflow.setStartTime(90L); - workflow.setStatus(WorkflowStatus.FAILED); - workflow.setWorkflowId("workflow0"); - - List tasks = new LinkedList<>(); - - Task task = new Task(); - task.setScheduledTime(1L); - task.setSeq(1); - task.setTaskId("t1"); - task.setReferenceTaskName("t1"); - task.setWorkflowInstanceId(workflow.getWorkflowId()); - task.setTaskDefName("task1"); - - Task task2 = new Task(); - task2.setScheduledTime(2L); - task2.setSeq(2); - task2.setTaskId("t2"); - task2.setReferenceTaskName("t2"); - task2.setWorkflowInstanceId(workflow.getWorkflowId()); - task2.setTaskDefName("task2"); - - Task task3 = new Task(); - task3.setScheduledTime(2L); - task3.setSeq(3); - task3.setTaskId("t3"); - task3.setReferenceTaskName("t3"); - task3.setWorkflowInstanceId(workflow.getWorkflowId()); - task3.setTaskDefName("task3"); - - tasks.add(task); - tasks.add(task2); - tasks.add(task3); - - workflow.setTasks(tasks); - - workflow.setUpdatedBy("junit_tester"); - workflow.setUpdateTime(800L); - workflow.setVersion(3); - //workflow.setWorkflowId("wf0001"); - workflow.setWorkflowType("Junit Workflow"); - - String workflowId = executionDAO.createWorkflow(workflow); - List created = executionDAO.createTasks(tasks); - assertEquals(tasks.size(), created.size()); - - Workflow workflowWithTasks = executionDAO.getWorkflow(workflow.getWorkflowId(), true); - assertEquals(workflowWithTasks.getWorkflowId(), workflowId); - assertTrue(!workflowWithTasks.getTasks().isEmpty()); - - assertEquals(workflow.getWorkflowId(), workflowId); - Workflow found = executionDAO.getWorkflow(workflowId, false); - assertTrue(found.getTasks().isEmpty()); - - workflow.getTasks().clear(); - assertTrue(EqualsBuilder.reflectionEquals(workflow, found)); - - workflow.getInput().put("updated", true); - 
executionDAO.updateWorkflow(workflow); - found = executionDAO.getWorkflow(workflowId); - assertNotNull(found); - assertTrue(found.getInput().containsKey("updated")); - assertEquals(true, found.getInput().get("updated")); - - List running = executionDAO.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - workflow.setStatus(WorkflowStatus.RUNNING); - executionDAO.updateWorkflow(workflow); - - running = executionDAO.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertEquals(1, running.size()); - assertEquals(workflow.getWorkflowId(), running.get(0)); - - List pending = executionDAO.getPendingWorkflowsByType(workflow.getWorkflowType()); - assertNotNull(pending); - assertEquals(1, pending.size()); - assertEquals(3, pending.get(0).getTasks().size()); - pending.get(0).getTasks().clear(); - assertTrue(EqualsBuilder.reflectionEquals(workflow, pending.get(0))); - - workflow.setStatus(WorkflowStatus.COMPLETED); - executionDAO.updateWorkflow(workflow); - running = executionDAO.getRunningWorkflowIds(workflow.getWorkflowType()); - assertNotNull(running); - assertTrue(running.isEmpty()); - - List bytime = executionDAO.getWorkflowsByType(workflow.getWorkflowType(), System.currentTimeMillis(), System.currentTimeMillis()+100); - assertNotNull(bytime); - assertTrue(bytime.isEmpty()); - - bytime = executionDAO.getWorkflowsByType(workflow.getWorkflowType(), workflow.getCreateTime() - 10, workflow.getCreateTime() + 10); - assertNotNull(bytime); - assertEquals(1, bytime.size()); - - String workflowName = "pending_count_test"; - String idBase = workflow.getWorkflowId(); - for(int i = 0; i < 10; i++) { - workflow.setWorkflowId("x" + i + idBase); - workflow.setCorrelationId("corr001"); - workflow.setStatus(WorkflowStatus.RUNNING); - workflow.setWorkflowType(workflowName); - executionDAO.createWorkflow(workflow); - } - - /* - List bycorrelationId = executionDAO.getWorkflowsByCorrelationId("corr001"); - assertNotNull(bycorrelationId); - assertEquals(10, bycorrelationId.size()); - */ - long count = executionDAO.getPendingWorkflowCount(workflowName); - assertEquals(10, count); - - for(int i = 0; i < 10; i++) { - executionDAO.removeFromPendingWorkflow(workflowName, "x" + i + idBase); - } - count = executionDAO.getPendingWorkflowCount(workflowName); - assertEquals(0, count); - } - - @Test - @SuppressWarnings("unchecked") - public void testCorrelateTaskToWorkflowInDS() throws Exception { - String workflowId = "workflowId"; - String taskId = "taskId1"; - String taskDefName = "task1"; - - TaskDef def = new TaskDef(); - def.setName("task1"); - def.setConcurrentExecLimit(1); - metadataDAO.createTaskDef(def); - - Task task = new Task(); - task.setTaskId(taskId); - task.setWorkflowInstanceId(workflowId); - task.setReferenceTaskName("ref_name"); - task.setTaskDefName(taskDefName); - task.setTaskType(taskDefName); - task.setStatus(Status.IN_PROGRESS); - List tasks = executionDAO.createTasks(Collections.singletonList(task)); - assertNotNull(tasks); - assertEquals(1, tasks.size()); - - executionDAO.correlateTaskToWorkflowInDS(taskId, workflowId); - tasks = executionDAO.getTasksForWorkflow(workflowId); - assertNotNull(tasks); - assertEquals(workflowId, tasks.get(0).getWorkflowInstanceId()); - assertEquals(taskId, tasks.get(0).getTaskId()); - } - + @SuppressWarnings("unchecked") + @Before + public void init() throws Exception { + Configuration config = new TestConfiguration(); + JedisCommands jedisMock = new JedisMock(); + DynoProxy 
dynoClient = new DynoProxy(jedisMock); + + metadataDAO = new RedisMetadataDAO(dynoClient, objectMapper, config); + executionDAO = new RedisExecutionDAO(dynoClient, objectMapper, mock(IndexDAO.class), metadataDAO, config); + + // Ignore indexing in Redis tests. + doNothing().when(indexDAO).indexTask(any(Task.class)); + } + + @Test + @SuppressWarnings("unchecked") + public void testCorrelateTaskToWorkflowInDS() throws Exception { + String workflowId = "workflowId"; + String taskId = "taskId1"; + String taskDefName = "task1"; + + TaskDef def = new TaskDef(); + def.setName("task1"); + def.setConcurrentExecLimit(1); + metadataDAO.createTaskDef(def); + + Task task = new Task(); + task.setTaskId(taskId); + task.setWorkflowInstanceId(workflowId); + task.setReferenceTaskName("ref_name"); + task.setTaskDefName(taskDefName); + task.setTaskType(taskDefName); + task.setStatus(Status.IN_PROGRESS); + List tasks = executionDAO.createTasks(Collections.singletonList(task)); + assertNotNull(tasks); + assertEquals(1, tasks.size()); + + executionDAO.correlateTaskToWorkflowInDS(taskId, workflowId); + tasks = executionDAO.getTasksForWorkflow(workflowId); + assertNotNull(tasks); + assertEquals(workflowId, tasks.get(0).getWorkflowInstanceId()); + assertEquals(taskId, tasks.get(0).getTaskId()); + } + + @Override + protected ExecutionDAO getExecutionDAO() { + return executionDAO; + } + + @Override + protected MetadataDAO getMetadataDAO() { + return metadataDAO; + } } diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java index 7a71cb6dc7..abd70e0fb2 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisMetadataDAOTest.java @@ -18,21 +18,6 @@ */ package com.netflix.conductor.dao.dynomite; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; - -import java.util.Arrays; -import java.util.List; -import java.util.Set; -import java.util.UUID; -import java.util.stream.Collectors; - -import com.netflix.conductor.common.utils.JsonMapperProvider; -import org.apache.commons.lang.builder.EqualsBuilder; -import org.junit.Before; -import org.junit.Test; - import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.events.EventHandler.Action; @@ -42,14 +27,29 @@ import com.netflix.conductor.common.metadata.tasks.TaskDef.RetryLogic; import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.utils.JsonMapperProvider; import com.netflix.conductor.config.TestConfiguration; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.conductor.dyno.DynoProxy; +import org.apache.commons.lang.builder.EqualsBuilder; +import org.junit.Before; +import org.junit.Test; + +import java.util.Arrays; +import java.util.List; +import java.util.Set; +import java.util.UUID; +import java.util.stream.Collectors; + import redis.clients.jedis.JedisCommands; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import 
static org.junit.Assert.assertTrue; + /** * @author Viren * @@ -106,7 +106,7 @@ public void testWorkflowDefOperations() throws Exception { assertEquals("test", all.get(0).getName()); assertEquals(1, all.get(0).getVersion()); - WorkflowDef found = dao.get("test", 1); + WorkflowDef found = dao.get("test", 1).get(); assertTrue(EqualsBuilder.reflectionEquals(def, found)); def.setVersion(2); @@ -118,7 +118,7 @@ public void testWorkflowDefOperations() throws Exception { assertEquals("test", all.get(0).getName()); assertEquals(1, all.get(0).getVersion()); - found = dao.getLatest(def.getName()); + found = dao.getLatest(def.getName()).get(); assertEquals(def.getName(), found.getName()); assertEquals(def.getVersion(), found.getVersion()); assertEquals(2, found.getVersion()); @@ -139,7 +139,7 @@ public void testWorkflowDefOperations() throws Exception { def.setDescription("updated"); dao.update(def); - found = dao.get(def.getName(), def.getVersion()); + found = dao.get(def.getName(), def.getVersion()).get(); assertEquals(def.getDescription(), found.getDescription()); List allnames = dao.findAll(); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index 23bfd8e016..78d4e847f7 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -18,8 +18,9 @@ */ package com.netflix.conductor.tests.integration; -import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.util.concurrent.Uninterruptibles; + +import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; @@ -45,6 +46,7 @@ import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; import com.netflix.conductor.tests.utils.TestRunner; + import org.apache.commons.lang.StringUtils; import org.junit.After; import org.junit.Before; @@ -56,7 +58,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -71,6 +72,8 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import javax.inject.Inject; + import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -339,12 +342,10 @@ public void testWorkflowSchemaVersion() throws Exception { metadataService.updateWorkflowDef(ver1); metadataService.updateWorkflowDef(ver2); - WorkflowDef found = metadataService.getWorkflowDef(ver2.getName(), 1); - assertNotNull(found); + WorkflowDef found = metadataService.getWorkflowDef(ver2.getName(), 1).get(); assertEquals(2, found.getSchemaVersion()); - WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1); - assertNotNull(found1); + WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1).get(); assertEquals(1, found1.getSchemaVersion()); } @@ -1149,7 +1150,7 @@ public void testSimpleWorkflowFailureWithTerminalError() throws Exception { taskDef.setRetryCount(1); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 
1); + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(found); Map outputParameters = found.getOutputParameters(); outputParameters.put("validationErrors", "${t1.output.ErrorMessage}"); @@ -1220,8 +1221,7 @@ public void testSimpleWorkflow() throws Exception { clearWorkflows(); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1"; Map input = new HashMap<>(); @@ -1478,8 +1478,7 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { clearWorkflows(); createWorkflowDefForDomain(); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); String correlationId = "unit_test_sw"; Map input = new HashMap(); @@ -1608,8 +1607,7 @@ public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { clearWorkflows(); createWorkflowDefForDomain(); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); String correlationId = "unit_test_sw"; Map input = new HashMap(); @@ -1735,8 +1733,7 @@ public void testLongRunning() throws Exception { clearWorkflows(); - WorkflowDef found = metadataService.getWorkflowDef(LONG_RUNNING, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LONG_RUNNING, 1).get(); String correlationId = "unit_test_1"; Map input = new HashMap(); @@ -1844,8 +1841,7 @@ public void testResetWorkflowInProgressTasks() throws Exception { clearWorkflows(); - WorkflowDef found = metadataService.getWorkflowDef(LONG_RUNNING, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LONG_RUNNING, 1).get(); String correlationId = "unit_test_1"; Map input = new HashMap(); @@ -1958,8 +1954,7 @@ public void testConcurrentWorkflowExecutions() throws Exception { int count = 3; - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_concurrrent"; Map input = new HashMap(); @@ -2163,8 +2158,7 @@ public void testRetries() throws Exception { taskDef.setRetryDelaySeconds(1); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1"; Map input = new HashMap(); @@ -2231,8 +2225,7 @@ public void testRetries() throws Exception { @Test public void testSuccess() throws Exception { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -2341,8 +2334,7 @@ public void testSuccess() throws Exception { @Test public void testDeciderUpdate() throws Exception { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -2373,8 +2365,7 @@ public void testDeciderUpdate() throws Exception { //Ignore for now, 
will improve this in the future public void testFailurePoints() throws Exception { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -2448,8 +2439,7 @@ public void testDeciderMix() throws Exception { ExecutorService executors = Executors.newFixedThreadPool(3); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -2588,16 +2578,14 @@ public void testDeciderMix() throws Exception { @Test public void testFailures() throws Exception { - WorkflowDef errorWorkflow = metadataService.getWorkflowDef(FORK_JOIN_WF, 1); - assertNotNull("Error workflow is not defined", errorWorkflow); + metadataService.getWorkflowDef(FORK_JOIN_WF, 1).get(); String taskName = "junit_task_1"; TaskDef taskDef = metadataService.getTaskDef(taskName); taskDef.setRetryCount(0); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(found.getFailureWorkflow()); assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); @@ -2692,8 +2680,7 @@ public void testRetry() throws Exception { taskDef.setRetryDelaySeconds(0); metadataService.updateTaskDef(taskDef); - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(workflowDef); + WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(workflowDef.getFailureWorkflow()); assertFalse(StringUtils.isBlank(workflowDef.getFailureWorkflow())); @@ -2869,8 +2856,7 @@ public void testRestart() throws Exception { taskDef.setRetryCount(0); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(found.getFailureWorkflow()); assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); @@ -2931,8 +2917,7 @@ public void testTimeout() throws Exception { taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(found.getFailureWorkflow()); assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); @@ -3004,8 +2989,7 @@ public void testTimeout() throws Exception { @Test public void testReruns() throws Exception { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -3152,8 +3136,7 @@ public void testTaskSkipping() throws Exception { metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(TEST_WORKFLOW_NAME_3, 1); - assertNotNull(found); + metadataService.getWorkflowDef(TEST_WORKFLOW_NAME_3, 1).get(); String correlationId = 
"unit_test_1" + UUID.randomUUID().toString(); Map input = new HashMap(); @@ -3223,8 +3206,7 @@ public void testTaskSkipping() throws Exception { @Test public void testPauseResume() throws Exception { - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); - assertNotNull(found); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); String correlationId = "unit_test_1" + System.nanoTime(); Map input = new HashMap(); @@ -3335,8 +3317,7 @@ public void testPauseResume() throws Exception { public void testSubWorkflow() throws Exception { createSubWorkflow(); - WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - assertNotNull(found); + metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); Map input = new HashMap<>(); input.put("param1", "param 1 value"); input.put("param3", "param 2 value"); @@ -3402,8 +3383,7 @@ public void testSubWorkflowFailure() throws Exception { createSubWorkflow(); - WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - assertNotNull(found); + metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); Map input = new HashMap<>(); input.put("param1", "param 1 value"); @@ -3467,7 +3447,7 @@ public void testSubWorkflowFailureInverse() throws Exception { createSubWorkflow(); - WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); + WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); assertNotNull(found); Map input = new HashMap<>(); input.put("param1", "param 1 value"); @@ -3896,8 +3876,7 @@ private String runWorkflowWithSubworkflow() throws Exception { clearWorkflows(); createWorkflowDefForDomain(); - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1); - assertNotNull(workflowDef); + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); String correlationId = "unit_test_sw"; Map input = new HashMap<>(); From 57a1c438281800f010665da5059d0261714dabde Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 13 Jul 2018 10:23:28 +0200 Subject: [PATCH 093/163] Fix issues that happened when upstream was merged. 
--- .../conductor/core/execution/WorkflowExecutor.java | 7 +++++++ .../conductor/core/execution/TestDeciderOutcomes.java | 8 +++++--- .../conductor/tests/integration/WorkflowServiceTest.java | 4 ++-- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index e03e0ae225..596954990c 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -648,6 +648,13 @@ public void updateTask(TaskResult taskResult) { //In case of a FAILED_WITH_TERMINAL_ERROR the workflow will be terminated and the output of the task is never copied //ensuring the task output is copied to the workflow here if (FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) { + //Update the task in the workflow instance + Task taskByRefName = workflowInstance.getTaskByRefName(task.getReferenceTaskName()); + taskByRefName.setStatus(task.getStatus()); + taskByRefName.setOutputData(task.getOutputData()); + taskByRefName.setReasonForIncompletion(task.getReasonForIncompletion()); + taskByRefName.setWorkerId(task.getWorkerId()); + taskByRefName.setCallbackAfterSeconds(task.getCallbackAfterSeconds()); WorkflowDef workflowDef = workflowInstance.getWorkflowDefinition(); Map outputData = task.getOutputData(); if (!workflowDef.getOutputParameters().isEmpty()) { diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index 629bf12dde..c34316c299 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java @@ -81,9 +81,11 @@ public class TestDeciderOutcomes { public void init() throws Exception { MetadataDAO metadataDAO = mock(MetadataDAO.class); - TaskDef td = new TaskDef(); - td.setRetryCount(1); - when(metadataDAO.getTaskDef(any())).thenReturn(td); + TaskDef taskDef = new TaskDef(); + taskDef.setRetryCount(1); + taskDef.setName("mockTaskDef"); + taskDef.setResponseTimeoutSeconds(0); + when(metadataDAO.getTaskDef(any())).thenReturn(taskDef); ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index 78d4e847f7..f7819bca2a 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -2756,7 +2756,7 @@ public void testNonRestartartableWorkflows() throws Exception { taskDef.setRetryCount(0); metadataService.updateTaskDef(taskDef); - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); found.setName(JUNIT_TEST_WF_NON_RESTARTABLE); found.setRestartable(false); metadataService.updateWorkflowDef(found); @@ -3581,7 +3581,7 @@ public void testEventWorkflow() throws Exception { @Test public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception { - WorkflowDef workflowDef = 
metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); + WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(workflowDef); String workflowId = workflowExecutor.startWorkflow(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); From bf83cffc742864055ad493302f17e9006330d9da Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 19 Jul 2018 16:20:37 -0700 Subject: [PATCH 094/163] Adding support to the IndexDAO for initialization tasks - On an embedded ElasticSearch instance, start of the instance is necessary before accessing the index --- .../com/netflix/conductor/dao/IndexDAO.java | 313 +++++++++--------- .../dao/es5/index/ElasticSearchDAOV5.java | 76 +++-- 2 files changed, 206 insertions(+), 183 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java index 632040c75d..fd9e363269 100644 --- a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java +++ b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java @@ -1,12 +1,12 @@ /** * Copyright 2016 Netflix, Inc. - * + *

* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + *

+ * http://www.apache.org/licenses/LICENSE-2.0 + *

* Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -26,159 +26,166 @@ import java.util.concurrent.CompletableFuture; /** - * + * * @author Viren * DAO to index the workflow and task details for searching. */ public interface IndexDAO { - /** - * This method should return an unique identifier of the indexed doc - * @param workflow Workflow to be indexed - * - */ - void indexWorkflow(Workflow workflow); - - /** - * - * /** - * This method should return an unique identifier of the indexed doc - * @param workflow Workflow to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncIndexWorkflow(Workflow workflow); - - /** - * @param task Task to be indexed - */ - void indexTask(Task task); - - /** - * - * @param task Task to be indexed asynchronously - * @return CompletableFuture of type void - */ - CompletableFuture asyncIndexTask(Task task); - - /** - * - * @param query SQL like query for workflow search parameters. - * @param freeText Additional query in free text. Lucene syntax - * @param start start start index for pagination - * @param count count # of workflow ids to be returned - * @param sort sort options - * @return List of workflow ids for the matching query - */ - SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort); - - - /** - * - * @param query SQL like query for task search parameters. - * @param freeText Additional query in free text. Lucene syntax - * @param start start start index for pagination - * @param count count # of task ids to be returned - * @param sort sort options - * @return List of workflow ids for the matching query - */ - SearchResult searchTasks(String query, String freeText, int start, int count, List sort); - - /** - * Remove the workflow index - * @param workflowId workflow to be removed - */ - void removeWorkflow(String workflowId); - - /** - * Remove the workflow index - * @param workflowId workflow to be removed - * @return CompletableFuture of type void - */ - CompletableFuture asyncRemoveWorkflow(String workflowId); - - - /** - * - * Updates the index - * @param workflowInstanceId id of the workflow - * @param keys keys to be updated - * @param values values. Number of keys and values MUST match. - */ - void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values); - - /** - * Updates the index - * @param workflowInstanceId id of the workflow - * @param keys keys to be updated - * @param values values. Number of keys and values MUST match. 
- * @return CompletableFuture of type void - */ - CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values); - - - /** - * Retrieves a specific field from the index - * @param workflowInstanceId id of the workflow - * @param key field to be retrieved - * @return value of the field as string - */ - String get(String workflowInstanceId, String key); - - /** - * @param logs Task Execution logs to be indexed - */ - void addTaskExecutionLogs(List logs); - - /** - * - * @param logs Task Execution logs to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncAddTaskExecutionLogs(List logs); - - /** - * - * @param taskId Id of the task for which to fetch the execution logs - * @return Returns the task execution logs for given task id - */ - List getTaskExecutionLogs(String taskId); - - - /** - * @param eventExecution Event Execution to be indexed - */ - void addEventExecution(EventExecution eventExecution); - - - /** - * - * @param eventExecution Event Execution to be indexed - * @return CompletableFuture of type void - */ - CompletableFuture asyncAddEventExecution(EventExecution eventExecution); - - /** - * Adds an incoming external message into the index - * @param queue Name of the registered queue - * @param msg Message - */ - void addMessage(String queue, Message msg); - - /** - * Search for Workflows completed or failed beyond archiveTtlDays - * @param indexName Name of the index to search - * @param archiveTtlDays Archival Time to Live - * @return List of worlflow Ids matching the pattern - */ - List searchArchivableWorkflows(String indexName, long archiveTtlDays); - - /** - * Search for RUNNING workflows changed in the last lastModifiedHoursAgoFrom to lastModifiedHoursAgoTo hours - * @param lastModifiedHoursAgoFrom - last updated date should be lastModifiedHoursAgoFrom hours ago or later - * @param lastModifiedHoursAgoTo - last updated date should be lastModifiedHoursAgoTo hours ago or earlier - * * - * @return List of workflow Ids matching the pattern - */ - List searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo); + /** + * This method should return an unique identifier of the indexed doc + * @param workflow Workflow to be indexed + * + */ + void setup() throws Exception; + + /** + * This method should return an unique identifier of the indexed doc + * @param workflow Workflow to be indexed + * + */ + void indexWorkflow(Workflow workflow); + + /** + * + * /** + * This method should return an unique identifier of the indexed doc + * @param workflow Workflow to be indexed + * @return CompletableFuture of type void + */ + CompletableFuture asyncIndexWorkflow(Workflow workflow); + + /** + * @param task Task to be indexed + */ + void indexTask(Task task); + + /** + * + * @param task Task to be indexed asynchronously + * @return CompletableFuture of type void + */ + CompletableFuture asyncIndexTask(Task task); + + /** + * + * @param query SQL like query for workflow search parameters. + * @param freeText Additional query in free text. Lucene syntax + * @param start start start index for pagination + * @param count count # of workflow ids to be returned + * @param sort sort options + * @return List of workflow ids for the matching query + */ + SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort); + + + /** + * + * @param query SQL like query for task search parameters. + * @param freeText Additional query in free text. 
Lucene syntax + * @param start start start index for pagination + * @param count count # of task ids to be returned + * @param sort sort options + * @return List of workflow ids for the matching query + */ + SearchResult searchTasks(String query, String freeText, int start, int count, List sort); + + /** + * Remove the workflow index + * @param workflowId workflow to be removed + */ + void removeWorkflow(String workflowId); + + /** + * Remove the workflow index + * @param workflowId workflow to be removed + * @return CompletableFuture of type void + */ + CompletableFuture asyncRemoveWorkflow(String workflowId); + + + /** + * + * Updates the index + * @param workflowInstanceId id of the workflow + * @param keys keys to be updated + * @param values values. Number of keys and values MUST match. + */ + void updateWorkflow(String workflowInstanceId, String[] keys, Object[] values); + + /** + * Updates the index + * @param workflowInstanceId id of the workflow + * @param keys keys to be updated + * @param values values. Number of keys and values MUST match. + * @return CompletableFuture of type void + */ + CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values); + + + /** + * Retrieves a specific field from the index + * @param workflowInstanceId id of the workflow + * @param key field to be retrieved + * @return value of the field as string + */ + String get(String workflowInstanceId, String key); + + /** + * @param logs Task Execution logs to be indexed + */ + void addTaskExecutionLogs(List logs); + + /** + * + * @param logs Task Execution logs to be indexed + * @return CompletableFuture of type void + */ + CompletableFuture asyncAddTaskExecutionLogs(List logs); + + /** + * + * @param taskId Id of the task for which to fetch the execution logs + * @return Returns the task execution logs for given task id + */ + List getTaskExecutionLogs(String taskId); + + + /** + * @param eventExecution Event Execution to be indexed + */ + void addEventExecution(EventExecution eventExecution); + + + /** + * + * @param eventExecution Event Execution to be indexed + * @return CompletableFuture of type void + */ + CompletableFuture asyncAddEventExecution(EventExecution eventExecution); + + /** + * Adds an incoming external message into the index + * @param queue Name of the registered queue + * @param msg Message + */ + void addMessage(String queue, Message msg); + + /** + * Search for Workflows completed or failed beyond archiveTtlDays + * @param indexName Name of the index to search + * @param archiveTtlDays Archival Time to Live + * @return List of worlflow Ids matching the pattern + */ + List searchArchivableWorkflows(String indexName, long archiveTtlDays); + + /** + * Search for RUNNING workflows changed in the last lastModifiedHoursAgoFrom to lastModifiedHoursAgoTo hours + * @param lastModifiedHoursAgoFrom - last updated date should be lastModifiedHoursAgoFrom hours ago or later + * @param lastModifiedHoursAgoTo - last updated date should be lastModifiedHoursAgoTo hours ago or earlier + * * + * @return List of workflow Ids matching the pattern + */ + List searchRecentRunningWorkflows(int lastModifiedHoursAgoFrom, int lastModifiedHoursAgoTo); } \ No newline at end of file diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java index 097e94aef7..0805515ae2 100644 --- 
a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java @@ -31,6 +31,7 @@ import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.es5.index.query.parser.Expression; +import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.query.parser.ParserException; import com.netflix.conductor.metrics.Monitors; @@ -121,7 +122,6 @@ public class ElasticSearchDAOV5 implements IndexDAO { private Client elasticSearchClient; - private static final TimeZone GMT = TimeZone.getTimeZone("GMT"); private static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyyMMww"); @@ -133,20 +133,11 @@ public class ElasticSearchDAOV5 implements IndexDAO { } @Inject - public ElasticSearchDAOV5(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { + public ElasticSearchDAOV5(Client elasticSearchClient, ElasticSearchConfiguration config, ObjectMapper objectMapper) { this.objectMapper = objectMapper; this.elasticSearchClient = elasticSearchClient; - this.indexName = config.getProperty("workflow.elasticsearch.index.name", null); - - try { - - initIndex(); - updateIndexName(config); - Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(config), 0, 1, TimeUnit.HOURS); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - } + this.indexName = config.getIndexName(); + this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log"); int corePoolSize = 6; int maximumPoolSize = 12; @@ -156,10 +147,18 @@ public ElasticSearchDAOV5(Client elasticSearchClient, Configuration config, Obje keepAliveTime, TimeUnit.MINUTES, new LinkedBlockingQueue<>()); + + /* + try { + this.setup(); + }catch(Exception e){ + e.printStackTrace(); + logger.debug("Got into problems with the setup"); + } + */ } - private void updateIndexName(Configuration config) { - this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log"); + private void updateIndexName() { this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); try { @@ -175,23 +174,19 @@ private void updateIndexName(Configuration config) { } } - /** - * Initializes the index with required templates and mappings. - */ - private void initIndex() throws Exception { + @Override + public void setup() throws Exception { - //0. 
Add the index template - GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet(); - if (result.getIndexTemplates().isEmpty()) { - logger.info("Creating the index template 'wfe_template'"); - InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/template.json"); - byte[] templateSource = IOUtils.toByteArray(stream); + elasticSearchClient.admin().cluster().prepareHealth().setWaitForGreenStatus() + .execute().get(); + try { - try { - elasticSearchClient.admin().indices().preparePutTemplate("wfe_template").setSource(templateSource, XContentType.JSON).execute().actionGet(); - } catch (Exception e) { - logger.error("Failed to init index template", e); - } + initIndex(); + updateIndexName(); + Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(), 0, 1, TimeUnit.HOURS); + + } catch (Exception e) { + logger.error(e.getMessage(), e); } //1. Create the required index @@ -219,6 +214,27 @@ private void initIndex() throws Exception { } } + /** + * Initializes the index with required templates and mappings. + */ + private void initIndex() throws Exception { + + //0. Add the index template + GetIndexTemplatesResponse result = elasticSearchClient.admin().indices().prepareGetTemplates("wfe_template").execute().actionGet(); + if (result.getIndexTemplates().isEmpty()) { + logger.info("Creating the index template 'wfe_template'"); + InputStream stream = ElasticSearchDAOV5.class.getResourceAsStream("/template.json"); + byte[] templateSource = IOUtils.toByteArray(stream); + + try { + elasticSearchClient.admin().indices().preparePutTemplate("wfe_template").setSource(templateSource, XContentType.JSON).execute().actionGet(); + } catch (Exception e) { + logger.error("Failed to init index template", e); + } + } + + } + @Override public void indexWorkflow(Workflow workflow) { try { From a3de66cd0a89c6e4a9dab4d81ab84861c0d93306 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 19 Jul 2018 16:28:11 -0700 Subject: [PATCH 095/163] Update Main class in order to support Embedded ES wait time - If Embedded ES option is chosen, the main class will wait for a EMBEDDED_ES_INIT_TIME amount of time --- .../com/netflix/conductor/bootstrap/Main.java | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java index 9cbd9d4d8c..b015435a02 100644 --- a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java +++ b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java @@ -18,6 +18,8 @@ import com.google.inject.Guice; import com.google.inject.Injector; +import com.netflix.conductor.dao.IndexDAO; +import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; import com.netflix.conductor.grpc.server.GRPCServerProvider; import com.netflix.conductor.jetty.server.JettyServerProvider; @@ -27,6 +29,7 @@ import java.io.File; import java.io.FileInputStream; import java.io.IOException; +import java.util.Optional; import java.util.Properties; /** @@ -34,6 +37,8 @@ */ public class Main { + private static final int EMBEDDED_ES_INIT_TIME = 5000; + public static void main(String[] args) throws Exception { loadConfigFile(args.length > 0 ? 
args[0] : System.getenv("CONDUCTOR_CONFIG_FILE")); @@ -47,14 +52,21 @@ public static void main(String[] args) throws Exception { ModulesProvider modulesProvider = bootstrapInjector.getInstance(ModulesProvider.class); Injector serverInjector = Guice.createInjector(modulesProvider.get()); - - serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().ifPresent(search -> { + Optional embeddedSearchInstance = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get(); + if (embeddedSearchInstance.isPresent()) { try { - search.start(); + embeddedSearchInstance.get().start(); + /* + * Elasticsearch embedded instance does not notify when it is up and ready to accept incoming requests. + * A possible solution for reading and writing into the index is to wait a specific amount of time. + */ + Thread.sleep(EMBEDDED_ES_INIT_TIME); } catch (Exception ioe) { System.exit(3); } - }); + } + serverInjector.getInstance(IndexDAO.class).setup(); + System.out.println("\n\n\n"); System.out.println(" _ _ "); From 0381767bdd064db0bd291e2db6f39296a2f6bcfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 19 Jul 2018 16:29:32 -0700 Subject: [PATCH 096/163] Updated MockIndexDao in order to support the new Index setup feature --- .../java/com/netflix/conductor/tests/utils/MockIndexDAO.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java index 6e94b9f2f4..c854b666a1 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/MockIndexDAO.java @@ -36,6 +36,10 @@ */ public class MockIndexDAO implements IndexDAO { + @Override + public void setup() { + } + @Override public void indexWorkflow(Workflow workflow) { } From 53423ce46082473cfe4357d9724b3e8a6be5ca3c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 19 Jul 2018 16:59:29 -0700 Subject: [PATCH 097/163] Updated comments and formatting for consistency with the current code --- .../com/netflix/conductor/dao/IndexDAO.java | 13 ++---- .../dao/es5/index/ElasticSearchDAOV5.java | 46 ++++++++----------- 2 files changed, 22 insertions(+), 37 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java index fd9e363269..a28973df8a 100644 --- a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java +++ b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java @@ -1,12 +1,12 @@ /** * Copyright 2016 Netflix, Inc. - *

+ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at - *

- * http://www.apache.org/licenses/LICENSE-2.0 - *

+ * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @@ -78,7 +78,6 @@ public interface IndexDAO { */ SearchResult searchWorkflows(String query, String freeText, int start, int count, List sort); - /** * * @param query SQL like query for task search parameters. @@ -103,7 +102,6 @@ public interface IndexDAO { */ CompletableFuture asyncRemoveWorkflow(String workflowId); - /** * * Updates the index @@ -122,7 +120,6 @@ public interface IndexDAO { */ CompletableFuture asyncUpdateWorkflow(String workflowInstanceId, String[] keys, Object[] values); - /** * Retrieves a specific field from the index * @param workflowInstanceId id of the workflow @@ -150,13 +147,11 @@ public interface IndexDAO { */ List getTaskExecutionLogs(String taskId); - /** * @param eventExecution Event Execution to be indexed */ void addEventExecution(EventExecution eventExecution); - /** * * @param eventExecution Event Execution to be indexed diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java index 0805515ae2..91580a1d40 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java @@ -25,7 +25,6 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.common.utils.RetryUtil; -import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.ApplicationException.Code; @@ -147,38 +146,13 @@ public ElasticSearchDAOV5(Client elasticSearchClient, ElasticSearchConfiguration keepAliveTime, TimeUnit.MINUTES, new LinkedBlockingQueue<>()); - - /* - try { - this.setup(); - }catch(Exception e){ - e.printStackTrace(); - logger.debug("Got into problems with the setup"); - } - */ - } - - private void updateIndexName() { - this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); - - try { - elasticSearchClient.admin().indices().prepareGetIndex().addIndices(logIndexName).execute().actionGet(); - } catch (IndexNotFoundException infe) { - try { - elasticSearchClient.admin().indices().prepareCreate(logIndexName).execute().actionGet(); - } catch (ResourceAlreadyExistsException ilee) { - - } catch (Exception e) { - logger.error("Failed to update log index name: {}", logIndexName, e); - } - } } @Override public void setup() throws Exception { - elasticSearchClient.admin().cluster().prepareHealth().setWaitForGreenStatus() - .execute().get(); + elasticSearchClient.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().get(); + try { initIndex(); @@ -235,6 +209,22 @@ private void initIndex() throws Exception { } + private void updateIndexName() { + this.logIndexName = this.logIndexPrefix + "_" + SIMPLE_DATE_FORMAT.format(new Date()); + + try { + elasticSearchClient.admin().indices().prepareGetIndex().addIndices(logIndexName).execute().actionGet(); + } catch (IndexNotFoundException infe) { + try { + elasticSearchClient.admin().indices().prepareCreate(logIndexName).execute().actionGet(); + 
} catch (ResourceAlreadyExistsException ilee) { + + } catch (Exception e) { + logger.error("Failed to update log index name: {}", logIndexName, e); + } + } + } + @Override public void indexWorkflow(Workflow workflow) { try { From 23336f21bea871670406877afb20b95948e7fca0 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Tue, 24 Jul 2018 14:15:00 +0200 Subject: [PATCH 098/163] Add basic REST healthcheck. --- jersey/build.gradle | 18 +++-- .../server/resources/HealthCheckResource.java | 31 ++++++++ server/build.gradle | 74 ++++++++++--------- .../conductor/server/ServerModule.java | 4 + versionsOfDependencies.gradle | 1 + 5 files changed, 84 insertions(+), 44 deletions(-) create mode 100644 jersey/src/main/java/com/netflix/conductor/server/resources/HealthCheckResource.java diff --git a/jersey/build.gradle b/jersey/build.gradle index b6f77bc1bd..87613a3363 100644 --- a/jersey/build.gradle +++ b/jersey/build.gradle @@ -1,11 +1,13 @@ dependencies { - compile project(':conductor-common') - compile project(':conductor-core') - - compile "javax.ws.rs:jsr311-api:${revJsr311Api}" - compile "io.swagger:swagger-jaxrs:${revSwagger}" - compile "com.sun.jersey:jersey-bundle:${revJerseyBundle}" - - provided "javax.servlet:javax.servlet-api:${revServletApi}" + compile project(':conductor-common') + compile project(':conductor-core') + + compile "com.netflix.runtime:health-api:${revHealth}" + + compile "javax.ws.rs:jsr311-api:${revJsr311Api}" + compile "io.swagger:swagger-jaxrs:${revSwagger}" + compile "com.sun.jersey:jersey-bundle:${revJerseyBundle}" + + provided "javax.servlet:javax.servlet-api:${revServletApi}" } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/HealthCheckResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/HealthCheckResource.java new file mode 100644 index 0000000000..c23e5d21c0 --- /dev/null +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/HealthCheckResource.java @@ -0,0 +1,31 @@ +package com.netflix.conductor.server.resources; + +import com.netflix.runtime.health.api.HealthCheckAggregator; +import com.netflix.runtime.health.api.HealthCheckStatus; + +import javax.inject.Inject; +import javax.inject.Singleton; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.core.MediaType; + +import io.swagger.annotations.Api; + +@Api(value = "/health", produces = MediaType.APPLICATION_JSON, tags = "Health Check") +@Path("/health") +@Produces({MediaType.APPLICATION_JSON}) +@Singleton +public class HealthCheckResource { + private final HealthCheckAggregator healthCheck; + + @Inject + public HealthCheckResource(HealthCheckAggregator healthCheck) { + this.healthCheck = healthCheck; + } + + @GET + public HealthCheckStatus doCheck() throws Exception { + return healthCheck.check().get(); + } +} diff --git a/server/build.gradle b/server/build.gradle index 49d99e7a99..c27dbf71c8 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -5,12 +5,12 @@ buildscript { } plugins { - id 'com.github.johnrengelman.shadow' version '2.0.4' + id 'com.github.johnrengelman.shadow' version '2.0.4' } configurations.all { resolutionStrategy { - force 'com.fasterxml.jackson.core:jackson-core:2.7.5' + force 'com.fasterxml.jackson.core:jackson-core:2.7.5' } } @@ -18,43 +18,45 @@ apply plugin: 'war' apply plugin: "org.akhikhl.gretty" dependencies { - - //Conductor - compile project(':conductor-core') - compile project(':conductor-jersey') - compile project(':conductor-redis-persistence') - compile 
project(':conductor-mysql-persistence') - compile project(':conductor-contribs') + + //Conductor + compile project(':conductor-core') + compile project(':conductor-jersey') + compile project(':conductor-redis-persistence') + compile project(':conductor-mysql-persistence') + compile project(':conductor-contribs') compile project(':conductor-es5-persistence') compile project(':conductor-grpc-server') - - //Jetty - compile "org.eclipse.jetty:jetty-server:${revJetteyServer}" - compile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}" - - //Guice - compile "com.sun.jersey.contribs:jersey-guice:${revJerseyGuice}" - compile "com.google.inject:guice:${revGuice}" - compile "com.google.inject.extensions:guice-servlet:${revGuiceServlet}" - - //Swagger - compile "io.swagger:swagger-jersey-jaxrs:${revSwagger}" + + compile "com.netflix.runtime:health-guice:${revHealth}" + + //Jetty + compile "org.eclipse.jetty:jetty-server:${revJetteyServer}" + compile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}" + + //Guice + compile "com.sun.jersey.contribs:jersey-guice:${revJerseyGuice}" + compile "com.google.inject:guice:${revGuice}" + compile "com.google.inject.extensions:guice-servlet:${revGuiceServlet}" + + //Swagger + compile "io.swagger:swagger-jersey-jaxrs:${revSwagger}" } shadowJar { - mergeServiceFiles() - configurations = [project.configurations.compile] - manifest { - attributes 'Description': 'Self contained Conductor server jar' - attributes 'Main-Class' : 'com.netflix.conductor.bootstrap.Main' - } + mergeServiceFiles() + configurations = [project.configurations.compile] + manifest { + attributes 'Description': 'Self contained Conductor server jar' + attributes 'Main-Class': 'com.netflix.conductor.bootstrap.Main' + } } publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar + publications { + nebula(MavenPublication) { + artifact shadowJar + } } - } } gretty { @@ -74,10 +76,10 @@ configurations.grettyRunnerTomcat8 { build.dependsOn('shadowJar') task server(type: JavaExec) { - systemProperty 'workflow.elasticsearch.url', 'localhost:9300' + systemProperty 'workflow.elasticsearch.url', 'localhost:9300' // Switch between Elasticsearch versions 2 & 5 with major version number. 
- systemProperty 'loadSample', 'true'
- systemProperties System.properties
- main = 'com.netflix.conductor.server.Main'
- classpath = sourceSets.test.runtimeClasspath
+ systemProperty 'loadSample', 'true'
+ systemProperties System.properties
+ main = 'com.netflix.conductor.server.Main'
+ classpath = sourceSets.test.runtimeClasspath
}
diff --git a/server/src/main/java/com/netflix/conductor/server/ServerModule.java b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
index 175e4c9a0c..8e7c1f084c 100644
--- a/server/src/main/java/com/netflix/conductor/server/ServerModule.java
+++ b/server/src/main/java/com/netflix/conductor/server/ServerModule.java
@@ -19,12 +19,14 @@
 import com.google.inject.AbstractModule;
 import com.google.inject.Scopes;
+import com.netflix.archaius.guice.ArchaiusModule;
 import com.netflix.conductor.common.utils.JsonMapperProvider;
 import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.core.config.CoreModule;
 import com.netflix.conductor.dyno.SystemPropertiesDynomiteConfiguration;
 import com.netflix.conductor.grpc.server.GRPCModule;
 import com.netflix.conductor.jetty.server.JettyModule;
+import com.netflix.runtime.health.guice.HealthModule;
 import java.util.concurrent.ExecutorService;
@@ -37,6 +39,8 @@ public class ServerModule extends AbstractModule {
 @Override
 protected void configure() {
 install(new CoreModule());
+ install(new ArchaiusModule());
+ install(new HealthModule());
 install(new JettyModule());
 install(new GRPCModule());
diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle
index 35f2bce3f9..a738972d31 100644
--- a/versionsOfDependencies.gradle
+++ b/versionsOfDependencies.gradle
@@ -18,6 +18,7 @@ ext {
 revGuice = '4.1.0'
 revGuiceMultiBindings = '4.1.0'
 revGuiceServlet = '4.1.0'
+ revHealth = '1.1.+'
 revHikariCP = '2.6.3'
 revJsonPath = '2.2.0'
 revJaxrsJackson = '2.7.5'
From 1c4a44eae57b8a1d28c4305981a114570576443c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?=
Date: Wed, 25 Jul 2018 18:01:20 -0700
Subject: [PATCH 099/163] Decoupling Elasticsearch instance configuration from
 the db namespace

- Added property 'elasticsearch' so we are able to run an external or
 in-memory instance regardless of the option chosen for db
---
 .../conductor/core/config/Configuration.java | 15 +++++++++++++++
 docker/server/config/config-local.properties | 8 ++++++++
 docker/server/config/config-mysql-grpc.properties | 8 ++++++++
 docker/server/config/config-mysql.properties | 8 ++++++++
 docker/server/config/config.properties | 8 ++++++++
 .../es5/EmbeddedElasticSearchV5Provider.java | 2 +-
 6 files changed, 48 insertions(+), 1 deletion(-)

diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
index 920aeef36c..42138741f3 100644
--- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
+++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
@@ -27,6 +27,9 @@ public interface Configuration {
 String DB_PROPERTY_NAME = "db";
 String DB_DEFAULT_VALUE = "memory";
+ String ELASTICSEARCH_PROPERTY_NAME = "elasticsearch";
+ String ELASTICSEARCH_DEFAULT_VALUE = "memory";
+
 String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds";
 int SWEEP_FREQUENCY_DEFAULT_VALUE = 30;
@@ -66,6 +69,14 @@ default String getDBString() {
 return getProperty(DB_PROPERTY_NAME, DB_DEFAULT_VALUE).toUpperCase();
 }
+ default ELASTICSEARCH getElasticSearchType() {
+ return
ELASTICSEARCH.valueOf(getElasticSearchString()); + } + + default String getElasticSearchString() { + return getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_DEFAULT_VALUE).toUpperCase(); + } + /** * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows. */ @@ -166,4 +177,8 @@ default List getAdditionalModules() { enum DB { REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL } + + enum ELASTICSEARCH { + MEMORY, EXTERNAL + } } diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties index b59ea05a26..640f9c6ff4 100755 --- a/docker/server/config/config-local.properties +++ b/docker/server/config/config-local.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=memory +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + # Dynomite Cluster details. # format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties index 57de7df2d3..651901077c 100755 --- a/docker/server/config/config-mysql-grpc.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=true db=mysql +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties index 52f2eb4b3c..d7b574143b 100755 --- a/docker/server/config/config-mysql.properties +++ b/docker/server/config/config-mysql.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=mysql +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties index c289e65964..23866501b9 100755 --- a/docker/server/config/config.properties +++ b/docker/server/config/config.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=dynomite +# Elastic search instance. 
Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + # Dynomite Cluster details. # format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java index b0b0ac24eb..5ee496a43e 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java @@ -29,6 +29,6 @@ public Optional get() { } private boolean isEmbedded() { - return configuration.getDB().equals(Configuration.DB.MEMORY); + return configuration.getElasticSearchType().equals(Configuration.ELASTICSEARCH.MEMORY); } } From 424cfc80b86f9d670ae7d9287aa0ad52e47a5d7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 25 Jul 2018 21:28:33 -0700 Subject: [PATCH 100/163] Consolidated payloads for starting a new workflow. - A dto is shared for starting a workflow regardless of the rest resource used - Added the possibility of using a workflow definition when running a workflow. This is a transition for the concept of ephemeral workflows - Added validations related to shared DTO --- .../workflow/StartWorkflowRequest.java | 165 ++++++++++-------- .../core/execution/WorkflowExecutor.java | 13 +- .../core/execution/tasks/SubWorkflow.java | 2 +- .../server/service/WorkflowServiceImpl.java | 2 +- .../server/resources/WorkflowResource.java | 24 ++- .../conductor/jetty/server/JettyServer.java | 13 +- 6 files changed, 137 insertions(+), 82 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java index 9d3d93f9e5..4574e4c2ff 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java @@ -1,77 +1,106 @@ package com.netflix.conductor.common.metadata.workflow; -import com.github.vmg.protogen.annotations.*; +import com.github.vmg.protogen.annotations.ProtoField; +import com.github.vmg.protogen.annotations.ProtoMessage; import java.util.HashMap; import java.util.Map; @ProtoMessage public class StartWorkflowRequest { - @ProtoField(id = 1) - private String name; - - @ProtoField(id = 2) - private Integer version; - - @ProtoField(id = 3) - private String correlationId; - - @ProtoField(id = 4) - private Map input = new HashMap<>(); - - @ProtoField(id = 5) - private Map taskToDomain = new HashMap<>(); - - public String getName() { - return name; - } - public void setName(String name) { - this.name = name; - } - public StartWorkflowRequest withName(String name) { - this.name = name; - return this; - } - public Integer getVersion() { - return version; - } - public void setVersion(Integer version) { - this.version = version; - } - public 
StartWorkflowRequest withVersion(Integer version) { - this.version = version; - return this; - } - public String getCorrelationId() { - return correlationId; - } - public void setCorrelationId(String correlationId) { - this.correlationId = correlationId; - } - public StartWorkflowRequest withCorrelationId(String correlationId) { - this.correlationId = correlationId; - return this; - } - public Map getInput() { - return input; - } - public void setInput(Map input) { - this.input = input; - } - public StartWorkflowRequest withInput(Map input) { - this.input = input; - return this; - } - public Map getTaskToDomain() { - return taskToDomain; - } - public void setTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - } - public StartWorkflowRequest withTaskToDomain(Map taskToDomain) { - this.taskToDomain = taskToDomain; - return this; - } - - + + @ProtoField(id = 1) + private String name; + + @ProtoField(id = 2) + private Integer version; + + @ProtoField(id = 3) + private String correlationId; + + @ProtoField(id = 4) + private Map input = new HashMap<>(); + + @ProtoField(id = 5) + private Map taskToDomain = new HashMap<>(); + + private WorkflowDef workflowDef; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public StartWorkflowRequest withName(String name) { + this.name = name; + return this; + } + + public Integer getVersion() { + return version; + } + + public void setVersion(Integer version) { + this.version = version; + } + + public StartWorkflowRequest withVersion(Integer version) { + this.version = version; + return this; + } + + public String getCorrelationId() { + return correlationId; + } + + public void setCorrelationId(String correlationId) { + this.correlationId = correlationId; + } + + public StartWorkflowRequest withCorrelationId(String correlationId) { + this.correlationId = correlationId; + return this; + } + + public StartWorkflowRequest withTaskToDomain(Map taskToDomain) { + this.taskToDomain = taskToDomain; + return this; + } + + public StartWorkflowRequest withWorkflowDef(WorkflowDef workflowDef) { + this.workflowDef = workflowDef; + return this; + } + + public Map getInput() { + return input; + } + + public void setInput(Map input) { + this.input = input; + } + + public StartWorkflowRequest withInput(Map input) { + this.input = input; + return this; + } + + public Map getTaskToDomain() { + return taskToDomain; + } + + public void setTaskToDomain(Map taskToDomain) { + this.taskToDomain = taskToDomain; + } + + public WorkflowDef getWorkflowDef() { + return workflowDef; + } + + public void setWorkflowDef(WorkflowDef workflowDef) { + this.workflowDef = workflowDef; + } } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 596954990c..657920e914 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -141,7 +141,8 @@ public String startWorkflow( String correlationId, Map input, String event, - Map taskToDomain + Map taskToDomain, + WorkflowDef workflowDef ) { return startWorkflow( name, @@ -151,7 +152,8 @@ public String startWorkflow( null, null, event, - taskToDomain + taskToDomain, + workflowDef ); } @@ -175,6 +177,7 @@ public String startWorkflow( parentWorkflowId, parentWorkflowTaskId, event, + null, null ); } @@ -190,11 +193,13 @@ public String 
startWorkflow( String parentWorkflowId, String parentWorkflowTaskId, String event, - Map taskToDomain + Map taskToDomain, + WorkflowDef workflowDef ) { Optional potentialDef = - version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); + workflowDef != null ? Optional.of(workflowDef) : + version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); //Check if the workflow definition is valid WorkflowDef workflowDefinition = potentialDef diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java index 79afced249..a26be43656 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java @@ -59,7 +59,7 @@ public void start(Workflow workflow, Task task, WorkflowExecutor provider) throw try { - String subWorkflowId = provider.startWorkflow(name, version, wfInput, correlationId, workflow.getWorkflowId(), task.getTaskId(), null, workflow.getTaskToDomain()); + String subWorkflowId = provider.startWorkflow(name, version, wfInput, correlationId, workflow.getWorkflowId(), task.getTaskId(), null, workflow.getTaskToDomain(), null); task.getOutputData().put("subWorkflowId", subWorkflowId); task.getInputData().put("subWorkflowId", subWorkflowId); task.setStatus(Status.IN_PROGRESS); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 279c1bf6ec..607078eafa 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -56,7 +56,7 @@ public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, try { String id = executor.startWorkflow( request.getName(), GRPC_HELPER.optional(request.getVersion()), request.getCorrelationId(), - request.getInput(), null, request.getTaskToDomain()); + request.getInput(), null, request.getTaskToDomain(), request.getWorkflowDef()); response.onNext(WorkflowServicePb.StartWorkflowResponse.newBuilder() .setWorkflowId(id) .build() diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index 767759c857..e4d3f8481e 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java @@ -15,6 +15,7 @@ */ package com.netflix.conductor.server.resources; +import com.google.common.base.Strings; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; @@ -82,25 +83,36 @@ public WorkflowResource(WorkflowExecutor executor, ExecutionService service, Met @Produces({MediaType.TEXT_PLAIN}) @ApiOperation("Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain") public String startWorkflow(StartWorkflowRequest request) { + if (Strings.isNullOrEmpty(request.getName())) { + throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a 
workflow."); + } return executor.startWorkflow( request.getName(), request.getVersion(), request.getCorrelationId(), request.getInput(), null, - request.getTaskToDomain() + request.getTaskToDomain(), + request.getWorkflowDef() ); } @POST @Path("/{name}") @Produces({MediaType.TEXT_PLAIN}) - @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking") - public String startWorkflow( - @PathParam("name") String name, @QueryParam("version") Integer version, - @QueryParam("correlationId") String correlationId, Map input) { + @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking.") + public String startWorkflow(@PathParam("name") String name, StartWorkflowRequest request) { + String workflowName = request.getName(); + if (Strings.isNullOrEmpty(name) || (!Strings.isNullOrEmpty(workflowName) && !name.equals(workflowName))) { + throw new ApplicationException( + Code.INVALID_INPUT, + "Cannot run workflow with name inconsistencies. " + + "Make sure the name on the url and the name on the payload matches." + ); + } - return executor.startWorkflow(name, version, correlationId, input, null); + return executor.startWorkflow(name, request.getVersion(), request.getCorrelationId(), + request.getInput(), null, request.getTaskToDomain(), request.getWorkflowDef()); } @GET diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java index 74b56073b1..293d87e747 100644 --- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java @@ -15,6 +15,7 @@ */ package com.netflix.conductor.jetty.server; +import com.google.common.collect.ImmutableMap; import com.google.inject.servlet.GuiceFilter; import com.fasterxml.jackson.databind.ObjectMapper; @@ -32,6 +33,7 @@ import java.util.EnumSet; import java.util.LinkedList; import java.util.List; +import java.util.Map; import javax.servlet.DispatcherType; import javax.ws.rs.core.MediaType; @@ -115,8 +117,15 @@ private static void createKitchenSink(int port) throws Exception { stream = Main.class.getResourceAsStream("/sub_flow_1.json"); client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); - String input = "{\"task2Name\":\"task_5\"}"; - client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(input); + Map payload = ImmutableMap.of("input", + ImmutableMap.of("task2Name", "task_5")); + String payloadStr = om.writeValueAsString(payload); + client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(payloadStr); + + // Ephemeral workflow + + //client.resource("http://localhost:" + port + "/api/workflow/ephemeralTest").type(MediaType.APPLICATION_JSON).post(input); + logger.info("Kitchen sink workflows are created!"); } From 343b4d9c2157aa288d7c00a2d6341bf2887eb3dd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 25 Jul 2018 22:50:15 -0700 Subject: [PATCH 101/163] Added example of a ephemeral workflow with stored tasks --- .../conductor/jetty/server/JettyServer.java | 9 +- .../ephemeralWithStoredTasks-kitchenSink.json | 167 ++++++++++++++++++ 2 files changed, 172 insertions(+), 4 deletions(-) create mode 100644 server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json diff 
--git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java index 293d87e747..2ebf27994f 100644 --- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java @@ -122,11 +122,12 @@ private static void createKitchenSink(int port) throws Exception { String payloadStr = om.writeValueAsString(payload); client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(payloadStr); - // Ephemeral workflow - - //client.resource("http://localhost:" + port + "/api/workflow/ephemeralTest").type(MediaType.APPLICATION_JSON).post(input); + logger.info("Kitchen sink workflows are created!"); - logger.info("Kitchen sink workflows are created!"); + // Ephemeral workflow + InputStream ephemeralInputStream = Main.class.getResourceAsStream("/ephemeralWithStoredTasks-kitchenSink.json"); + client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSink").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); + logger.info("Ephemeral Kitchen sink workflow with stored task is created!"); } } diff --git a/server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json b/server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json new file mode 100644 index 0000000000..36628ef419 --- /dev/null +++ b/server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json @@ -0,0 +1,167 @@ +{ + + "workflowDef": { + "name": "ephemeralKitchenSink", + "description": "kitchensink workflow definition", + "version": 1, + "tasks": [ + { + "name": "task_1", + "taskReferenceName": "task_1", + "inputParameters": { + "mod": "${workflow.input.mod}", + "oddEven": "${workflow.input.oddEven}", + "env": { + "taskId": "${CPEWF_TASK_ID}", + "workflowId": "${HOSTNAME}" + } + }, + "type": "SIMPLE" + }, + { + "name": "event_task", + "taskReferenceName": "event_0", + "inputParameters": { + "mod": "${workflow.input.mod}", + "oddEven": "${workflow.input.oddEven}" + }, + "type": "EVENT", + "sink": "conductor" + }, + { + "name": "dyntask", + "taskReferenceName": "task_2", + "inputParameters": { + "taskToExecute": "${workflow.input.task2Name}" + }, + "type": "DYNAMIC", + "dynamicTaskNameParam": "taskToExecute" + }, + { + "name": "oddEvenDecision", + "taskReferenceName": "oddEvenDecision", + "inputParameters": { + "oddEven": "${task_2.output.oddEven}" + }, + "type": "DECISION", + "caseValueParam": "oddEven", + "decisionCases": { + "0": [ + { + "name": "task_4", + "taskReferenceName": "task_4", + "inputParameters": { + "mod": "${task_2.output.mod}", + "oddEven": "${task_2.output.oddEven}" + }, + "type": "SIMPLE" + }, + { + "name": "dynamic_fanout", + "taskReferenceName": "fanout1", + "inputParameters": { + "dynamicTasks": "${task_4.output.dynamicTasks}", + "input": "${task_4.output.inputs}" + }, + "type": "FORK_JOIN_DYNAMIC", + "dynamicForkTasksParam": "dynamicTasks", + "dynamicForkTasksInputParamName": "input" + }, + { + "name": "dynamic_join", + "taskReferenceName": "join1", + "type": "JOIN" + } + ], + "1": [ + { + "name": "fork_join", + "taskReferenceName": "forkx", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "task_10", + "taskReferenceName": "task_10", + "type": "SIMPLE" + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf3", + "inputParameters": { + "mod": "${task_1.output.mod}", + "oddEven": "${task_1.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + 
"subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + [ + { + "name": "task_11", + "taskReferenceName": "task_11", + "type": "SIMPLE" + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf4", + "inputParameters": { + "mod": "${task_1.output.mod}", + "oddEven": "${task_1.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ] + ] + }, + { + "name": "join", + "taskReferenceName": "join2", + "type": "JOIN", + "joinOn": [ + "wf3", + "wf4" + ] + } + ] + } + }, + { + "name": "search_elasticsearch", + "taskReferenceName": "get_es_1", + "inputParameters": { + "http_request": { + "uri": "http://localhost:9200/conductor/_search?size=10", + "method": "GET" + } + }, + "type": "HTTP" + }, + { + "name": "task_30", + "taskReferenceName": "task_30", + "inputParameters": { + "statuses": "${get_es_1.output..status}", + "workflowIds": "${get_es_1.output..workflowId}" + }, + "type": "SIMPLE" + } + ], + "outputParameters": { + "statues": "${get_es_1.output..status}", + "workflowIds": "${get_es_1.output..workflowId}" + }, + "schemaVersion": 2 + }, + + "input": { + "task2Name": "task_5" + } +} From f43aa00c779a2637a83c4d5d3fd4decf05bacb85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 26 Jul 2018 12:13:03 -0700 Subject: [PATCH 102/163] When starting a new workflow execution, store workflow definition if it is ephemeral --- .../conductor/server/resources/WorkflowResource.java | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index e4d3f8481e..e6298ae337 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java @@ -86,6 +86,9 @@ public String startWorkflow(StartWorkflowRequest request) { if (Strings.isNullOrEmpty(request.getName())) { throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a workflow."); } + if (request.getWorkflowDef() != null) { + metadata.registerWorkflowDef(request.getWorkflowDef()); + } return executor.startWorkflow( request.getName(), request.getVersion(), @@ -107,10 +110,14 @@ public String startWorkflow(@PathParam("name") String name, StartWorkflowRequest throw new ApplicationException( Code.INVALID_INPUT, "Cannot run workflow with name inconsistencies. " + - "Make sure the name on the url and the name on the payload matches." + "Make sure the name on the url and the name on the payload match." 
);
 }
+
+ if (request.getWorkflowDef() != null) {
+ metadata.registerWorkflowDef(request.getWorkflowDef());
+ }
+
 return executor.startWorkflow(name, request.getVersion(), request.getCorrelationId(),
 request.getInput(), null, request.getTaskToDomain(), request.getWorkflowDef());
 }
From cd8b125633a8f1fb8d1c6bb41a80f125cbe6763f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?=
Date: Thu, 26 Jul 2018 19:04:52 -0700
Subject: [PATCH 103/163] Added payload example for an ephemeral workflow with
 ephemeral tasks
---
 ...k-ephemeralWorkflowWithEphemeralTasks.json | 260 ++++++++++++++++++
 ...ink-ephemeralWorkflowWithStoredTasks.json} | 2 +-
 2 files changed, 261 insertions(+), 1 deletion(-)
 create mode 100644 server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json
 rename server/src/main/resources/{ephemeralWithStoredTasks-kitchenSink.json => kitchenSink-ephemeralWorkflowWithStoredTasks.json} (99%)

diff --git a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json
new file mode 100644
index 0000000000..8dc1ff3191
--- /dev/null
+++ b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json
@@ -0,0 +1,260 @@
+{
+ "workflowDef": {
+ "name": "ephemeralKitchenSinkEphemeralTasks",
+ "description": "Kitchensink ephemeral workflow with ephemeral tasks",
+ "version": 1,
+ "tasks": [
+ {
+ "name": "task_10001",
+ "taskReferenceName": "task_10001",
+ "inputParameters": {
+ "mod": "${workflow.input.mod}",
+ "oddEven": "${workflow.input.oddEven}",
+ "env": {
+ "taskId": "${CPEWF_TASK_ID}",
+ "workflowId": "${HOSTNAME}"
+ }
+ },
+ "type": "SIMPLE",
+ "taskDef": {
+ "ownerApp": null,
+ "createTime": null,
+ "updateTime": null,
+ "createdBy": null,
+ "updatedBy": null,
+ "name": "task_10001",
+ "description": "task_10001",
+ "retryCount": 1,
+ "timeoutSeconds": 0,
+ "inputKeys": [],
+ "outputKeys": [],
+ "timeoutPolicy": "TIME_OUT_WF",
+ "retryLogic": "FIXED",
+ "retryDelaySeconds": 60,
+ "responseTimeoutSeconds": 3600,
+ "concurrentExecLimit": null,
+ "inputTemplate": {}
+ }
+ },
+ {
+ "name": "event_task",
+ "taskReferenceName": "event_0",
+ "inputParameters": {
+ "mod": "${workflow.input.mod}",
+ "oddEven": "${workflow.input.oddEven}"
+ },
+ "type": "EVENT",
+ "sink": "conductor"
+ },
+ {
+ "name": "dyntask",
+ "taskReferenceName": "task_2",
+ "inputParameters": {
+ "taskToExecute": "${workflow.input.task2Name}"
+ },
+ "type": "DYNAMIC",
+ "dynamicTaskNameParam": "taskToExecute"
+ },
+ {
+ "name": "oddEvenDecision",
+ "taskReferenceName": "oddEvenDecision",
+ "inputParameters": {
+ "oddEven": "${task_2.output.oddEven}"
+ },
+ "type": "DECISION",
+ "caseValueParam": "oddEven",
+ "decisionCases": {
+ "0": [
+ {
+ "name": "task_10004",
+ "taskReferenceName": "task_10004",
+ "inputParameters": {
+ "mod": "${task_2.output.mod}",
+ "oddEven": "${task_2.output.oddEven}"
+ },
+ "type": "SIMPLE",
+ "taskDef": {
+ "ownerApp": null,
+ "createTime": null,
+ "updateTime": null,
+ "createdBy": null,
+ "updatedBy": null,
+ "name": "task_10004",
+ "description": "task_10004",
+ "retryCount": 1,
+ "timeoutSeconds": 0,
+ "inputKeys": [],
+ "outputKeys": [],
+ "timeoutPolicy": "TIME_OUT_WF",
+ "retryLogic": "FIXED",
+ "retryDelaySeconds": 60,
+ "responseTimeoutSeconds": 3600,
+ "concurrentExecLimit": null,
+ "inputTemplate": {}
+ }
+ },
+ {
+ "name": "dynamic_fanout",
+ "taskReferenceName": "fanout1",
+ "inputParameters": {
+ "dynamicTasks":
"${task_10004.output.dynamicTasks}", + "input": "${task_10004.output.inputs}" + }, + "type": "FORK_JOIN_DYNAMIC", + "dynamicForkTasksParam": "dynamicTasks", + "dynamicForkTasksInputParamName": "input" + }, + { + "name": "dynamic_join", + "taskReferenceName": "join1", + "type": "JOIN" + } + ], + "1": [ + { + "name": "fork_join", + "taskReferenceName": "forkx", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "task_100010", + "taskReferenceName": "task_100010", + "type": "SIMPLE", + "taskDef": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "task_100010", + "description": "task_100010", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf3", + "inputParameters": { + "mod": "${task_10001.output.mod}", + "oddEven": "${task_10001.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ], + [ + { + "name": "task_100011", + "taskReferenceName": "task_100011", + "type": "SIMPLE", + "taskDef": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "task_100011", + "description": "task_100011", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } + }, + { + "name": "sub_workflow_x", + "taskReferenceName": "wf4", + "inputParameters": { + "mod": "${task_10001.output.mod}", + "oddEven": "${task_10001.output.oddEven}" + }, + "type": "SUB_WORKFLOW", + "subWorkflowParam": { + "name": "sub_flow_1", + "version": 1 + } + } + ] + ] + }, + { + "name": "join", + "taskReferenceName": "join2", + "type": "JOIN", + "joinOn": [ + "wf3", + "wf4" + ] + } + ] + } + }, + { + "name": "search_elasticsearch", + "taskReferenceName": "get_es_1", + "inputParameters": { + "http_request": { + "uri": "http://localhost:9200/conductor/_search?size=10", + "method": "GET" + } + }, + "type": "HTTP" + }, + { + "name": "task_100030", + "taskReferenceName": "task_100030", + "inputParameters": { + "statuses": "${get_es_1.output..status}", + "workflowIds": "${get_es_1.output..workflowId}" + }, + "type": "SIMPLE", + "taskDef": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "task_100030", + "description": "task_100030", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } + } + ], + "outputParameters": { + "statues": "${get_es_1.output..status}", + "workflowIds": "${get_es_1.output..workflowId}" + }, + "schemaVersion": 2 + }, + "input": { + "task2Name": "task_10005" + } +} diff --git a/server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json similarity index 99% rename from server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json rename to 
server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json index 36628ef419..4fc9c81161 100644 --- a/server/src/main/resources/ephemeralWithStoredTasks-kitchenSink.json +++ b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json @@ -1,7 +1,7 @@ { "workflowDef": { - "name": "ephemeralKitchenSink", + "name": "ephemeralKitchenSinkStoredTasks", "description": "kitchensink workflow definition", "version": 1, "tasks": [ From b3de0c6bf7aba3df2be380729d91d71211b063f8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 26 Jul 2018 19:08:35 -0700 Subject: [PATCH 104/163] Added concept of ephemeral tasks - Workflow can have ephemeral or stored tasks - If an ephemeral task is detected, the system stores it as part of the workflow execution --- .../workflow/StartWorkflowRequest.java | 1 + .../metadata/workflow/WorkflowTask.java | 107 +++++++++++------- .../core/execution/WorkflowExecutor.java | 14 +-- .../conductor/service/MetadataService.java | 23 ++++ .../conductor/grpc/AbstractProtoMapper.java | 12 ++ .../proto/model/startworkflowrequest.proto | 2 + grpc/src/main/proto/model/workflowtask.proto | 2 + .../server/resources/WorkflowResource.java | 4 +- .../conductor/jetty/server/JettyServer.java | 31 +++-- 9 files changed, 139 insertions(+), 57 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java index 4574e4c2ff..3ef85c556f 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/StartWorkflowRequest.java @@ -24,6 +24,7 @@ public class StartWorkflowRequest { @ProtoField(id = 5) private Map taskToDomain = new HashMap<>(); + @ProtoField(id = 6) private WorkflowDef workflowDef; public String getName() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index b47b5a42d2..81af0c2fe0 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -19,6 +19,7 @@ package com.netflix.conductor.common.metadata.workflow; import com.github.vmg.protogen.annotations.*; +import com.netflix.conductor.common.metadata.tasks.TaskDef; import java.util.Collection; import java.util.HashMap; @@ -134,6 +135,9 @@ public void setTasks(List tasks) { @ProtoField(id = 18) private boolean optional = false; + + @ProtoField(id = 19) + private TaskDef taskDef; /** * @return the name @@ -394,6 +398,29 @@ public boolean isOptional() { return optional; } + /** + * + * @return If the task is defined by the user, regardless of its nature (ephemeral or stored) + */ + public boolean isUserDefined() { + return this.getType().equals(Type.SIMPLE.name()); + } + + /** + * + * @return Task definition associated to the Workflow Task + */ + public TaskDef getTaskDef() { + return taskDef; + } + + /** + * @param taskDef Task definition + */ + public void setTaskDef(TaskDef taskDef) { + this.taskDef = taskDef; + } + /** * * @param optional when set to true, the task is marked as optional @@ -401,86 +428,88 @@ public boolean isOptional() { public void setOptional(boolean optional) { this.optional = optional; } - - private Collection> 
children(){ - Collection> v1 = new LinkedList<>(); - Type tt = Type.USER_DEFINED; - if(Type.isSystemTask(type)) { - tt = Type.valueOf(type); + + private Collection> children() { + Collection> workflowTaskLists = new LinkedList<>(); + Type taskType = Type.USER_DEFINED; + if (Type.isSystemTask(type)) { + taskType = Type.valueOf(type); } - - switch(tt){ + + switch (taskType) { case DECISION: - v1.addAll(decisionCases.values()); - v1.add(defaultCase); + workflowTaskLists.addAll(decisionCases.values()); + workflowTaskLists.add(defaultCase); break; case FORK_JOIN: - v1.addAll(forkTasks); + workflowTaskLists.addAll(forkTasks); break; default: break; } - return v1; - + return workflowTaskLists; + } - - public List all(){ + + public List all() { List all = new LinkedList<>(); all.add(this); - for (List wfts : children() ){ - for(WorkflowTask wft : wfts){ - all.addAll(wft.all()); + for (List workflowTaskList : children()) { + for (WorkflowTask workflowTask : workflowTaskList) { + all.addAll(workflowTask.all()); } } return all; } - - public WorkflowTask next(String taskReferenceName, WorkflowTask parent){ - Type tt = Type.USER_DEFINED; - if(Type.isSystemTask(type)) { - tt = Type.valueOf(type); + + public WorkflowTask next(String taskReferenceName, WorkflowTask parent) { + Type taskType = Type.USER_DEFINED; + if (Type.isSystemTask(type)) { + taskType = Type.valueOf(type); } - - switch(tt){ + + switch (taskType) { case DECISION: - for (List wfts : children() ){ + for (List wfts : children()) { Iterator it = wfts.iterator(); - while(it.hasNext()){ + while (it.hasNext()) { WorkflowTask task = it.next(); - if(task.getTaskReferenceName().equals(taskReferenceName)){ + if (task.getTaskReferenceName().equals(taskReferenceName)) { break; } WorkflowTask nextTask = task.next(taskReferenceName, this); - if(nextTask != null){ + if (nextTask != null) { return nextTask; } - if(task.has(taskReferenceName)){ + if (task.has(taskReferenceName)) { break; } } - if(it.hasNext()) { return it.next(); } + if (it.hasNext()) { + return it.next(); + } } break; case FORK_JOIN: boolean found = false; - for (List wfts : children() ){ + for (List wfts : children()) { Iterator it = wfts.iterator(); - while(it.hasNext()){ + while (it.hasNext()) { WorkflowTask task = it.next(); - if(task.getTaskReferenceName().equals(taskReferenceName)){ + if (task.getTaskReferenceName().equals(taskReferenceName)) { found = true; break; } WorkflowTask nextTask = task.next(taskReferenceName, this); - if(nextTask != null){ + if (nextTask != null) { return nextTask; } } - if(it.hasNext()) { - return it.next(); + if (it.hasNext()) { + return it.next(); } - if(found && parent != null){ - return parent.next(this.taskReferenceName, parent); //we need to return join task... -- get my sibling from my parent.. + if (found && parent != null) { + return parent.next(this.taskReferenceName, parent); //we need to return join task... -- get my sibling from my parent.. 
} } break; diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 657920e914..b0770e3543 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -249,17 +249,17 @@ public String startWorkflow( throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } - //because everything else is a system defined task - Set missingTaskDefs = workflowDefinition.all().stream() - .filter(wft -> wft.getType().equals(WorkflowTask.Type.SIMPLE.name())) - .map(wft2 -> wft2.getName()) + // Obtain the missing task definitions: those that are not system tasks and also don't have embedded definitions + Set missingTaskDefinitions = workflowDefinition.all().stream() + .filter(workflowTask -> (workflowTask.isUserDefined() && workflowTask.getTaskDef() == null)) + .map(workflowTask -> workflowTask.getName()) .filter(task -> metadataDAO.getTaskDef(task) == null) .collect(Collectors.toSet()); - if (!missingTaskDefs.isEmpty()) { - logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs); + if (!missingTaskDefinitions.isEmpty()) { + logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefinitions); Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); - throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); + throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefinitions); } //A random UUID is assigned to the work flow instance String workflowId = IDGenerator.generate(); diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java index ceea05961f..88aee0f664 100644 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java @@ -32,6 +32,7 @@ import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import javax.inject.Inject; import javax.inject.Singleton; @@ -158,6 +159,28 @@ public void registerWorkflowDef(WorkflowDef def) { metadata.create(def); } + /** + * Creates a workflow based on a definition considering stored and ephemeral definitions + * Improvement: Add transactional support + * + * @param def Ephemeral workflow definition + */ + public void registerEphemeralWorkflowDef(WorkflowDef def) { + + this.registerWorkflowDef(def); + + List ephemeralTaskDefinitions = def.getTasks().stream() + .filter(workflowTask -> (workflowTask.isUserDefined() && workflowTask.getTaskDef() != null)) + .map(workflowTask -> workflowTask.getTaskDef()) + .collect(Collectors.toList()); + + // TODO: add ability to batch read/write tasks on behalf of performance increase + for (TaskDef ephemeralTaskDefinition : ephemeralTaskDefinitions) { + metadata.createTaskDef(ephemeralTaskDefinition); + } + + } + /** * * @param eventHandler Event handler to be added. 
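
As an aside (not part of this patch series), the registerEphemeralWorkflowDef() path above is easiest to see with a minimal usage sketch in Java. The StartWorkflowRequest fluent setters, WorkflowTask.setTaskDef() and the four-argument TaskDef constructor are taken from the diffs in this series; the plain WorkflowDef and WorkflowTask setters (setName, setTasks, setTaskReferenceName, setType) are assumed from the surrounding codebase, and the names "adhoc_task" and "adhocWorkflow" are hypothetical.

import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class EphemeralWorkflowSketch {

    public static StartWorkflowRequest buildRequest() {
        // Ephemeral (embedded) task definition; constructor shape as used in
        // JettyServer above: (name, description, retryCount, timeoutSeconds).
        TaskDef adhocTaskDef = new TaskDef("adhoc_task", "ad-hoc task", 1, 0);

        // A SIMPLE task carrying its own definition, so no prior taskdef
        // registration is required before starting the workflow.
        WorkflowTask adhocTask = new WorkflowTask();
        adhocTask.setName("adhoc_task");
        adhocTask.setTaskReferenceName("adhoc_task_ref");
        adhocTask.setType("SIMPLE");
        adhocTask.setTaskDef(adhocTaskDef);

        // Ephemeral workflow definition wrapping the single task.
        WorkflowDef adhocWorkflow = new WorkflowDef();
        adhocWorkflow.setName("adhocWorkflow");
        adhocWorkflow.setTasks(Collections.singletonList(adhocTask));

        Map<String, Object> input = new HashMap<>();
        input.put("task2Name", "task_5");

        // The embedded definition rides along in the request; the resource
        // layer registers it via registerEphemeralWorkflowDef() before the
        // executor starts the workflow.
        return new StartWorkflowRequest()
                .withName("adhocWorkflow")
                .withWorkflowDef(adhocWorkflow)
                .withInput(input);
    }
}
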
diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 8ce9d040af..474e857fba 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -757,6 +757,9 @@ public StartWorkflowRequestPb.StartWorkflowRequest toProto(StartWorkflowRequest to.putInput( pair.getKey(), toProto( pair.getValue() ) ); } to.putAllTaskToDomain( from.getTaskToDomain() ); + if (from.getWorkflowDef() != null) { + to.setWorkflowDef( toProto( from.getWorkflowDef() ) ); + } return to.build(); } @@ -771,6 +774,9 @@ public StartWorkflowRequest fromProto(StartWorkflowRequestPb.StartWorkflowReques } to.setInput(inputMap); to.setTaskToDomain( from.getTaskToDomainMap() ); + if (from.hasWorkflowDef()) { + to.setWorkflowDef( fromProto( from.getWorkflowDef() ) ); + } return to; } @@ -886,6 +892,9 @@ public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { to.setSink( from.getSink() ); } to.setOptional( from.isOptional() ); + if (from.getTaskDef() != null) { + to.setTaskDef( toProto( from.getTaskDef() ) ); + } return to.build(); } @@ -919,6 +928,9 @@ public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setSink( from.getSink() ); to.setOptional( from.getOptional() ); + if (from.hasTaskDef()) { + to.setTaskDef( fromProto( from.getTaskDef() ) ); + } return to; } diff --git a/grpc/src/main/proto/model/startworkflowrequest.proto b/grpc/src/main/proto/model/startworkflowrequest.proto index a575b3adc4..a85ba550d1 100644 --- a/grpc/src/main/proto/model/startworkflowrequest.proto +++ b/grpc/src/main/proto/model/startworkflowrequest.proto @@ -1,6 +1,7 @@ syntax = "proto3"; package conductor.proto; +import "model/workflowdef.proto"; import "google/protobuf/struct.proto"; option java_package = "com.netflix.conductor.proto"; @@ -13,4 +14,5 @@ message StartWorkflowRequest { string correlation_id = 3; map input = 4; map task_to_domain = 5; + WorkflowDef workflow_def = 6; } diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto index 85b6b76704..2ca2eaf891 100644 --- a/grpc/src/main/proto/model/workflowtask.proto +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -1,6 +1,7 @@ syntax = "proto3"; package conductor.proto; +import "model/taskdef.proto"; import "model/subworkflowparams.proto"; import "google/protobuf/struct.proto"; @@ -42,4 +43,5 @@ message WorkflowTask { repeated string join_on = 16; string sink = 17; bool optional = 18; + TaskDef task_def = 19; } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index e6298ae337..b799ac207e 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java @@ -87,7 +87,7 @@ public String startWorkflow(StartWorkflowRequest request) { throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a workflow."); } if (request.getWorkflowDef() != null) { - metadata.registerWorkflowDef(request.getWorkflowDef()); + metadata.registerEphemeralWorkflowDef(request.getWorkflowDef()); } return executor.startWorkflow( request.getName(), @@ -115,7 +115,7 @@ public String 
startWorkflow(@PathParam("name") String name, StartWorkflowRequest
 }
 if (request.getWorkflowDef() != null) {
- metadata.registerWorkflowDef(request.getWorkflowDef());
+ metadata.registerEphemeralWorkflowDef(request.getWorkflowDef());
 }
 return executor.startWorkflow(name, request.getVersion(), request.getCorrelationId(),
diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java
index 2ebf27994f..9aff2fe619 100644
--- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java
+++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java
@@ -100,16 +100,19 @@ public synchronized void stop() throws Exception {
 private static void createKitchenSink(int port) throws Exception {
+ Client client = Client.create();
+ ObjectMapper objectMapper = new ObjectMapper();
+
+ /*
+ * Kitchensink example (stored workflow with stored tasks)
+ */
 List taskDefs = new LinkedList<>();
 for (int i = 0; i < 40; i++) {
 taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0));
 }
 taskDefs.add(new TaskDef("search_elasticsearch", "search_elasticsearch", 1, 0));
- Client client = Client.create();
- ObjectMapper om = new ObjectMapper();
- client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(om.writeValueAsString(taskDefs));
+ client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(objectMapper.writeValueAsString(taskDefs));
 InputStream stream = Main.class.getResourceAsStream("/kitchensink.json");
 client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream);
@@ -119,15 +122,25 @@
 Map payload = ImmutableMap.of("input",
 ImmutableMap.of("task2Name", "task_5"));
- String payloadStr = om.writeValueAsString(payload);
+ String payloadStr = objectMapper.writeValueAsString(payload);
 client.resource("http://localhost:" + port + "/api/workflow/kitchensink").type(MediaType.APPLICATION_JSON).post(payloadStr);
- logger.info("Kitchen sink workflows are created!");
+ logger.info("Kitchen sink workflow is created!");
+
+ /*
+ * Kitchensink example with ephemeral workflow and stored tasks
+ */
+ InputStream ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithStoredTasks.json");
+ client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkStoredTasks").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream);
+ logger.info("Ephemeral Kitchen sink workflow with stored tasks is created!");
+
+ /*
+ * Kitchensink example with ephemeral workflow and ephemeral tasks
+ */
+ ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json");
+ client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkEphemeralTasks").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream);
+ logger.info("Ephemeral Kitchen sink workflow with ephemeral tasks is created!");
- // Ephemeral workflow
- InputStream ephemeralInputStream = Main.class.getResourceAsStream("/ephemeralWithStoredTasks-kitchenSink.json");
- client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSink").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream);
- logger.info("Ephemeral Kitchen sink workflow with stored task is created!");
 }
}
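
Taken together, patches 100 through 104 let a caller start a workflow whose entire definition travels with the request. The following is a minimal smoke-test sketch of driving the new endpoint, mirroring the Jersey client calls in createKitchenSink() above. The class name and port are assumptions, and the payload JSON is assumed to be on the classpath; the resource path and payload file are the ones added in this series.

import com.sun.jersey.api.client.Client;

import javax.ws.rs.core.MediaType;

import java.io.InputStream;

public class EphemeralKitchenSinkSmokeTest {

    public static void main(String[] args) {
        int port = 8080; // assumed local server port
        Client client = Client.create();

        // The payload embeds the workflowDef, so nothing is registered up
        // front; the server stores the ephemeral definition while handling
        // the request.
        InputStream payload = EphemeralKitchenSinkSmokeTest.class
                .getResourceAsStream("/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json");

        // The endpoint produces text/plain: the id of the started instance.
        String workflowId = client
                .resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkEphemeralTasks")
                .type(MediaType.APPLICATION_JSON)
                .post(String.class, payload);

        System.out.println("Started ephemeral workflow: " + workflowId);
    }
}
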
From 7b2ca5d900bf3b9c82e413ebeb34d2ead5ab7335 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?=
Date: Fri, 27 Jul 2018 15:22:45 -0700
Subject: [PATCH 105/163] Moved ElasticSearch instance type property to
 ElasticSearchConfiguration
---
 .../conductor/core/config/Configuration.java | 14 --------------
 docker/server/config/config-local.properties | 15 +++++++--------
 docker/server/config/config-mysql-grpc.properties | 9 +++++----
 docker/server/config/config-mysql.properties | 9 +++++----
 docker/server/config/config.properties | 15 +++++++--------
 .../elasticsearch/ElasticSearchConfiguration.java | 14 ++++++++++++++
 .../es5/EmbeddedElasticSearchV5Provider.java | 6 ++----
 7 files changed, 40 insertions(+), 42 deletions(-)

diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
index 42138741f3..ffb789efa1 100644
--- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
+++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java
@@ -27,9 +27,6 @@ public interface Configuration {
 String DB_PROPERTY_NAME = "db";
 String DB_DEFAULT_VALUE = "memory";
- String ELASTICSEARCH_PROPERTY_NAME = "elasticsearch";
- String ELASTICSEARCH_DEFAULT_VALUE = "memory";
-
 String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds";
 int SWEEP_FREQUENCY_DEFAULT_VALUE = 30;
@@ -69,14 +66,6 @@ default String getDBString() {
 return getProperty(DB_PROPERTY_NAME, DB_DEFAULT_VALUE).toUpperCase();
 }
- default ELASTICSEARCH getElasticSearchType() {
- return ELASTICSEARCH.valueOf(getElasticSearchString());
- }
-
- default String getElasticSearchString() {
- return getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_DEFAULT_VALUE).toUpperCase();
- }
-
 /**
 * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows.
 */
@@ -178,7 +167,4 @@
 enum DB {
 REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL
 }
- enum ELASTICSEARCH {
- MEMORY, EXTERNAL
- }
}
diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties
index 640f9c6ff4..b72d893d56 100755
--- a/docker/server/config/config-local.properties
+++ b/docker/server/config/config-local.properties
@@ -11,14 +11,6 @@ conductor.grpc.server.enabled=false
 db=memory
-# Elastic search instance. Possible values are memory and external.
-# If not specified, the instance will be embedded in memory
-#
-# memory: The instance is created in memory and lost when the server dies. Useful for development and testing.
-# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when
-# the server dies. Useful for more stable environments like staging or production.
-elasticsearch=external
-
 # Dynomite Cluster details.
 # format is host:port:rack separated by semicolon
 workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c
@@ -37,6 +29,13 @@ queues.dynomite.threads=10
 # For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite.
 queues.dynomite.nonQuorum.port=22122
+# Elastic search instance type. Possible values are memory and external.
+# If not specified, the instance type will be embedded in memory
+#
+# memory: The instance is created in memory and lost when the server dies. Useful for development and testing.
+# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when
+# the server dies. Useful for more stable environments like staging or production.
+workflow.elasticsearch.instanceType=external # Transport address to elasticsearch workflow.elasticsearch.url=localhost:9300 diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties index 651901077c..e8aff4c808 100755 --- a/docker/server/config/config-mysql-grpc.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -11,15 +11,16 @@ conductor.grpc.server.enabled=true db=mysql -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory +jdbc.url=jdbc:mysql://mysql:3306/conductor + +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory # # memory: The instance is created in memory and lost when the server dies. Useful for development and testing. # external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when # the server dies. Useful for more stable environments like staging or production. -elasticsearch=external +workflow.elasticsearch.instanceType=external -jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties index d7b574143b..7c8a0f88b6 100755 --- a/docker/server/config/config-mysql.properties +++ b/docker/server/config/config-mysql.properties @@ -11,15 +11,16 @@ conductor.grpc.server.enabled=false db=mysql -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory +jdbc.url=jdbc:mysql://mysql:3306/conductor + +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory # # memory: The instance is created in memory and lost when the server dies. Useful for development and testing. # external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when # the server dies. Useful for more stable environments like staging or production. -elasticsearch=external +workflow.elasticsearch.instanceType=external -jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties index 23866501b9..d66187dd3f 100755 --- a/docker/server/config/config.properties +++ b/docker/server/config/config.properties @@ -11,14 +11,6 @@ conductor.grpc.server.enabled=false db=dynomite -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory -# -# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. -# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when -# the server dies. Useful for more stable environments like staging or production. -elasticsearch=external - # Dynomite Cluster details. # format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c @@ -40,6 +32,13 @@ queues.dynomite.threads=10 # For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. queues.dynomite.nonQuorum.port=22122 +# Elastic search instance type. Possible values are memory and external. 
+# If not specified, the instance will be embedded in memory
+#
+# memory: The instance is created in memory and lost when the server dies. Useful for development and testing.
+# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when
+#           the server dies. Useful for more stable environments like staging or production.
+workflow.elasticsearch.instanceType=external
 
 # Transport address to elasticsearch
 workflow.elasticsearch.url=es:9300

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java
index fad1bc8181..7f1866edf6 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java
@@ -9,6 +9,9 @@ public interface ElasticSearchConfiguration extends Configuration {
 
+    String ELASTICSEARCH_PROPERTY_NAME = "workflow.elasticsearch.instanceType";
+    ElasticSearchInstanceType ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE = ElasticSearchInstanceType.MEMORY;
+
     String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url";
     String ELASTIC_SEARCH_URL_DEFAULT_VALUE = "localhost:9300";
 
@@ -76,4 +79,15 @@ default String getEmbeddedHost() {
     default String getEmbeddedSettingsFile() {
         return getProperty(EMBEDDED_SETTINGS_FILE_PROPERTY_NAME, EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE);
     }
+
+    default ElasticSearchInstanceType getElasticSearchInstanceType() {
+        return ElasticSearchInstanceType.valueOf(
+                getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE.name()).toUpperCase()
+        );
+    }
+
+    enum ElasticSearchInstanceType {
+        MEMORY, EXTERNAL
+    }
+
 }

diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java
index 5ee496a43e..19dabec1b8 100644
--- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java
+++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java
@@ -1,13 +1,11 @@
 package com.netflix.conductor.elasticsearch.es5;
 
-import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration;
 import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch;
 import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider;
 
-import java.util.Optional;
-
 import javax.inject.Inject;
+import java.util.Optional;
 
 public class EmbeddedElasticSearchV5Provider implements EmbeddedElasticSearchProvider {
     private final ElasticSearchConfiguration configuration;
@@ -29,6 +27,6 @@ public Optional<EmbeddedElasticSearch> get() {
     }
 
     private boolean isEmbedded() {
-        return configuration.getElasticSearchType().equals(Configuration.ELASTICSEARCH.MEMORY);
+        return configuration.getElasticSearchInstanceType().equals(ElasticSearchConfiguration.ElasticSearchInstanceType.MEMORY);
    }
 }

From c48f10a1a3d12871399bd515d76fdef6868ba026 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?=
Date: Sat, 28 Jul 2018 16:08:56 -0700
Subject: [PATCH 106/163] Updated Frontend to load workflow metadata only if
 the definition is not embedded

- Workflow detail view now supports ephemeral workflows
---
 ui/src/api/wfe.js | 50 
+++++++++++++++++++++++++---------------------- 1 file changed, 27 insertions(+), 23 deletions(-) diff --git a/ui/src/api/wfe.js b/ui/src/api/wfe.js index b66d855a44..07e43d62e5 100644 --- a/ui/src/api/wfe.js +++ b/ui/src/api/wfe.js @@ -8,11 +8,11 @@ import transform from "lodash/transform"; import identity from "lodash/identity"; const router = new Router(); -const baseURL = process.env.WF_SERVER; -const baseURL2 = baseURL + 'workflow/'; -const baseURL2ByTasks = baseURL2 + 'search-by-task'; -const baseURLMeta = baseURL + 'metadata/'; -const baseURLTask = baseURL + 'tasks/'; +const baseUrl = process.env.WF_SERVER; +const baseWorkflowUrl = baseUrl + 'workflow/'; +const baseWorkflowUrlByTasks = baseWorkflowUrl + 'search-by-task'; +const baseMetadataUrl = baseUrl + 'metadata/'; +const baseTaskUrl = baseUrl + 'tasks/'; router.get('/', async (req, res, next) => { @@ -38,7 +38,7 @@ router.get('/', async (req, res, next) => { } let query = req.query.q; - const url = baseURL2 + 'search?size=100&sort=startTime:DESC&freeText=' + freeText.join(' AND ') + '&start=' + start + '&query=' + query; + const url = baseWorkflowUrl + 'search?size=100&sort=startTime:DESC&freeText=' + freeText.join(' AND ') + '&start=' + start + '&query=' + query; const result = await http.get(url); const hits = result.results; res.status(200).send({result: {hits:hits, totalHits: result.totalHits}}); @@ -72,7 +72,7 @@ router.get('/search-by-task/:taskId', async (req, res, next) => { } let query = req.query.q || ""; - const url = baseURL2 + 'search-by-tasks?size=100&sort=startTime:DESC&freeText=' + freeText.join(' AND ') + '&start=' + start; + const url = baseWorkflowUrl + 'search-by-tasks?size=100&sort=startTime:DESC&freeText=' + freeText.join(' AND ') + '&start=' + start; const result = await http.get(url); const hits = result.results; res.status(200).send({result: {hits:hits, totalHits: result.totalHits}}); @@ -83,8 +83,12 @@ router.get('/search-by-task/:taskId', async (req, res, next) => { router.get('/id/:workflowId', async (req, res, next) => { try { - const result = await http.get(baseURL2 + req.params.workflowId + '?includeTasks=true'); - const meta = await http.get(baseURLMeta + 'workflow/' + result.workflowType + '?version=' + result.version); + const result = await http.get(baseWorkflowUrl + req.params.workflowId + '?includeTasks=true'); + + let meta = result.workflowDefinition; + if(!meta){ + meta = await http.get(baseMetadataUrl + 'workflow/' + result.workflowType + '?version=' + result.version); + } const subs = filter(identity)(map(task => { if (task.taskType === 'SUB_WORKFLOW') { @@ -116,7 +120,7 @@ router.get('/id/:workflowId', async (req, res, next) => { } }); - const logs = map(task => Promise.all([task, http.get(baseURLTask + task.taskId + '/log')]))(result.tasks); + const logs = map(task => Promise.all([task, http.get(baseTaskUrl + task.taskId + '/log')]))(result.tasks); await Promise.all(logs).then(result => { forEach(([task, logs]) => { @@ -128,8 +132,8 @@ router.get('/id/:workflowId', async (req, res, next) => { const promises = map(({name, version, subWorkflowId, referenceTaskName}) => Promise.all([ referenceTaskName, - http.get(baseURLMeta + 'workflow/' + name + '?version=' + version), - http.get(baseURL2 + subWorkflowId + '?includeTasks=true') + http.get(baseMetadataUrl + 'workflow/' + name + '?version=' + version), + http.get(baseWorkflowUrl + subWorkflowId + '?includeTasks=true') ]))(subs); const subworkflows = await Promise.all(promises).then(result => { @@ -145,7 +149,7 @@ 
router.get('/id/:workflowId', async (req, res, next) => { }); router.delete('/terminate/:workflowId', async (req, res, next) => { try { - const result = await http.delete(baseURL2 + req.params.workflowId); + const result = await http.delete(baseWorkflowUrl + req.params.workflowId); res.status(200).send({result: req.params.workflowId }); } catch (err) { next(err); @@ -153,7 +157,7 @@ router.delete('/terminate/:workflowId', async (req, res, next) => { }); router.post('/restart/:workflowId', async (req, res, next) => { try { - const result = await http.post(baseURL2 + req.params.workflowId + '/restart'); + const result = await http.post(baseWorkflowUrl + req.params.workflowId + '/restart'); res.status(200).send({result: req.params.workflowId }); } catch (err) { next(err); @@ -162,7 +166,7 @@ router.post('/restart/:workflowId', async (req, res, next) => { router.post('/retry/:workflowId', async (req, res, next) => { try { - const result = await http.post(baseURL2 + req.params.workflowId + '/retry'); + const result = await http.post(baseWorkflowUrl + req.params.workflowId + '/retry'); res.status(200).send({result: req.params.workflowId }); } catch (err) { next(err); @@ -171,7 +175,7 @@ router.post('/retry/:workflowId', async (req, res, next) => { router.post('/pause/:workflowId', async (req, res, next) => { try { - const result = await http.put(baseURL2 + req.params.workflowId + '/pause'); + const result = await http.put(baseWorkflowUrl + req.params.workflowId + '/pause'); res.status(200).send({result: req.params.workflowId }); } catch (err) { next(err); @@ -180,7 +184,7 @@ router.post('/pause/:workflowId', async (req, res, next) => { router.post('/resume/:workflowId', async (req, res, next) => { try { - const result = await http.put(baseURL2 + req.params.workflowId + '/resume'); + const result = await http.put(baseWorkflowUrl + req.params.workflowId + '/resume'); res.status(200).send({result: req.params.workflowId }); } catch (err) { next(err); @@ -190,7 +194,7 @@ router.post('/resume/:workflowId', async (req, res, next) => { //metadata router.get('/metadata/workflow/:name/:version', async (req, res, next) => { try { - const result = await http.get(baseURLMeta + 'workflow/' + req.params.name + '?version=' + req.params.version); + const result = await http.get(baseMetadataUrl + 'workflow/' + req.params.name + '?version=' + req.params.version); res.status(200).send({result}); } catch (err) { next(err); @@ -198,7 +202,7 @@ router.get('/metadata/workflow/:name/:version', async (req, res, next) => { }); router.get('/metadata/workflow', async (req, res, next) => { try { - const result = await http.get(baseURLMeta + 'workflow'); + const result = await http.get(baseMetadataUrl + 'workflow'); res.status(200).send({result}); } catch (err) { next(err); @@ -206,7 +210,7 @@ router.get('/metadata/workflow', async (req, res, next) => { }); router.get('/metadata/taskdef', async (req, res, next) => { try { - const result = await http.get(baseURLMeta + 'taskdefs'); + const result = await http.get(baseMetadataUrl + 'taskdefs'); res.status(200).send({result}); } catch (err) { next(err); @@ -214,7 +218,7 @@ router.get('/metadata/taskdef', async (req, res, next) => { }); router.get('/task/log/:taskId', async (req, res, next) => { try { - const logs = await http.get(baseURLTask + req.params.taskId + '/log'); + const logs = await http.get(baseTaskUrl + req.params.taskId + '/log'); res.status(200).send({logs}); } catch (err) { next(err); @@ -222,8 +226,8 @@ router.get('/task/log/:taskId', async (req, res, next) => { 
});
 
 router.get('/queue/data', async (req, res, next) => {
     try {
-        const sizes = await http.get(baseURLTask + 'queue/all');
-        const polldata = await http.get(baseURLTask + 'queue/polldata/all');
+        const sizes = await http.get(baseTaskUrl + 'queue/all');
+        const polldata = await http.get(baseTaskUrl + 'queue/polldata/all');
 
         polldata.forEach(pd=>{
             var qname = pd.queueName;

From 1e29024b6d3a24c4739e2a0b79afd9d154c6eff8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?=
Date: Sat, 28 Jul 2018 17:37:50 -0700
Subject: [PATCH 107/163] Added support for storing embedded workflow and task
 definitions

- Workflow executions can now store embedded workflow definitions
- Workflow tasks can now store embedded task definitions
- As workflow tasks may contain embedded definitions, execution Tasks can now
  access them in those cases

During this work, some code style updates were also made:
- Replaced implementation classes with their interfaces in attribute
  declarations
- Renamed certain variables across the code to improve readability
---
 .../common/metadata/workflow/WorkflowDef.java | 14 +++---
 .../metadata/workflow/WorkflowTask.java       | 20 +++-----
 .../conductor/common/run/Workflow.java        | 14 +++---
 .../core/execution/DeciderService.java        | 48 +++++++++----------
 .../core/execution/WorkflowExecutor.java      | 26 +++++-----
 .../core/execution/WorkflowSweeper.java       | 10 ++--
 .../execution/mapper/DynamicTaskMapper.java   | 16 ++++---
 .../execution/mapper/SimpleTaskMapper.java    | 13 +++--
 .../mapper/UserDefinedTaskMapper.java         | 13 +++--
 .../conductor/service/MetadataService.java    | 22 ---------
 .../conductor/grpc/AbstractProtoMapper.java   | 11 ++---
 grpc/src/main/proto/model/workflowtask.proto  |  2 +-
 .../server/resources/AdminResource.java       |  2 +-
 .../server/resources/WorkflowResource.java    | 28 +++++------
 .../dao/mysql/MySQLExecutionDAO.java          | 19 ++++----
 .../dao/dynomite/RedisExecutionDAO.java       | 11 +++--
 .../conductor/jetty/server/JettyServer.java   |  8 ++--
 ...k-ephemeralWorkflowWithEphemeralTasks.json | 10 ++--
 .../integration/WorkflowServiceTest.java      |  6 +--
 19 files changed, 134 insertions(+), 159 deletions(-)

diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java
index 51d4e2bcf0..af37912c0a 100644
--- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java
+++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java
@@ -26,8 +26,10 @@
 import java.util.Objects;
 import java.util.Optional;
 
-import com.github.vmg.protogen.annotations.*;
+import com.github.vmg.protogen.annotations.ProtoField;
+import com.github.vmg.protogen.annotations.ProtoMessage;
 import com.netflix.conductor.common.metadata.Auditable;
+import com.netflix.conductor.common.metadata.tasks.TaskDef;
 
 /**
  * @author Viren
@@ -46,10 +48,10 @@ public class WorkflowDef extends Auditable {
     private int version = 1;
 
     @ProtoField(id = 4)
-    private LinkedList<WorkflowTask> tasks = new LinkedList<WorkflowTask>();
+    private List<WorkflowTask> tasks = new LinkedList<>();
 
     @ProtoField(id = 5)
-    private List<String> inputParameters = new LinkedList<String>();
+    private List<String> inputParameters = new LinkedList<>();
 
     @ProtoField(id = 6)
     private Map<String, Object> outputParameters = new HashMap<>();
 
@@ -95,14 +97,14 @@ public void setDescription(String description) {
     /**
      * @return the tasks
      */
-    public LinkedList<WorkflowTask> getTasks() {
+    public List<WorkflowTask> getTasks() {
         return tasks;
     }
 
     /**
      * @param tasks the tasks to set
     */
-    public void setTasks(LinkedList<WorkflowTask> 
tasks) { + public void setTasks(List tasks) { this.tasks = tasks; } @@ -228,7 +230,7 @@ public WorkflowTask getNextTask(String taskReferenceName){ public WorkflowTask getTaskByRefName(String taskReferenceName){ Optional found = all().stream() - .filter(wft -> wft.getTaskReferenceName().equals(taskReferenceName)) + .filter(workflowTask -> workflowTask.getTaskReferenceName().equals(taskReferenceName)) .findFirst(); if(found.isPresent()){ return found.get(); diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 81af0c2fe0..162e268fa3 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -137,7 +137,7 @@ public void setTasks(List tasks) { private boolean optional = false; @ProtoField(id = 19) - private TaskDef taskDef; + private TaskDef taskDefinition; /** * @return the name @@ -398,27 +398,19 @@ public boolean isOptional() { return optional; } - /** - * - * @return If the task is defined by the user, regardless of its nature (ephemeral or stored) - */ - public boolean isUserDefined() { - return this.getType().equals(Type.SIMPLE.name()); - } - /** * * @return Task definition associated to the Workflow Task */ - public TaskDef getTaskDef() { - return taskDef; + public TaskDef getTaskDefinition() { + return taskDefinition; } /** - * @param taskDef Task definition + * @param taskDefinition Task definition */ - public void setTaskDef(TaskDef taskDef) { - this.taskDef = taskDef; + public void setTaskDefinition(TaskDef taskDefinition) { + this.taskDefinition = taskDefinition; } /** diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 632f757850..f58a5049df 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -378,19 +378,19 @@ public Task getTaskByRefName(String refName) { if (refName == null) { throw new RuntimeException("refName passed is null. Check the workflow execution. 
For dynamic tasks, make sure referenceTaskName is set to a not null value"); } - LinkedList found = new LinkedList(); - for (Task t : tasks) { - if (t.getReferenceTaskName() == null) { - throw new RuntimeException("Task " + t.getTaskDefName() + ", seq=" + t.getSeq() + " does not have reference name specified."); + List found = new LinkedList<>(); + for (Task task : tasks) { + if (task.getReferenceTaskName() == null) { + throw new RuntimeException("Task " + task.getTaskDefName() + ", seq=" + task.getSeq() + " does not have reference name specified."); } - if (t.getReferenceTaskName().equals(refName)) { - found.add(t); + if (task.getReferenceTaskName().equals(refName)) { + found.add(task); } } if (found.isEmpty()) { return null; } - return found.getLast(); + return found.get(found.size() - 1); } @Override diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 3f45fdcc16..b03d37f290 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -37,13 +37,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedHashMap; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; +import java.util.*; import java.util.stream.Collectors; import javax.inject.Inject; @@ -132,9 +126,9 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa Map tasksToBeScheduled = new LinkedHashMap<>(); - preScheduledTasks.forEach(pst -> { - executedTaskRefNames.remove(pst.getReferenceTaskName()); - tasksToBeScheduled.put(pst.getReferenceTaskName(), pst); + preScheduledTasks.forEach(preScheduledTask -> { + executedTaskRefNames.remove(preScheduledTask.getReferenceTaskName()); + tasksToBeScheduled.put(preScheduledTask.getReferenceTaskName(), preScheduledTask); }); // A new workflow does not enter this code branch @@ -145,7 +139,10 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa executedTaskRefNames.remove(pendingTask.getReferenceTaskName()); } - TaskDef taskDefinition = metadataDAO.getTaskDef(pendingTask.getTaskDefName()); + String taskDefName = pendingTask.getTaskDefName(); + TaskDef taskDefinition = Optional.ofNullable(pendingTask.getWorkflowTask().getTaskDefinition()) + .orElse(metadataDAO.getTaskDef(taskDefName)); + if (taskDefinition != null) { checkForTimeout(taskDefinition, pendingTask); // If the task has not been updated for "responseTimeout" then mark task as TIMED_OUT @@ -213,7 +210,7 @@ private List startWorkflow(Workflow workflow) throws TerminateWorkflowExce throw new TerminateWorkflowException("No tasks found to be executed", WorkflowStatus.COMPLETED); } - WorkflowTask taskToSchedule = def.getTasks().getFirst(); //Nothing isSystemTask running yet - so schedule the first task + WorkflowTask taskToSchedule = def.getTasks().get(0); //Nothing isSystemTask running yet - so schedule the first task //Loop until a non-skipped task isSystemTask found while (isTaskSkipped(taskToSchedule, workflow)) { taskToSchedule = def.getNextTask(taskToSchedule.getTaskReferenceName()); @@ -244,19 +241,18 @@ private List startWorkflow(Workflow workflow) throws TerminateWorkflowExce } private void updateOutput(final Workflow workflow) { - final WorkflowDef def = workflow.getWorkflowDefinition(); + final WorkflowDef 
workflowDefinition = workflow.getWorkflowDefinition(); - List allTasks = workflow.getTasks(); - if (allTasks.isEmpty()) { + List tasks = workflow.getTasks(); + if (tasks.isEmpty()) { return; } - Task last; - last = allTasks.get(allTasks.size() - 1); - Map output = last.getOutputData(); + Task lastTask = tasks.get(tasks.size() - 1); + Map output = lastTask.getOutputData(); - if (!def.getOutputParameters().isEmpty()) { - output = parametersUtils.getTaskInput(def.getOutputParameters(), workflow, null, null); + if (!workflowDefinition.getOutputParameters().isEmpty()) { + output = parametersUtils.getTaskInput(workflowDefinition.getOutputParameters(), workflow, null, null); } workflow.setOutput(output); } @@ -270,8 +266,8 @@ private boolean checkForWorkflowCompletion(final Workflow workflow) throws Termi Map taskStatusMap = new HashMap<>(); workflow.getTasks().forEach(task -> taskStatusMap.put(task.getReferenceTaskName(), task.getStatus())); - LinkedList wftasks = workflow.getWorkflowDefinition().getTasks(); - boolean allCompletedSuccessfully = wftasks.stream().parallel().allMatch(wftask -> { + List workflowTasks = workflow.getWorkflowDefinition().getTasks(); + boolean allCompletedSuccessfully = workflowTasks.stream().parallel().allMatch(wftask -> { Status status = taskStatusMap.get(wftask.getTaskReferenceName()); return status != null && status.isSuccessful() && status.isTerminal(); }); @@ -475,16 +471,16 @@ public List getTasksToBeScheduled(Workflow workflowInstance, private boolean isTaskSkipped(WorkflowTask taskToSchedule, Workflow workflow) { try { - boolean retval = false; + boolean isTaskSkipped = false; if (taskToSchedule != null) { Task t = workflow.getTaskByRefName(taskToSchedule.getTaskReferenceName()); if (t == null) { - retval = false; + isTaskSkipped = false; } else if (t.getStatus().equals(SKIPPED)) { - retval = true; + isTaskSkipped = true; } } - return retval; + return isTaskSkipped; } catch (Exception e) { throw new TerminateWorkflowException(e.getMessage()); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index b0770e3543..6062af0490 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -90,10 +90,11 @@ public class WorkflowExecutor { private ParametersUtils parametersUtils = new ParametersUtils(); - public static final String deciderQueue = "_deciderQueue"; - private int activeWorkerLastPollnSecs; + public static final String DECIDER_QUEUE = "_deciderQueue"; + + @Inject public WorkflowExecutor( DeciderService deciderService, @@ -249,17 +250,20 @@ public String startWorkflow( throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } - // Obtain the missing task definitions: those that are not system tasks and also don't have embedded definitions - Set missingTaskDefinitions = workflowDefinition.all().stream() - .filter(workflowTask -> (workflowTask.isUserDefined() && workflowTask.getTaskDef() == null)) + // Obtain the names of the tasks with missing definitions: + // - Are not system tasks + // - Don't have embedded definitions + Set missingTaskDefinitionNames = workflowDefinition.all().stream() + .filter(workflowTask -> + (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) .map(workflowTask -> workflowTask.getName()) .filter(task -> 
metadataDAO.getTaskDef(task) == null) .collect(Collectors.toSet()); - if (!missingTaskDefinitions.isEmpty()) { - logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefinitions); + if (!missingTaskDefinitionNames.isEmpty()) { + logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefinitionNames); Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); - throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefinitions); + throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefinitionNames); } //A random UUID is assigned to the work flow instance String workflowId = IDGenerator.generate(); @@ -467,7 +471,7 @@ void completeWorkflow(Workflow wf) { decide(parent.getWorkflowId()); } Monitors.recordWorkflowCompletion(workflow.getWorkflowName(), workflow.getEndTime() - workflow.getStartTime(), wf.getOwnerApp()); - queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue + queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); //remove from the sweep queue logger.debug("Removed workflow {} from decider queue", wf.getWorkflowId()); } @@ -577,7 +581,7 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo } } - queueDAO.remove(deciderQueue, workflow.getWorkflowId()); //remove from the sweep queue + queueDAO.remove(DECIDER_QUEUE, workflow.getWorkflowId()); //remove from the sweep queue executionDAO.removeFromPendingWorkflow(workflow.getWorkflowName(), workflow.getWorkflowId()); // Send to atlas @@ -781,7 +785,7 @@ public boolean decide(String workflowId) { if (!outcome.tasksToBeUpdated.isEmpty() || !outcome.tasksToBeScheduled.isEmpty()) { executionDAO.updateTasks(tasksToBeUpdated); executionDAO.updateWorkflow(workflow); - queueDAO.push(deciderQueue, workflow.getWorkflowId(), config.getSweepFrequency()); + queueDAO.push(DECIDER_QUEUE, workflow.getWorkflowId(), config.getSweepFrequency()); } if (stateChanged) { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowSweeper.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowSweeper.java index a0decc9141..f3236636e6 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowSweeper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowSweeper.java @@ -80,8 +80,8 @@ public void init(WorkflowExecutor workflowExecutor) { logger.info("Workflow sweep is disabled."); return; } - List workflowIds = queueDAO.pop(WorkflowExecutor.deciderQueue, 2 * executorThreadPoolSize, 2000); - int currentQueueSize = queueDAO.getSize(WorkflowExecutor.deciderQueue); + List workflowIds = queueDAO.pop(WorkflowExecutor.DECIDER_QUEUE, 2 * executorThreadPoolSize, 2000); + int currentQueueSize = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE); logger.debug("Sweeper's current deciderqueue size: {}.", currentQueueSize); int retrievedWorkflows = (workflowIds != null) ? 
workflowIds.size() : 0; logger.debug("Sweeper retrieved {} workflows from the decider queue.", retrievedWorkflows); @@ -108,15 +108,15 @@ public void sweep(List workflowIds, WorkflowExecutor workflowExecutor) t } boolean done = workflowExecutor.decide(workflowId); if(!done) { - queueDAO.setUnackTimeout(WorkflowExecutor.deciderQueue, workflowId, config.getSweepFrequency() * 1000); + queueDAO.setUnackTimeout(WorkflowExecutor.DECIDER_QUEUE, workflowId, config.getSweepFrequency() * 1000); } else { - queueDAO.remove(WorkflowExecutor.deciderQueue, workflowId); + queueDAO.remove(WorkflowExecutor.DECIDER_QUEUE, workflowId); } } catch (ApplicationException e) { if(e.getCode().equals(Code.NOT_FOUND)) { logger.error("Workflow NOT found for id: " + workflowId, e); - queueDAO.remove(WorkflowExecutor.deciderQueue, workflowId); + queueDAO.remove(WorkflowExecutor.DECIDER_QUEUE, workflowId); } } catch (Exception e) { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java index d43115d491..6027ac42ea 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java @@ -124,11 +124,15 @@ String getDynamicTaskName(Map taskInput, String taskNameParam) t */ @VisibleForTesting TaskDef getDynamicTaskDefinition(WorkflowTask taskToSchedule) throws TerminateWorkflowException { //TODO this is a common pattern in code base can be moved to DAO - return Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", - taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); + TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); + if (taskDefinition == null) { + taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", + taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); + } + return taskDefinition; } } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java index a6f71339d5..51e9aee7fb 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java @@ -69,11 +69,14 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter int retryCount = taskMapperContext.getRetryCount(); String retriedTaskId = taskMapperContext.getRetryTaskId(); - TaskDef taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); + TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); + if (taskDefinition == null) { + taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. 
Cannot find task by name %s in the task definitions", taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); + } Map input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, taskDefinition, taskMapperContext.getTaskId()); Task simpleTask = new Task(); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java index 7518ddad58..ebda28ef6b 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java @@ -66,11 +66,14 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter String taskId = taskMapperContext.getTaskId(); int retryCount = taskMapperContext.getRetryCount(); - TaskDef taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); + TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); + if (taskDefinition == null) { + taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); + } Map input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition); diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java index 88aee0f664..bfed855653 100644 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java @@ -159,28 +159,6 @@ public void registerWorkflowDef(WorkflowDef def) { metadata.create(def); } - /** - * Creates a workflow based on a definition considering stored and ephemeral definitions - * Improvement: Add transactional support - * - * @param def Ephemeral workflow definition - */ - public void registerEphemeralWorkflowDef(WorkflowDef def) { - - this.registerWorkflowDef(def); - - List ephemeralTaskDefinitions = def.getTasks().stream() - .filter(workflowTask -> (workflowTask.isUserDefined() && workflowTask.getTaskDef() != null)) - .map(workflowTask -> workflowTask.getTaskDef()) - .collect(Collectors.toList()); - - // TODO: add ability to batch read/write tasks on behalf of performance increase - for (TaskDef ephemeralTaskDefinition : ephemeralTaskDefinitions) { - metadata.createTaskDef(ephemeralTaskDefinition); - } - - } - /** * * @param eventHandler Event handler to be added. 
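A minimal sketch of how a client could exercise the embedded definitions added
in this patch (illustrative names and values only; StartWorkflowRequest,
WorkflowDef, WorkflowTask and TaskDef are the common-model classes touched
above, and the "encode" task and "adhoc_media_workflow" names are hypothetical):

    // Hypothetical example: an ephemeral workflow with one embedded task.
    TaskDef taskDef = new TaskDef();
    taskDef.setName("encode");                        // illustrative task name

    WorkflowTask workflowTask = new WorkflowTask();
    workflowTask.setName("encode");
    workflowTask.setTaskReferenceName("encode_ref");
    workflowTask.setType("SIMPLE");
    workflowTask.setTaskDefinition(taskDef);          // embedded (ephemeral) task definition

    WorkflowDef workflowDef = new WorkflowDef();
    workflowDef.setName("adhoc_media_workflow");      // illustrative workflow name
    workflowDef.getTasks().add(workflowTask);

    StartWorkflowRequest request = new StartWorkflowRequest();
    request.setName(workflowDef.getName());
    request.setWorkflowDef(workflowDef);              // embedded (ephemeral) workflow definition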
diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 474e857fba..3cabbe1f13 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -44,7 +44,6 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; -import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -829,7 +828,7 @@ public WorkflowDef fromProto(WorkflowDefPb.WorkflowDef from) { to.setName( from.getName() ); to.setDescription( from.getDescription() ); to.setVersion( from.getVersion() ); - to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(LinkedList::new)) ); + to.setTasks( from.getTasksList().stream().map(this::fromProto).collect(Collectors.toCollection(ArrayList::new)) ); to.setInputParameters( from.getInputParametersList().stream().collect(Collectors.toCollection(ArrayList::new)) ); Map outputParametersMap = new HashMap(); for (Map.Entry pair : from.getOutputParametersMap().entrySet()) { @@ -892,8 +891,8 @@ public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { to.setSink( from.getSink() ); } to.setOptional( from.isOptional() ); - if (from.getTaskDef() != null) { - to.setTaskDef( toProto( from.getTaskDef() ) ); + if (from.getTaskDefinition() != null) { + to.setTaskDefinition( toProto( from.getTaskDefinition() ) ); } return to.build(); } @@ -928,8 +927,8 @@ public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { to.setJoinOn( from.getJoinOnList().stream().collect(Collectors.toCollection(ArrayList::new)) ); to.setSink( from.getSink() ); to.setOptional( from.getOptional() ); - if (from.hasTaskDef()) { - to.setTaskDef( fromProto( from.getTaskDef() ) ); + if (from.hasTaskDefinition()) { + to.setTaskDefinition( fromProto( from.getTaskDefinition() ) ); } return to; } diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto index 2ca2eaf891..22c2c69874 100644 --- a/grpc/src/main/proto/model/workflowtask.proto +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -43,5 +43,5 @@ message WorkflowTask { repeated string join_on = 16; string sink = 17; bool optional = 18; - TaskDef task_def = 19; + TaskDef task_definition = 19; } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java index b1cdbb5765..3a8214027b 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/AdminResource.java @@ -121,7 +121,7 @@ public List view(@PathParam("tasktype") String taskType, @DefaultValue("0 @Consumes({ MediaType.WILDCARD }) @Produces({ MediaType.TEXT_PLAIN }) public String requeueSweep(@PathParam("workflowId") String workflowId) throws Exception { - boolean pushed = queue.pushIfNotExists(WorkflowExecutor.deciderQueue, workflowId, config.getSweepFrequency()); + boolean pushed = queue.pushIfNotExists(WorkflowExecutor.DECIDER_QUEUE, workflowId, config.getSweepFrequency()); return pushed + "." 
+ workflowId; } diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index b799ac207e..0132795c84 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java @@ -86,9 +86,7 @@ public String startWorkflow(StartWorkflowRequest request) { if (Strings.isNullOrEmpty(request.getName())) { throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a workflow."); } - if (request.getWorkflowDef() != null) { - metadata.registerEphemeralWorkflowDef(request.getWorkflowDef()); - } + return executor.startWorkflow( request.getName(), request.getVersion(), @@ -105,21 +103,17 @@ public String startWorkflow(StartWorkflowRequest request) { @Produces({MediaType.TEXT_PLAIN}) @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking.") public String startWorkflow(@PathParam("name") String name, StartWorkflowRequest request) { - String workflowName = request.getName(); - if (Strings.isNullOrEmpty(name) || (!Strings.isNullOrEmpty(workflowName) && !name.equals(workflowName))) { - throw new ApplicationException( - Code.INVALID_INPUT, - "Cannot run workflow with name inconsistencies. " + - "Make sure the name on the url and the name on the payload match." - ); - } - - if (request.getWorkflowDef() != null) { - metadata.registerEphemeralWorkflowDef(request.getWorkflowDef()); + if (request == null) { + throw new ApplicationException(Code.INVALID_INPUT, "Payload for starting a new workflow is needed"); } - - return executor.startWorkflow(name, request.getVersion(), request.getCorrelationId(), - request.getInput(), null, request.getTaskToDomain(), request.getWorkflowDef()); + return executor.startWorkflow( + name, + request.getVersion(), + request.getCorrelationId(), + request.getInput(), + null, + request.getTaskToDomain(), + request.getWorkflowDef()); } @GET diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java index 12a73553df..b80f622c7d 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java @@ -2,11 +2,7 @@ import java.sql.Connection; import java.text.SimpleDateFormat; -import java.util.ArrayList; -import java.util.Comparator; -import java.util.Date; -import java.util.LinkedList; -import java.util.List; +import java.util.*; import java.util.stream.Collectors; import javax.inject.Inject; @@ -35,13 +31,13 @@ public class MySQLExecutionDAO extends MySQLBaseDAO implements ExecutionDAO { private IndexDAO indexer; - private MetadataDAO metadata; + private MetadataDAO metadataDAO; @Inject - public MySQLExecutionDAO(IndexDAO indexer, MetadataDAO metadata, ObjectMapper om, DataSource dataSource) { + public MySQLExecutionDAO(IndexDAO indexer, MetadataDAO metadataDAO, ObjectMapper om, DataSource dataSource) { super(om, dataSource); this.indexer = indexer; - this.metadata = metadata; + this.metadataDAO = metadataDAO; } private static String dateStr(Long timeInMs) { @@ -130,7 +126,9 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = 
metadata.getTaskDef(task.getTaskDefName()); + TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) + .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); + if (taskDef == null) { return false; } @@ -502,7 +500,8 @@ private void updateTask(Connection connection, Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = metadata.getTaskDef(task.getTaskDefName()); + TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) + .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); if (taskDef != null && taskDef.concurrencyLimit() > 0) { boolean inProgress = task.getStatus() != null && task.getStatus().equals(Task.Status.IN_PROGRESS); diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index be78b548a1..795d0a3580 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -146,7 +146,7 @@ public List getTasks(String taskDefName, String startKey, int count) { @Override public List createTasks(List tasks) { - List created = new LinkedList(); + List tasksCreated = new LinkedList<>(); for (Task task : tasks) { validate(task); @@ -172,10 +172,10 @@ public List createTasks(List tasks) { inProgressTaskKey, task.getWorkflowInstanceId(), task.getTaskId(), task.getTaskType()); updateTask(task); - created.add(task); + tasksCreated.add(task); } - return created; + return tasksCreated; } @@ -194,7 +194,8 @@ public void updateTask(Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = metadataDA0.getTaskDef(task.getTaskDefName()); + TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) + .orElse(metadataDA0.getTaskDef(task.getTaskDefName())); if(taskDef != null && taskDef.concurrencyLimit() > 0) { @@ -329,7 +330,7 @@ public List getTasks(List taskIds) { recordRedisDaoPayloadSize("getTask", jsonString.length(), task.getTaskType(), task.getWorkflowType()); return task; }) - .collect(Collectors.toCollection(LinkedList::new)); + .collect(Collectors.toList()); } @Override diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java index 9aff2fe619..b54010989f 100644 --- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java @@ -103,9 +103,7 @@ private static void createKitchenSink(int port) throws Exception { Client client = Client.create(); ObjectMapper objectMapper = new ObjectMapper(); - /* - * Kitchensink example (stored workflow with stored tasks) - */ + List taskDefs = new LinkedList<>(); for (int i = 0; i < 40; i++) { taskDefs.add(new TaskDef("task_" + i, "task_" + i, 1, 0)); @@ -114,6 +112,9 @@ private static void createKitchenSink(int port) throws Exception { client.resource("http://localhost:" + port + "/api/metadata/taskdefs").type(MediaType.APPLICATION_JSON).post(objectMapper.writeValueAsString(taskDefs)); + /* + * Kitchensink example (stored workflow with stored tasks) + */ InputStream stream = Main.class.getResourceAsStream("/kitchensink.json"); client.resource("http://localhost:" + port + "/api/metadata/workflow").type(MediaType.APPLICATION_JSON).post(stream); @@ -134,7 +135,6 @@ private 
static void createKitchenSink(int port) throws Exception { client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkStoredTasks").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); logger.info("Ephemeral Kitchen sink workflow with stored tasks is created!"); - /* * Kitchensink example with ephemeral workflow and ephemeral tasks */ diff --git a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json index 8dc1ff3191..76051ed624 100644 --- a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json +++ b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json @@ -16,7 +16,7 @@ } }, "type": "SIMPLE", - "taskDef": { + "taskDefinition": { "ownerApp": null, "createTime": null, "updateTime": null, @@ -73,7 +73,7 @@ "oddEven": "${task_2.output.oddEven}" }, "type": "SIMPLE", - "taskDef": { + "taskDefinition": { "ownerApp": null, "createTime": null, "updateTime": null, @@ -121,7 +121,7 @@ "name": "task_100010", "taskReferenceName": "task_100010", "type": "SIMPLE", - "taskDef": { + "taskDefinition": { "ownerApp": null, "createTime": null, "updateTime": null, @@ -160,7 +160,7 @@ "name": "task_100011", "taskReferenceName": "task_100011", "type": "SIMPLE", - "taskDef": { + "taskDefinition": { "ownerApp": null, "createTime": null, "updateTime": null, @@ -227,7 +227,7 @@ "workflowIds": "${get_es_1.output..workflowId}" }, "type": "SIMPLE", - "taskDef": { + "taskDefinition": { "ownerApp": null, "createTime": null, "updateTime": null, diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index f7819bca2a..563e5bd266 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -2931,7 +2931,7 @@ public void testTimeout() throws Exception { assertNotNull(wfid); //Ensure that we have a workflow queued up for evaluation here... - long size = queueDAO.getSize(WorkflowExecutor.deciderQueue); + long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE); assertEquals(1, size); // If we get the full workflow here then, last task should be completed and the next task should be scheduled @@ -2947,7 +2947,7 @@ public void testTimeout() throws Exception { //Ensure that we have a workflow queued up for evaluation here... 
- size = queueDAO.getSize(WorkflowExecutor.deciderQueue); + size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE); assertEquals(1, size); @@ -2978,7 +2978,7 @@ public void testTimeout() throws Exception { assertEquals(Status.TIMED_OUT, es.getTasks().get(1).getStatus()); assertEquals(WorkflowStatus.TIMED_OUT, es.getStatus()); - assertEquals(1, queueDAO.getSize(WorkflowExecutor.deciderQueue)); + assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE)); taskDef.setTimeoutSeconds(0); taskDef.setRetryCount(RETRY_COUNT); From be4360b2272203f313283c2dbd095628ca89bbb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 30 Jul 2018 09:25:03 -0700 Subject: [PATCH 108/163] Updated way of retrieving elastic search instance type configuration --- .../netflix/conductor/core/config/Configuration.java | 1 - .../elasticsearch/ElasticSearchConfiguration.java | 10 +++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index ffb789efa1..920aeef36c 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -166,5 +166,4 @@ default List getAdditionalModules() { enum DB { REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL } - } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 7f1866edf6..bf14b9c294 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -1,5 +1,6 @@ package com.netflix.conductor.elasticsearch; +import com.google.common.base.Strings; import com.netflix.conductor.core.config.Configuration; import java.net.URI; @@ -81,9 +82,12 @@ default String getEmbeddedSettingsFile() { } default ElasticSearchInstanceType getElasticSearchInstanceType() { - return ElasticSearchInstanceType.valueOf( - getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE.name()).toUpperCase() - ); + ElasticSearchInstanceType elasticSearchInstanceType = ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE; + String instanceTypeConfig = getProperty(ELASTICSEARCH_PROPERTY_NAME, ""); + if (!Strings.isNullOrEmpty(instanceTypeConfig)) { + elasticSearchInstanceType = ElasticSearchInstanceType.valueOf(instanceTypeConfig.toUpperCase()); + } + return elasticSearchInstanceType; } enum ElasticSearchInstanceType { From f3aab259ee4a6191718fd1fe92be9ce0deb8de57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 30 Jul 2018 15:56:40 -0700 Subject: [PATCH 109/163] Updated WorkflowResource to point to the right definition of workflow executor - On workflow execution creation, the resource checks for embedded workflow definition before calling the workflow executor --- .../core/execution/WorkflowExecutor.java | 34 +++++++++---- .../core/execution/tasks/SubWorkflow.java | 2 +- .../server/service/WorkflowServiceImpl.java | 20 ++++++-- .../server/resources/WorkflowResource.java | 50 ++++++++++--------- .../conductor/jetty/server/JettyServer.java | 4 +- ...k-ephemeralWorkflowWithEphemeralTasks.json | 1 + ...Sink-ephemeralWorkflowWithStoredTasks.json | 3 +- 7 files changed, 74 
insertions(+), 40 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 6062af0490..dd856ce25d 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -142,8 +142,7 @@ public String startWorkflow( String correlationId, Map input, String event, - Map taskToDomain, - WorkflowDef workflowDef + Map taskToDomain ) { return startWorkflow( name, @@ -153,8 +152,7 @@ public String startWorkflow( null, null, event, - taskToDomain, - workflowDef + taskToDomain ); } @@ -178,11 +176,31 @@ public String startWorkflow( parentWorkflowId, parentWorkflowTaskId, event, - null, null ); } + /** + * @throws ApplicationException + */ + public String startWorkflow( + WorkflowDef workflowDefinition, + Map workflowInput, + String correlationId, + String event, + Map taskToDomain + ) { + return startWorkflow( + workflowDefinition, + workflowInput, + correlationId, + null, + null, + event, + taskToDomain + ); + } + /** * @throws ApplicationException */ @@ -194,13 +212,11 @@ public String startWorkflow( String parentWorkflowId, String parentWorkflowTaskId, String event, - Map taskToDomain, - WorkflowDef workflowDef + Map taskToDomain ) { Optional potentialDef = - workflowDef != null ? Optional.of(workflowDef) : - version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); + version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); //Check if the workflow definition is valid WorkflowDef workflowDefinition = potentialDef diff --git a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java index a26be43656..79afced249 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/tasks/SubWorkflow.java @@ -59,7 +59,7 @@ public void start(Workflow workflow, Task task, WorkflowExecutor provider) throw try { - String subWorkflowId = provider.startWorkflow(name, version, wfInput, correlationId, workflow.getWorkflowId(), task.getTaskId(), null, workflow.getTaskToDomain(), null); + String subWorkflowId = provider.startWorkflow(name, version, wfInput, correlationId, workflow.getWorkflowId(), task.getTaskId(), null, workflow.getTaskToDomain()); task.getOutputData().put("subWorkflowId", subWorkflowId); task.getInputData().put("subWorkflowId", subWorkflowId); task.setStatus(Status.IN_PROGRESS); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 607078eafa..64f6413ac5 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -54,9 +54,23 @@ public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, final StartWorkflowRequest request = PROTO_MAPPER.fromProto(pbRequest); try { - String id = executor.startWorkflow( - request.getName(), GRPC_HELPER.optional(request.getVersion()), request.getCorrelationId(), - request.getInput(), null, request.getTaskToDomain(), request.getWorkflowDef()); + 
String id; + if (request.getWorkflowDef() == null) { + id = executor.startWorkflow( + request.getName(), + GRPC_HELPER.optional(request.getVersion()), + request.getCorrelationId(), + request.getInput(), + null, + request.getTaskToDomain()); + } else { + id = executor.startWorkflow( + request.getWorkflowDef(), + request.getInput(), + request.getCorrelationId(), + null, + request.getTaskToDomain()); + } response.onNext(WorkflowServicePb.StartWorkflowResponse.newBuilder() .setWorkflowId(id) .build() diff --git a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java index 0132795c84..cade607a87 100644 --- a/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java +++ b/jersey/src/main/java/com/netflix/conductor/server/resources/WorkflowResource.java @@ -19,6 +19,7 @@ import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; @@ -84,36 +85,39 @@ public WorkflowResource(WorkflowExecutor executor, ExecutionService service, Met @ApiOperation("Start a new workflow with StartWorkflowRequest, which allows task to be executed in a domain") public String startWorkflow(StartWorkflowRequest request) { if (Strings.isNullOrEmpty(request.getName())) { - throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a workflow."); + throw new ApplicationException(Code.INVALID_INPUT, "A name is required to start a workflow."); + } + + WorkflowDef workflowDefinition = request.getWorkflowDef(); + if (workflowDefinition == null) { + return executor.startWorkflow( + request.getName(), + request.getVersion(), + request.getCorrelationId(), + request.getInput(), + null, + request.getTaskToDomain() + ); + } else { + return executor.startWorkflow( + request.getWorkflowDef(), + request.getInput(), + request.getCorrelationId(), + null, + request.getTaskToDomain() + ); } - return executor.startWorkflow( - request.getName(), - request.getVersion(), - request.getCorrelationId(), - request.getInput(), - null, - request.getTaskToDomain(), - request.getWorkflowDef() - ); } @POST @Path("/{name}") @Produces({MediaType.TEXT_PLAIN}) - @ApiOperation("Start a new workflow. Returns the ID of the workflow instance that can be later used for tracking.") - public String startWorkflow(@PathParam("name") String name, StartWorkflowRequest request) { - if (request == null) { - throw new ApplicationException(Code.INVALID_INPUT, "Payload for starting a new workflow is needed"); - } - return executor.startWorkflow( - name, - request.getVersion(), - request.getCorrelationId(), - request.getInput(), - null, - request.getTaskToDomain(), - request.getWorkflowDef()); + @ApiOperation("Start a new workflow. 
Returns the ID of the workflow instance that can be later used for tracking") + public String startWorkflow( + @PathParam("name") String name, @QueryParam("version") Integer version, + @QueryParam("correlationId") String correlationId, Map input) { + return executor.startWorkflow(name, version, correlationId, input, null); } @GET diff --git a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java index b54010989f..2f026e3611 100644 --- a/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java +++ b/server/src/main/java/com/netflix/conductor/jetty/server/JettyServer.java @@ -132,14 +132,14 @@ private static void createKitchenSink(int port) throws Exception { * Kitchensink example with ephemeral workflow and stored tasks */ InputStream ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithStoredTasks.json"); - client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkStoredTasks").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); + client.resource("http://localhost:" + port + "/api/workflow/").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); logger.info("Ephemeral Kitchen sink workflow with stored tasks is created!"); /* * Kitchensink example with ephemeral workflow and ephemeral tasks */ ephemeralInputStream = Main.class.getResourceAsStream("/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json"); - client.resource("http://localhost:" + port + "/api/workflow/ephemeralKitchenSinkEphemeralTasks").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); + client.resource("http://localhost:" + port + "/api/workflow/").type(MediaType.APPLICATION_JSON).post(ephemeralInputStream); logger.info("Ephemeral Kitchen sink workflow with ephemeral tasks is created!"); } diff --git a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json index 76051ed624..6901a577e9 100644 --- a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json +++ b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithEphemeralTasks.json @@ -1,4 +1,5 @@ { + "name": "kitchenSink-ephemeralWorkflowWithEphemeralTasks", "workflowDef": { "name": "ephemeralKitchenSinkEphemeralTasks", "description": "Kitchensink ephemeral workflow with ephemeral tasks", diff --git a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json index 4fc9c81161..d47081d519 100644 --- a/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json +++ b/server/src/main/resources/kitchenSink-ephemeralWorkflowWithStoredTasks.json @@ -1,5 +1,5 @@ { - + "name": "kitchenSink-ephemeralWorkflowWithStoredTasks", "workflowDef": { "name": "ephemeralKitchenSinkStoredTasks", "description": "kitchensink workflow definition", @@ -160,7 +160,6 @@ }, "schemaVersion": 2 }, - "input": { "task2Name": "task_5" } From ebed6ffb3edecbf24ed31ebbf8141668c91bf60c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 30 Jul 2018 17:53:57 -0700 Subject: [PATCH 110/163] Updated tests related to ephemeral workflows and tasks use cases --- .../conductor/common/metadata/tasks/Task.java | 11 +++++++++++ .../conductor/contribs/http/TestHttpTask.java | 12 ++++++------ .../conductor/core/execution/DeciderService.java | 2 +- 
.../conductor/core/execution/TestDeciderService.java | 2 +- .../conductor/dao/mysql/MySQLExecutionDAO.java | 6 +++--- .../conductor/dao/dynomite/RedisExecutionDAO.java | 9 +++++++-- 6 files changed, 29 insertions(+), 13 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index 1fddb0a807..2c30236608 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -593,6 +593,17 @@ public void setOutputMessage(Any outputMessage) { this.outputMessage = outputMessage; } + /** + * @return the task definition associated to the running task if available + */ + public TaskDef getTaskDefinition() { + TaskDef taskDefinition = null; + if (this.getWorkflowTask() != null) { + taskDefinition = this.getWorkflowTask().getTaskDefinition(); + } + return taskDefinition; + } + public Task copy() { Task copy = new Task(); copy.setCallbackAfterSeconds(callbackAfterSeconds); diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index 236e402ac1..b79bb42b58 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -283,14 +283,14 @@ public void testOptional() throws Exception { assertEquals(HttpTask.MISSING_REQUEST, task.getReasonForIncompletion()); assertTrue(!task.getStatus().isSuccessful()); - WorkflowTask wft = new WorkflowTask(); - wft.setOptional(true); - wft.setName("HTTP"); - wft.setWorkflowTaskType(Type.USER_DEFINED); - wft.setTaskReferenceName("t1"); + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setOptional(true); + workflowTask.setName("HTTP"); + workflowTask.setWorkflowTaskType(Type.USER_DEFINED); + workflowTask.setTaskReferenceName("t1"); WorkflowDef def = new WorkflowDef(); - def.getTasks().add(wft); + def.getTasks().add(workflowTask); Workflow workflow = new Workflow(); workflow.setWorkflowDefinition(def); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index b03d37f290..68169d6d18 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -140,7 +140,7 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa } String taskDefName = pendingTask.getTaskDefName(); - TaskDef taskDefinition = Optional.ofNullable(pendingTask.getWorkflowTask().getTaskDefinition()) + TaskDef taskDefinition = Optional.ofNullable(pendingTask.getTaskDefinition()) .orElse(metadataDAO.getTaskDef(taskDefName)); if (taskDefinition != null) { diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index 90ac876bcd..17f8bc626b 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -628,7 +628,7 @@ public void testDecideSuccessfulWorkflow() throws Exception { DeciderOutcome deciderOutcome = deciderService.decide(workflow); assertNotNull(deciderOutcome); - 
assertTrue(workflow.getTaskByRefName("s1").isExecuted()); +System.out.println(workflow); assertFalse(workflow.getTaskByRefName("s1").isRetried()); assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); assertEquals("s1", deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java index b80f622c7d..46f5fbb6aa 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java @@ -126,8 +126,8 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) - .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); + TaskDef taskDef = Optional.ofNullable(task.getTaskDefinition()) + .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); if (taskDef == null) { return false; @@ -500,7 +500,7 @@ private void updateTask(Connection connection, Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) + TaskDef taskDef = Optional.ofNullable(task.getTaskDefinition()) .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); if (taskDef != null && taskDef.concurrencyLimit() > 0) { diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index 795d0a3580..fd701c4d94 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -194,8 +194,13 @@ public void updateTask(Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) - .orElse(metadataDA0.getTaskDef(task.getTaskDefName())); + TaskDef taskDef; + if (task.getWorkflowTask() != null) { + taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) + .orElse(metadataDA0.getTaskDef(task.getTaskDefName())); + } else { + taskDef = metadataDA0.getTaskDef(task.getTaskDefName()); + } if(taskDef != null && taskDef.concurrencyLimit() > 0) { From 904c2b02e7e6efa374869054886fb36ee17e479c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 30 Jul 2018 20:00:59 -0700 Subject: [PATCH 111/163] Added integration tests for ephemeral workflows and tasks - Integration test for an ephemeral workflow with stored tasks - Integration test for an ephemeral workflow with ephemeral tasks --- .../tests/integration/End2EndTests.java | 176 ++++++++++++++---- 1 file changed, 135 insertions(+), 41 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 58c510c273..30a6d96b38 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -27,6 +27,7 @@ import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; import 
com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; @@ -47,6 +48,7 @@ import java.util.HashMap; import java.util.LinkedList; import java.util.List; +import java.util.Optional; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -57,10 +59,13 @@ * */ public class End2EndTests { - private static TaskClient tc; - private static WorkflowClient wc; + private static TaskClient taskClient; + private static WorkflowClient workflowClient; private static EmbeddedElasticSearch search; + private static final int SERVER_PORT = 8080; + private static final String TASK_DEFINITION_PREFIX = "task_"; + @BeforeClass public static void setup() throws Exception { TestEnvironment.setup(); @@ -73,14 +78,14 @@ public static void setup() throws Exception { search = serverInjector.getInstance(EmbeddedElasticSearchProvider.class).get().get(); search.start(); - JettyServer server = new JettyServer(8080, false); + JettyServer server = new JettyServer(SERVER_PORT, false); server.start(); - tc = new TaskClient(); - tc.setRootURI("http://localhost:8080/api/"); + taskClient = new TaskClient(); + taskClient.setRootURI("http://localhost:8080/api/"); - wc = new WorkflowClient(); - wc.setRootURI("http://localhost:8080/api/"); + workflowClient = new WorkflowClient(); + workflowClient.setRootURI("http://localhost:8080/api/"); } @AfterClass @@ -91,17 +96,11 @@ public static void teardown() throws Exception { @Test public void testAll() throws Exception { - assertNotNull(tc); - List defs = new LinkedList<>(); - for (int i = 0; i < 5; i++) { - TaskDef def = new TaskDef("t" + i, "task " + i); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - defs.add(def); - } - tc.registerTaskDefs(defs); - List found = tc.getTaskDef(); + List definitions = createAndRegisterTaskDefinitions("t", 5); + + List found = taskClient.getTaskDef(); assertNotNull(found); - assertEquals(defs.size(), found.size()); + assertEquals(definitions.size(), found.size()); WorkflowDef def = new WorkflowDef(); def.setName("test"); @@ -119,56 +118,56 @@ public void testAll() throws Exception { def.getTasks().add(t0); def.getTasks().add(t1); - wc.registerWorkflow(def); - WorkflowDef foundd = wc.getWorkflowDef(def.getName(), null); - assertNotNull(foundd); - assertEquals(def.getName(), foundd.getName()); - assertEquals(def.getVersion(), foundd.getVersion()); + workflowClient.registerWorkflow(def); + WorkflowDef workflowDefinitionFromSystem = workflowClient.getWorkflowDef(def.getName(), null); + assertNotNull(workflowDefinitionFromSystem); + assertEquals(def.getName(), workflowDefinitionFromSystem.getName()); + assertEquals(def.getVersion(), workflowDefinitionFromSystem.getVersion()); String correlationId = "test_corr_id"; - String workflowId = wc.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); + String workflowId = workflowClient.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); assertNotNull(workflowId); System.out.println(workflowId); - Workflow wf = wc.getWorkflow(workflowId, false); + Workflow wf = workflowClient.getWorkflow(workflowId, false); assertEquals(0, wf.getTasks().size()); assertEquals(workflowId, wf.getWorkflowId()); - wf = wc.getWorkflow(workflowId, true); + wf = 
workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); assertEquals(workflowId, wf.getWorkflowId()); - List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); + List runningIds = workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); assertNotNull(runningIds); assertEquals(1, runningIds.size()); assertEquals(workflowId, runningIds.get(0)); - List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); + List polled = taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100); assertNotNull(polled); assertEquals(0, polled.size()); - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertEquals(1, polled.size()); assertEquals(t0.getName(), polled.get(0).getTaskDefName()); Task task = polled.get(0); - Boolean acked = tc.ack(task.getTaskId(), "test"); + Boolean acked = taskClient.ack(task.getTaskId(), "test"); assertNotNull(acked); assertTrue(acked.booleanValue()); task.getOutputData().put("key1", "value1"); task.setStatus(Status.COMPLETED); - tc.updateTask(new TaskResult(task), task.getTaskType()); + taskClient.updateTask(new TaskResult(task), task.getTaskType()); - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertTrue(polled.toString(), polled.isEmpty()); - wf = wc.getWorkflow(workflowId, true); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(2, wf.getTasks().size()); @@ -177,42 +176,137 @@ public void testAll() throws Exception { assertEquals(Task.Status.COMPLETED, wf.getTasks().get(0).getStatus()); assertEquals(Task.Status.SCHEDULED, wf.getTasks().get(1).getStatus()); - Task taskById = tc.getTaskDetails(task.getTaskId()); + Task taskById = taskClient.getTaskDetails(task.getTaskId()); assertNotNull(taskById); assertEquals(task.getTaskId(), taskById.getTaskId()); - List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); + List getTasks = taskClient.getPendingTasksByType(t0.getName(), null, 1); assertNotNull(getTasks); assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); + getTasks = taskClient.getPendingTasksByType(t1.getName(), null, 1); assertNotNull(getTasks); assertEquals(1, getTasks.size()); - Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); + Task pending = taskClient.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); assertNotNull(pending); assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); assertEquals(workflowId, pending.getWorkflowInstanceId()); Thread.sleep(1000); - SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); + SearchResult searchResult = workflowClient.search("workflowType='" + def.getName() + "'"); assertNotNull(searchResult); assertEquals(1, searchResult.getTotalHits()); - wc.terminateWorkflow(workflowId, "terminate reason"); - wf = wc.getWorkflow(workflowId, true); + workflowClient.terminateWorkflow(workflowId, "terminate reason"); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); 
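// Terminating the workflow above should leave it in TERMINATED status; the restart
// that follows is expected to re-run it from the first task, returning it to RUNNING.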
assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); - wc.restart(workflowId); - wf = wc.getWorkflow(workflowId, true); + workflowClient.restart(workflowId); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); + } + + @Test + public void testEphemeralWorkflowsWithStoredTasks() throws Exception { + List definitions = createAndRegisterTaskDefinitions("storedTaskDef", 5); + + List found = taskClient.getTaskDef(); + assertNotNull(found); + assertTrue(definitions.size() > 0); + + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName("testEphemeralWorkflow"); + + WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); + WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflow"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = workflowClient.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + } + + @Test + public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName("testEphemeralWorkflowWithEphemeralTasks"); + + WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); + TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); + workflowTask2.setTaskDefinition(taskDefinition2); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = workflowClient.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + + List ephemeralTasks = ephemeralWorkflow.getTasks(); + assertEquals(2, ephemeralTasks.size()); + for (WorkflowTask ephemeralTask : ephemeralTasks) { + assertNotNull(ephemeralTask.getTaskDefinition()); + } + + } + + private WorkflowTask createWorkflowTask(String name) { + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName(name); + workflowTask.setWorkflowTaskType(Type.SIMPLE); + workflowTask.setTaskReferenceName(name); + return workflowTask; + } + private TaskDef createTaskDefinition(String name) { + TaskDef taskDefinition = new TaskDef(); + taskDefinition.setName(name); + return taskDefinition; + } + + // Helper method for creating task definitions on the server + private List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int 
numberOfTaskDefinitions) { + assertNotNull(taskClient); + String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); + List definitions = new LinkedList<>(); + for (int i = 0; i < numberOfTaskDefinitions; i++) { + TaskDef def = new TaskDef(prefix + i, "task " + i + "description"); + def.setTimeoutPolicy(TimeoutPolicy.RETRY); + definitions.add(def); + } + taskClient.registerTaskDefs(definitions); + return definitions; } } From a5f2aaac3f97d56d4fe8c6df33fb80c6eb06ef5c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 25 Jul 2018 18:01:20 -0700 Subject: [PATCH 112/163] Decoupling elastic search instance configuration from db namespace - Added property 'elasticsearch' so we are able to run an external or inmemory instance regardless of the option chosen for db --- .../conductor/core/config/Configuration.java | 15 +++++++++++++++ docker/server/config/config-local.properties | 8 ++++++++ docker/server/config/config-mysql-grpc.properties | 8 ++++++++ docker/server/config/config-mysql.properties | 8 ++++++++ docker/server/config/config.properties | 8 ++++++++ .../es5/EmbeddedElasticSearchV5Provider.java | 2 +- 6 files changed, 48 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index 920aeef36c..42138741f3 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -27,6 +27,9 @@ public interface Configuration { String DB_PROPERTY_NAME = "db"; String DB_DEFAULT_VALUE = "memory"; + String ELASTICSEARCH_PROPERTY_NAME = "elasticsearch"; + String ELASTICSEARCH_DEFAULT_VALUE = "memory"; + String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds"; int SWEEP_FREQUENCY_DEFAULT_VALUE = 30; @@ -66,6 +69,14 @@ default String getDBString() { return getProperty(DB_PROPERTY_NAME, DB_DEFAULT_VALUE).toUpperCase(); } + default ELASTICSEARCH getElasticSearchType() { + return ELASTICSEARCH.valueOf(getElasticSearchString()); + } + + default String getElasticSearchString() { + return getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_DEFAULT_VALUE).toUpperCase(); + } + /** * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows. */ @@ -166,4 +177,8 @@ default List getAdditionalModules() { enum DB { REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL } + + enum ELASTICSEARCH { + MEMORY, EXTERNAL + } } diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties index b59ea05a26..640f9c6ff4 100755 --- a/docker/server/config/config-local.properties +++ b/docker/server/config/config-local.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=memory +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + # Dynomite Cluster details. 
# format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties index 57de7df2d3..651901077c 100755 --- a/docker/server/config/config-mysql-grpc.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=true db=mysql +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties index 52f2eb4b3c..d7b574143b 100755 --- a/docker/server/config/config-mysql.properties +++ b/docker/server/config/config-mysql.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=mysql +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties index c289e65964..23866501b9 100755 --- a/docker/server/config/config.properties +++ b/docker/server/config/config.properties @@ -11,6 +11,14 @@ conductor.grpc.server.enabled=false db=dynomite +# Elastic search instance. Possible values are memory and external. +# If not specified, the instance will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +elasticsearch=external + # Dynomite Cluster details. 
# format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java index b0b0ac24eb..5ee496a43e 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java @@ -29,6 +29,6 @@ public Optional get() { } private boolean isEmbedded() { - return configuration.getDB().equals(Configuration.DB.MEMORY); + return configuration.getElasticSearchType().equals(Configuration.ELASTICSEARCH.MEMORY); } } From 77dc06c82c4eb8c73476ba649f726fca5e7bbd27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 27 Jul 2018 15:22:45 -0700 Subject: [PATCH 113/163] Moved ElasticSearch instance type property to ElasticSearchConfiguration --- .../conductor/core/config/Configuration.java | 14 -------------- docker/server/config/config-local.properties | 15 +++++++-------- docker/server/config/config-mysql-grpc.properties | 9 +++++---- docker/server/config/config-mysql.properties | 9 +++++---- docker/server/config/config.properties | 15 +++++++-------- .../elasticsearch/ElasticSearchConfiguration.java | 14 ++++++++++++++ .../es5/EmbeddedElasticSearchV5Provider.java | 6 ++---- 7 files changed, 40 insertions(+), 42 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index 42138741f3..ffb789efa1 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -27,9 +27,6 @@ public interface Configuration { String DB_PROPERTY_NAME = "db"; String DB_DEFAULT_VALUE = "memory"; - String ELASTICSEARCH_PROPERTY_NAME = "elasticsearch"; - String ELASTICSEARCH_DEFAULT_VALUE = "memory"; - String SWEEP_FREQUENCY_PROPERTY_NAME = "decider.sweep.frequency.seconds"; int SWEEP_FREQUENCY_DEFAULT_VALUE = 30; @@ -69,14 +66,6 @@ default String getDBString() { return getProperty(DB_PROPERTY_NAME, DB_DEFAULT_VALUE).toUpperCase(); } - default ELASTICSEARCH getElasticSearchType() { - return ELASTICSEARCH.valueOf(getElasticSearchString()); - } - - default String getElasticSearchString() { - return getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_DEFAULT_VALUE).toUpperCase(); - } - /** * @return time frequency in seconds, at which the workflow sweeper should run to evaluate running workflows. */ @@ -178,7 +167,4 @@ enum DB { REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL } - enum ELASTICSEARCH { - MEMORY, EXTERNAL - } } diff --git a/docker/server/config/config-local.properties b/docker/server/config/config-local.properties index 640f9c6ff4..b72d893d56 100755 --- a/docker/server/config/config-local.properties +++ b/docker/server/config/config-local.properties @@ -11,14 +11,6 @@ conductor.grpc.server.enabled=false db=memory -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory -# -# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. -# external: Elastic search instance runs outside of the server. 
Data is persisted and does not get lost when -# the server dies. Useful for more stable environments like staging or production. -elasticsearch=external - # Dynomite Cluster details. # format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c @@ -37,6 +29,13 @@ queues.dynomite.threads=10 # For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. queues.dynomite.nonQuorum.port=22122 +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +workflow.elasticsearch.instanceType=external # Transport address to elasticsearch workflow.elasticsearch.url=localhost:9300 diff --git a/docker/server/config/config-mysql-grpc.properties b/docker/server/config/config-mysql-grpc.properties index 651901077c..e8aff4c808 100755 --- a/docker/server/config/config-mysql-grpc.properties +++ b/docker/server/config/config-mysql-grpc.properties @@ -11,15 +11,16 @@ conductor.grpc.server.enabled=true db=mysql -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory +jdbc.url=jdbc:mysql://mysql:3306/conductor + +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory # # memory: The instance is created in memory and lost when the server dies. Useful for development and testing. # external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when # the server dies. Useful for more stable environments like staging or production. -elasticsearch=external +workflow.elasticsearch.instanceType=external -jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config-mysql.properties b/docker/server/config/config-mysql.properties index d7b574143b..7c8a0f88b6 100755 --- a/docker/server/config/config-mysql.properties +++ b/docker/server/config/config-mysql.properties @@ -11,15 +11,16 @@ conductor.grpc.server.enabled=false db=mysql -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory +jdbc.url=jdbc:mysql://mysql:3306/conductor + +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory # # memory: The instance is created in memory and lost when the server dies. Useful for development and testing. # external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when # the server dies. Useful for more stable environments like staging or production. 
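# Example (illustrative, not part of this change): a local development box would
# typically pick the in-memory type, i.e.
# workflow.elasticsearch.instanceType=memory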
-elasticsearch=external +workflow.elasticsearch.instanceType=external -jdbc.url=jdbc:mysql://mysql:3306/conductor # Transport address to elasticsearch workflow.elasticsearch.url=elasticsearch:9300 diff --git a/docker/server/config/config.properties b/docker/server/config/config.properties index 23866501b9..d66187dd3f 100755 --- a/docker/server/config/config.properties +++ b/docker/server/config/config.properties @@ -11,14 +11,6 @@ conductor.grpc.server.enabled=false db=dynomite -# Elastic search instance. Possible values are memory and external. -# If not specified, the instance will be embedded in memory -# -# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. -# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when -# the server dies. Useful for more stable environments like staging or production. -elasticsearch=external - # Dynomite Cluster details. # format is host:port:rack separated by semicolon workflow.dynomite.cluster.hosts=dyno1:8102:us-east-1c @@ -40,6 +32,13 @@ queues.dynomite.threads=10 # For Dynomite, this is 22122 by default or the local redis-server port used by Dynomite. queues.dynomite.nonQuorum.port=22122 +# Elastic search instance type. Possible values are memory and external. +# If not specified, the instance type will be embedded in memory +# +# memory: The instance is created in memory and lost when the server dies. Useful for development and testing. +# external: Elastic search instance runs outside of the server. Data is persisted and does not get lost when +# the server dies. Useful for more stable environments like staging or production. +workflow.elasticsearch.instanceType=external # Transport address to elasticsearch workflow.elasticsearch.url=es:9300 diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 87a373793b..908d0b06ac 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -9,6 +9,9 @@ public interface ElasticSearchConfiguration extends Configuration { + String ELASTICSEARCH_PROPERTY_NAME = "workflow.elasticsearch.instanceType"; + ElasticSearchInstanceType ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE = ElasticSearchInstanceType.MEMORY; + String ELASTIC_SEARCH_URL_PROPERTY_NAME = "workflow.elasticsearch.url"; String ELASTIC_SEARCH_URL_DEFAULT_VALUE = "localhost:9300"; @@ -75,4 +78,15 @@ default String getEmbeddedHost() { default String getEmbeddedSettingsFile() { return getProperty(EMBEDDED_SETTINGS_FILE_PROPERTY_NAME, EMBEDDED_SETTINGS_FILE_DEFAULT_VALUE); } + + default ElasticSearchInstanceType getElasticSearchInstanceType() { + return ElasticSearchInstanceType.valueOf( + getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE.name()).toUpperCase() + ); + } + + enum ElasticSearchInstanceType { + MEMORY, EXTERNAL + } + } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java index 5ee496a43e..19dabec1b8 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java +++ 
b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/es5/EmbeddedElasticSearchV5Provider.java @@ -1,13 +1,11 @@ package com.netflix.conductor.elasticsearch.es5; -import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearch; import com.netflix.conductor.elasticsearch.EmbeddedElasticSearchProvider; -import java.util.Optional; - import javax.inject.Inject; +import java.util.Optional; public class EmbeddedElasticSearchV5Provider implements EmbeddedElasticSearchProvider { private final ElasticSearchConfiguration configuration; @@ -29,6 +27,6 @@ public Optional get() { } private boolean isEmbedded() { - return configuration.getElasticSearchType().equals(Configuration.ELASTICSEARCH.MEMORY); + return configuration.getElasticSearchInstanceType().equals(ElasticSearchConfiguration.ElasticSearchInstanceType.MEMORY); } } From 0f50eefb8ff3128bbe05c7e8ec2f7cd61f4b933a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 30 Jul 2018 09:25:03 -0700 Subject: [PATCH 114/163] Updated way of retrieving elastic search instance type configuration --- .../netflix/conductor/core/config/Configuration.java | 1 - .../elasticsearch/ElasticSearchConfiguration.java | 10 +++++++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java index ffb789efa1..920aeef36c 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/Configuration.java +++ b/core/src/main/java/com/netflix/conductor/core/config/Configuration.java @@ -166,5 +166,4 @@ default List getAdditionalModules() { enum DB { REDIS, DYNOMITE, MEMORY, REDIS_CLUSTER, MYSQL } - } diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 908d0b06ac..e77b82c4f4 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -1,5 +1,6 @@ package com.netflix.conductor.elasticsearch; +import com.google.common.base.Strings; import com.netflix.conductor.core.config.Configuration; import java.net.URI; @@ -80,9 +81,12 @@ default String getEmbeddedSettingsFile() { } default ElasticSearchInstanceType getElasticSearchInstanceType() { - return ElasticSearchInstanceType.valueOf( - getProperty(ELASTICSEARCH_PROPERTY_NAME, ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE.name()).toUpperCase() - ); + ElasticSearchInstanceType elasticSearchInstanceType = ELASTICSEARCH_INSTANCE_TYPE_DEFAULT_VALUE; + String instanceTypeConfig = getProperty(ELASTICSEARCH_PROPERTY_NAME, ""); + if (!Strings.isNullOrEmpty(instanceTypeConfig)) { + elasticSearchInstanceType = ElasticSearchInstanceType.valueOf(instanceTypeConfig.toUpperCase()); + } + return elasticSearchInstanceType; } enum ElasticSearchInstanceType { From 7af2fb4e252151959c638e525959cce2426e5c23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 31 Jul 2018 17:26:44 -0700 Subject: [PATCH 115/163] Added unit test for TestTask - Test for availability of the task definition within the Task object --- .../metadata/workflow/WorkflowTask.java | 29 ------------------- 
.../conductor/common/tasks/TestTask.java | 19 ++++++++++++ .../core/execution/TestDeciderOutcomes.java | 2 +- .../tests/integration/End2EndTests.java | 1 - 4 files changed, 20 insertions(+), 31 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 162e268fa3..a580f72cbb 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -545,35 +545,6 @@ public boolean has(String taskReferenceName){ } - public boolean has2(String taskReferenceName){ - - if(this.getTaskReferenceName().equals(taskReferenceName)){ - return true; - } - Type tt = Type.USER_DEFINED; - if(Type.isSystemTask(type)) { - tt = Type.valueOf(type); - } - - switch(tt){ - - case DECISION: - case FORK_JOIN: - for(List childx : children()){ - for(WorkflowTask child : childx){ - if(child.getTaskReferenceName().equals(taskReferenceName)){ - return true; - } - } - } - break; - default: - break; - } - return false; - - } - public WorkflowTask get(String taskReferenceName){ if(this.getTaskReferenceName().equals(taskReferenceName)){ diff --git a/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java b/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java index 9cf5d79478..99fd8f6731 100644 --- a/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java +++ b/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java @@ -19,11 +19,14 @@ package com.netflix.conductor.common.tasks; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import org.junit.Test; import com.netflix.conductor.common.metadata.tasks.Task; @@ -59,4 +62,20 @@ public void test() { } } } + + @Test + public void testTaskDefinitionIfAvailable() { + Task task = new Task(); + task.setStatus(Status.FAILED); + assertEquals(Status.FAILED, task.getStatus()); + assertNull(task.getWorkflowTask()); + assertNull(task.getTaskDefinition()); + + WorkflowTask workflowTask = new WorkflowTask(); + TaskDef taskDefinition = new TaskDef(); + workflowTask.setTaskDefinition(taskDefinition); + task.setWorkflowTask(workflowTask); + + assertEquals(taskDefinition, task.getTaskDefinition()); + } } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index c34316c299..f7ff8f18f5 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java @@ -298,7 +298,7 @@ public void testOptional() { } @Test - public void testOptionalWithDyammicFork() throws Exception { + public void testOptionalWithDynamicFork() throws Exception { WorkflowDef def = new WorkflowDef(); def.setName("test"); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 30a6d96b38..cbfdfa0d03 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java 
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -127,7 +127,6 @@ public void testAll() throws Exception { String correlationId = "test_corr_id"; String workflowId = workflowClient.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); assertNotNull(workflowId); - System.out.println(workflowId); Workflow wf = workflowClient.getWorkflow(workflowId, false); assertEquals(0, wf.getTasks().size()); From 9fc33e63b49eeacfd6c082cc5b20a81896785524 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 2 Aug 2018 13:40:31 +0200 Subject: [PATCH 116/163] Add basic/simple gRPC healthcheck based on the gRPC spec proto and the Netflix healthcheck library. --- grpc-server/build.gradle | 4 + .../conductor/grpc/server/GRPCModule.java | 5 + .../grpc/server/GRPCServerProvider.java | 12 +- .../server/service/HealthServiceImpl.java | 44 +++++++ .../server/service/HealthServiceImplTest.java | 108 ++++++++++++++++++ grpc/build.gradle | 7 ++ versionsOfDependencies.gradle | 2 +- 7 files changed, 178 insertions(+), 4 deletions(-) create mode 100644 grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java create mode 100644 grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index 4efa7138ee..3e4e89674b 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -10,4 +10,8 @@ dependencies { compile "io.grpc:grpc-netty:${revGrpc}" compile "log4j:log4j:1.2.17" + + testCompile "io.grpc:grpc-testing:${revGrpc}" + testCompile "org.mockito:mockito-all:${revMockito}" + testCompile 'org.testinfected.hamcrest-matchers:all-matchers:1.8' } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java index acd53a1d9d..55f9488765 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCModule.java @@ -7,15 +7,20 @@ import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; import com.netflix.conductor.grpc.server.service.EventServiceImpl; +import com.netflix.conductor.grpc.server.service.HealthServiceImpl; import com.netflix.conductor.grpc.server.service.MetadataServiceImpl; import com.netflix.conductor.grpc.server.service.TaskServiceImpl; import com.netflix.conductor.grpc.server.service.WorkflowServiceImpl; +import io.grpc.health.v1.HealthGrpc; + public class GRPCModule extends AbstractModule { @Override protected void configure() { + bind(HealthGrpc.HealthImplBase.class).to(HealthServiceImpl.class); + bind(EventServiceGrpc.EventServiceImplBase.class).to(EventServiceImpl.class); bind(MetadataServiceGrpc.MetadataServiceImplBase.class).to(MetadataServiceImpl.class); bind(TaskServiceGrpc.TaskServiceImplBase.class).to(TaskServiceImpl.class); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java index 83a80a0cee..be85c0a472 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -11,26 +11,31 @@ import javax.inject.Provider; import io.grpc.BindableService; +import io.grpc.health.v1.HealthGrpc; public class 
GRPCServerProvider implements Provider> { private final GRPCServerConfiguration configuration; + private final BindableService healthServiceImpl; private final BindableService eventServiceImpl; - private final BindableService metadataSercieImpl; + private final BindableService metadataServiceImpl; private final BindableService taskServiceImpl; private final BindableService workflowServiceImpl; @Inject public GRPCServerProvider( GRPCServerConfiguration conf, + HealthGrpc.HealthImplBase healthServiceImpl, EventServiceGrpc.EventServiceImplBase eventServiceImpl, MetadataServiceGrpc.MetadataServiceImplBase metadataServiceImpl, TaskServiceGrpc.TaskServiceImplBase taskServiceImpl, WorkflowServiceGrpc.WorkflowServiceImplBase workflowServiceImpl ) { this.configuration = conf; + this.healthServiceImpl = healthServiceImpl; + this.eventServiceImpl = eventServiceImpl; - this.metadataSercieImpl = metadataServiceImpl; + this.metadataServiceImpl = metadataServiceImpl; this.taskServiceImpl = taskServiceImpl; this.workflowServiceImpl = workflowServiceImpl; } @@ -41,8 +46,9 @@ public Optional get() { Optional.of( new GRPCServer( configuration.getPort(), + healthServiceImpl, eventServiceImpl, - metadataSercieImpl, + metadataServiceImpl, taskServiceImpl, workflowServiceImpl )) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java new file mode 100644 index 0000000000..1aca1f8781 --- /dev/null +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/HealthServiceImpl.java @@ -0,0 +1,44 @@ +package com.netflix.conductor.grpc.server.service; + +import com.netflix.runtime.health.api.HealthCheckAggregator; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; + +import io.grpc.health.v1.HealthCheckRequest; +import io.grpc.health.v1.HealthCheckResponse; +import io.grpc.health.v1.HealthGrpc; +import io.grpc.stub.StreamObserver; + +public class HealthServiceImpl extends HealthGrpc.HealthImplBase { + private static final Logger LOGGER = LoggerFactory.getLogger(HealthServiceImpl.class); + private static final GRPCHelper GRPC_HELPER = new GRPCHelper(LOGGER); + + private final HealthCheckAggregator healthCheck; + + @Inject + public HealthServiceImpl(HealthCheckAggregator healthCheck) { + this.healthCheck = healthCheck; + } + + @Override + public void check(HealthCheckRequest request, StreamObserver responseObserver) { + try { + if (healthCheck.check().get().isHealthy()) { + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(HealthCheckResponse.ServingStatus.SERVING).build() + ); + } else { + responseObserver.onNext( + HealthCheckResponse.newBuilder().setStatus(HealthCheckResponse.ServingStatus.NOT_SERVING).build() + ); + } + } catch (Exception ex) { + GRPC_HELPER.onError(responseObserver, ex); + } finally { + responseObserver.onCompleted(); + } + } +} diff --git a/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java b/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java new file mode 100644 index 0000000000..8e320f6ddb --- /dev/null +++ b/grpc-server/src/test/java/com/netflix/conductor/grpc/server/service/HealthServiceImplTest.java @@ -0,0 +1,108 @@ +package com.netflix.conductor.grpc.server.service; + +import com.netflix.runtime.health.api.HealthCheckAggregator; +import com.netflix.runtime.health.api.HealthCheckStatus; + 
+import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; + +import java.util.concurrent.CompletableFuture; + +import io.grpc.BindableService; +import io.grpc.Status; +import io.grpc.StatusRuntimeException; +import io.grpc.health.v1.HealthCheckRequest; +import io.grpc.health.v1.HealthCheckResponse; +import io.grpc.health.v1.HealthGrpc; +import io.grpc.inprocess.InProcessChannelBuilder; +import io.grpc.inprocess.InProcessServerBuilder; +import io.grpc.testing.GrpcCleanupRule; + +import static org.hamcrest.Matchers.hasProperty; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HealthServiceImplTest { + + @Rule + public final GrpcCleanupRule grpcCleanup = new GrpcCleanupRule(); + + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @Test + public void healthServing() throws Exception { + // Generate a unique in-process server name. + String serverName = InProcessServerBuilder.generateName(); + HealthCheckAggregator hca = mock(HealthCheckAggregator.class); + CompletableFuture hcsf = mock(CompletableFuture.class); + HealthCheckStatus hcs = mock(HealthCheckStatus.class); + when(hcs.isHealthy()).thenReturn(true); + when(hcsf.get()).thenReturn(hcs); + when(hca.check()).thenReturn(hcsf); + HealthServiceImpl healthyService = new HealthServiceImpl(hca); + + addService(serverName, healthyService); + HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( + // Create a client channel and register for automatic graceful shutdown. + grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); + + + HealthCheckResponse reply = blockingStub.check(HealthCheckRequest.newBuilder().build()); + + assertEquals(HealthCheckResponse.ServingStatus.SERVING, reply.getStatus()); + } + + @Test + public void healthNotServing() throws Exception { + // Generate a unique in-process server name. + String serverName = InProcessServerBuilder.generateName(); + HealthCheckAggregator hca = mock(HealthCheckAggregator.class); + CompletableFuture hcsf = mock(CompletableFuture.class); + HealthCheckStatus hcs = mock(HealthCheckStatus.class); + when(hcs.isHealthy()).thenReturn(false); + when(hcsf.get()).thenReturn(hcs); + when(hca.check()).thenReturn(hcsf); + HealthServiceImpl healthyService = new HealthServiceImpl(hca); + + addService(serverName, healthyService); + HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( + // Create a client channel and register for automatic graceful shutdown. + grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); + + + HealthCheckResponse reply = blockingStub.check(HealthCheckRequest.newBuilder().build()); + + assertEquals(HealthCheckResponse.ServingStatus.NOT_SERVING, reply.getStatus()); + } + + @Test + public void healthException() throws Exception { + // Generate a unique in-process server name. 
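// As in the tests above, the service runs against an in-process server; here the
// aggregator's future is stubbed to throw, and the check below is expected to surface
// that as a StatusRuntimeException carrying Status.INTERNAL.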
+ String serverName = InProcessServerBuilder.generateName(); + HealthCheckAggregator hca = mock(HealthCheckAggregator.class); + CompletableFuture hcsf = mock(CompletableFuture.class); + when(hcsf.get()).thenThrow(InterruptedException.class); + when(hca.check()).thenReturn(hcsf); + HealthServiceImpl healthyService = new HealthServiceImpl(hca); + + addService(serverName, healthyService); + HealthGrpc.HealthBlockingStub blockingStub = HealthGrpc.newBlockingStub( + // Create a client channel and register for automatic graceful shutdown. + grpcCleanup.register(InProcessChannelBuilder.forName(serverName).directExecutor().build())); + + thrown.expect(StatusRuntimeException.class); + thrown.expect(hasProperty("status", is(Status.INTERNAL))); + blockingStub.check(HealthCheckRequest.newBuilder().build()); + + } + + private void addService(String name, BindableService service) throws Exception { + // Create a server, add service, start, and register for automatic graceful shutdown. + grpcCleanup.register(InProcessServerBuilder + .forName(name).directExecutor().addService(service).build().start()); + } +} diff --git a/grpc/build.gradle b/grpc/build.gradle index ea63b793e4..c653d82a19 100644 --- a/grpc/build.gradle +++ b/grpc/build.gradle @@ -10,13 +10,20 @@ plugins { id "com.google.protobuf" version "0.8.5" } +repositories{ + maven { url "https://dl.bintray.com/chaos-systems/mvn" } +} + dependencies { compile project(':conductor-common') compile project(':conductor-core') + protobuf 'io.chaossystems.grpc:grpc-healthcheck:1.0.+:protos' compile "com.google.api.grpc:proto-google-common-protos:1.0.0" compile "io.grpc:grpc-protobuf:${revGrpc}" compile "io.grpc:grpc-stub:${revGrpc}" + + compile "com.netflix.runtime:health-api:${revHealth}" } protobuf { diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index a738972d31..9e7a445740 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -13,7 +13,7 @@ ext { revElasticSearch5Client = '5.6.8' revEurekaClient = '1.8.7' revFlywayCore ='4.0.3' - revGrpc = '1.11.0' + revGrpc = '1.14.+' revGuavaRetrying = '2.0.0' revGuice = '4.1.0' revGuiceMultiBindings = '4.1.0' From 98c4053dc3887af6bf8d485c879fd7897c19dd6b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 2 Aug 2018 19:30:39 -0700 Subject: [PATCH 117/163] Metadata pre fetching at execution service layer - Metadata pre fetching on behalf of getting rid of MetadataDAO access from DeciderService onwards --- .../common/metadata/workflow/WorkflowDef.java | 4 +- .../conductor/contribs/http/TestHttpTask.java | 6 +-- .../conductor/core/config/CoreModule.java | 16 +++---- .../core/execution/DeciderService.java | 24 +++++------ .../core/execution/MetadataMapperService.java | 30 +++++++++++++ .../core/execution/WorkflowExecutor.java | 25 ++++++++--- .../execution/mapper/DecisionTaskMapper.java | 8 +--- .../execution/mapper/DynamicTaskMapper.java | 25 ++++------- .../execution/mapper/SimpleTaskMapper.java | 21 +++------ .../mapper/UserDefinedTaskMapper.java | 20 +++------ .../conductor/service/MetadataService.java | 43 ++++++++++--------- .../dao/mysql/MySQLExecutionDAO.java | 31 ++++++------- .../dao/dynomite/RedisExecutionDAO.java | 23 +++------- 13 files changed, 138 insertions(+), 138 deletions(-) create mode 100644 core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java 
b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index af37912c0a..70f5844456 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -240,8 +240,8 @@ public WorkflowTask getTaskByRefName(String taskReferenceName){ public List all(){ List all = new LinkedList<>(); - for(WorkflowTask wft : tasks){ - all.addAll(wft.all()); + for(WorkflowTask workflowTask : tasks){ + all.addAll(workflowTask.all()); } return all; } diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index b79bb42b58..50b8696531 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -300,12 +300,12 @@ public void testOptional() throws Exception { ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index b5f13859b0..2fbb0db142 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -33,14 +33,14 @@ import com.netflix.conductor.core.events.queue.dyno.DynoEventQueueProvider; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; -import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper; import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper; import com.netflix.conductor.core.execution.mapper.EventTaskMapper; +import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper; import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper; import com.netflix.conductor.core.execution.mapper.JoinTaskMapper; import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper; -import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper; +import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; import com.netflix.conductor.core.execution.tasks.Event; @@ -95,8 +95,8 @@ 
public TaskMapper getDecisionTaskMapper() { @StringMapKey("DYNAMIC") @Singleton @Named("TaskMappers") - public TaskMapper getDynamicTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - return new DynamicTaskMapper(parametersUtils, metadataDAO); + public TaskMapper getDynamicTaskMapper(ParametersUtils parametersUtils) { + return new DynamicTaskMapper(parametersUtils); } @ProvidesIntoMap @@ -153,16 +153,16 @@ public TaskMapper getForkJoinTaskMapper() { @StringMapKey("USER_DEFINED") @Singleton @Named("TaskMappers") - public TaskMapper getUserDefinedTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - return new UserDefinedTaskMapper(parametersUtils, metadataDAO); + public TaskMapper getUserDefinedTaskMapper(ParametersUtils parametersUtils) { + return new UserDefinedTaskMapper(parametersUtils); } @ProvidesIntoMap @StringMapKey("SIMPLE") @Singleton @Named("TaskMappers") - public TaskMapper getSimpleTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - return new SimpleTaskMapper(parametersUtils, metadataDAO); + public TaskMapper getSimpleTaskMapper(ParametersUtils parametersUtils) { + return new SimpleTaskMapper(parametersUtils); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 68169d6d18..b082200017 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -19,7 +19,6 @@ package com.netflix.conductor.core.execution; import com.google.common.annotations.VisibleForTesting; - import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -31,17 +30,20 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.TaskMapperContext; import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.metrics.Monitors; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.util.*; -import java.util.stream.Collectors; - import javax.inject.Inject; import javax.inject.Named; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED_WITH_ERRORS; import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS; @@ -60,15 +62,12 @@ public class DeciderService { private static Logger logger = LoggerFactory.getLogger(DeciderService.class); - private MetadataDAO metadataDAO; - private ParametersUtils parametersUtils = new ParametersUtils(); private Map taskMappers; @Inject - public DeciderService(MetadataDAO metadataDAO, @Named("TaskMappers") Map taskMappers) { - this.metadataDAO = metadataDAO; + public DeciderService(@Named("TaskMappers") Map taskMappers) { this.taskMappers = taskMappers; } @@ -139,9 +138,7 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa executedTaskRefNames.remove(pendingTask.getReferenceTaskName()); } - String taskDefName = pendingTask.getTaskDefName(); - TaskDef taskDefinition = Optional.ofNullable(pendingTask.getTaskDefinition()) - 
.orElse(metadataDAO.getTaskDef(taskDefName)); + TaskDef taskDefinition = pendingTask.getTaskDefinition(); if (taskDefinition != null) { checkForTimeout(taskDefinition, pendingTask); @@ -468,7 +465,6 @@ public List getTasksToBeScheduled(Workflow workflowInstance, .collect(Collectors.toList()); } - private boolean isTaskSkipped(WorkflowTask taskToSchedule, Workflow workflow) { try { boolean isTaskSkipped = false; diff --git a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java new file mode 100644 index 0000000000..8e778b9135 --- /dev/null +++ b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java @@ -0,0 +1,30 @@ +package com.netflix.conductor.core.execution; + +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.dao.MetadataDAO; + +import javax.inject.Inject; +import java.util.List; + +public class MetadataMapperService { + + private MetadataDAO metadataDAO; + + @Inject + public MetadataMapperService(MetadataDAO metadataDAO) { + this.metadataDAO = metadataDAO; + } + + public WorkflowDef populateTaskDefinitionsMetadata(WorkflowDef workflowDefinition) { + List workflowTasks = workflowDefinition.all(); + for (WorkflowTask workflowTask : workflowTasks) { + + if (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null) { + workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); + } + } + return workflowDefinition; + } + +} diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index dd856ce25d..22572e7861 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -94,6 +94,8 @@ public class WorkflowExecutor { public static final String DECIDER_QUEUE = "_deciderQueue"; + public MetadataMapperService metadataMapperService; + @Inject public WorkflowExecutor( @@ -101,6 +103,7 @@ public WorkflowExecutor( MetadataDAO metadataDAO, ExecutionDAO executionDAO, QueueDAO queueDAO, + MetadataMapperService metadataMapperService, Configuration config ) { this.deciderService = deciderService; @@ -108,6 +111,7 @@ public WorkflowExecutor( this.executionDAO = executionDAO; this.queueDAO = queueDAO; this.config = config; + this.metadataMapperService = metadataMapperService; activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); } @@ -266,6 +270,8 @@ public String startWorkflow( throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } + metadataMapperService.populateTaskDefinitionsMetadata(workflowDefinition); + // Obtain the names of the tasks with missing definitions: // - Are not system tasks // - Don't have embedded definitions @@ -273,7 +279,6 @@ public String startWorkflow( .filter(workflowTask -> (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) .map(workflowTask -> workflowTask.getName()) - .filter(task -> metadataDAO.getTaskDef(task) == null) .collect(Collectors.toSet()); if (!missingTaskDefinitionNames.isEmpty()) { @@ -299,8 +304,10 @@ public String startWorkflow( wf.setUpdateTime(null); wf.setEvent(event); 
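// The metadataMapperService.populateTaskDefinitionsMetadata(workflowDefinition)
// call above is what makes the DAO-free mappers safe: before validation, every
// SIMPLE task without an embedded definition has one fetched from the
// MetadataDAO. The essence of that prefetch, condensed from the new
// MetadataMapperService class earlier in this patch:
//
//     for (WorkflowTask workflowTask : workflowDefinition.all()) {
//         if (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name())
//                 && workflowTask.getTaskDefinition() == null) {
//             workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName()));
//         }
//     }
//
// Any name still unresolved after this loop is reported through the
// missingTaskDefinitionNames check above instead of failing deep inside a mapper.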
wf.setTaskToDomain(taskToDomain); + executionDAO.createWorkflow(wf); logger.info("A new instance of workflow {} created with workflow id {}", wf.getWorkflowName(), workflowId); + //then decide to see if anything needs to be done as part of the workflow decide(workflowId); @@ -354,7 +361,8 @@ public void rewind(String workflowId) { throw new ApplicationException(CONFLICT, "Workflow is still running. status=" + workflow.getStatus()); } - WorkflowDef workflowDef = metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()).get(); + WorkflowDef workflowDef = Optional.ofNullable(workflow.getWorkflowDefinition()) + .orElse(metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()).get()); if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non completed workflows when the configuration is set to false throw new ApplicationException(CONFLICT, String.format("WorkflowId: %s is an instance of WorkflowDef: %s and version: %d and is non restartable", workflowId, workflowDef.getName(), workflowDef.getVersion())); @@ -573,10 +581,12 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo try { - WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow) - .orElseThrow(() -> - new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) - ); + WorkflowDef latestFailureWorkflow = Optional.ofNullable(workflow.getWorkflowDefinition()).orElse( + metadataDAO.getLatest(failureWorkflow) + .orElseThrow(() -> + new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) + ) + ); String failureWFId = startWorkflow( latestFailureWorkflow, @@ -840,7 +850,8 @@ public void pauseWorkflow(String workflowId) { public void resumeWorkflow(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, false); if (!workflow.getStatus().equals(WorkflowStatus.PAUSED)) { - throw new IllegalStateException("The workflow " + workflowId + " is PAUSED so cannot resume"); + throw new IllegalStateException("The workflow " + workflowId + " is not PAUSED so cannot resume. 
" + + "Current status is " + workflow.getStatus().name()); } workflow.setStatus(WorkflowStatus.RUNNING); executionDAO.updateWorkflow(workflow); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java index d482e07e8e..572ebfe510 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java @@ -17,24 +17,21 @@ package com.netflix.conductor.core.execution.mapper; import com.google.common.annotations.VisibleForTesting; - import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.events.ScriptEvaluator; import com.netflix.conductor.core.execution.SystemTaskType; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.script.ScriptException; import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Map; -import javax.script.ScriptException; - /** * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#DECISION} @@ -104,8 +101,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { if (selectedTasks != null && !selectedTasks.isEmpty()) { WorkflowTask selectedTask = selectedTasks.get(0); //Schedule the first task to be executed... //TODO break out this recursive call using function composition of what needs to be done and then walk back the condition tree - List caseTasks = taskMapperContext.getDeciderService() - .getTasksToBeScheduled(workflowInstance, selectedTask, retryCount, taskMapperContext.getRetryTaskId()); + List caseTasks = taskMapperContext.getDeciderService().getTasksToBeScheduled(workflowInstance, selectedTask, retryCount, taskMapperContext.getRetryTaskId()); tasksToBeScheduled.addAll(caseTasks); decisionTask.getInputData().put("hasChildren", "true"); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java index 6027ac42ea..b554c3ed0e 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java @@ -24,7 +24,6 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; -import com.netflix.conductor.dao.MetadataDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -42,12 +41,9 @@ public class DynamicTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(DynamicTaskMapper.class); - private MetadataDAO metadataDAO; - private ParametersUtils parametersUtils; - public DynamicTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - this.metadataDAO = metadataDAO; + public DynamicTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; } @@ -116,23 +112,20 @@ String getDynamicTaskName(Map taskInput, String taskNameParam) t } /** - * This method gets the TaskDefinition from the MetadataDao based on the {@link 
WorkflowTask#getName()} + * This method gets the TaskDefinition for a specific {@link WorkflowTask} * * @param taskToSchedule: An instance of {@link WorkflowTask} which has the name of the using which the {@link TaskDef} can be retrieved. - * @throws TerminateWorkflowException : in case of no work flow definition available in the {@link MetadataDAO} + * @throws TerminateWorkflowException : in case of no work flow definition available * @return: An instance of TaskDefinition */ @VisibleForTesting TaskDef getDynamicTaskDefinition(WorkflowTask taskToSchedule) throws TerminateWorkflowException { //TODO this is a common pattern in code base can be moved to DAO - TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); - if (taskDefinition == null) { - taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", - taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); - } + TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", + taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); return taskDefinition; } } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java index 51e9aee7fb..b694196650 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java @@ -24,7 +24,6 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; -import com.netflix.conductor.dao.MetadataDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -43,20 +42,17 @@ public class SimpleTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(SimpleTaskMapper.class); private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - public SimpleTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { + public SimpleTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; } - /** * This method maps a {@link WorkflowTask} of type {@link WorkflowTask.Type#SIMPLE} * to a {@link Task} * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId - * @throws TerminateWorkflowException In case if the task definition does not exist in the {@link MetadataDAO} + * @throws TerminateWorkflowException In case if the task definition does not exist * @return: a List with just one simple task */ @Override @@ -69,14 +65,11 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter int retryCount = taskMapperContext.getRetryCount(); String retriedTaskId = taskMapperContext.getRetryTaskId(); - TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); - if (taskDefinition == null) { - taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task 
specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); - } + TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); Map input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, taskDefinition, taskMapperContext.getTaskId()); Task simpleTask = new Task(); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java index ebda28ef6b..8a1c948c8e 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java @@ -23,7 +23,6 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; -import com.netflix.conductor.dao.MetadataDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,11 +40,9 @@ public class UserDefinedTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(UserDefinedTaskMapper.class); private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - public UserDefinedTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { + public UserDefinedTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; } /** @@ -53,7 +50,7 @@ public UserDefinedTaskMapper(ParametersUtils parametersUtils, MetadataDAO metada * to a {@link Task} in a {@link Task.Status#SCHEDULED} state * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId - * @throws TerminateWorkflowException In case if the task definition does not exist in the {@link MetadataDAO} + * @throws TerminateWorkflowException In case if the task definition does not exist * @return: a List with just one User defined task */ @Override @@ -66,14 +63,11 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter String taskId = taskMapperContext.getTaskId(); int retryCount = taskMapperContext.getRetryCount(); - TaskDef taskDefinition = taskToSchedule.getTaskDefinition(); - if (taskDefinition == null) { - taskDefinition = Optional.ofNullable(metadataDAO.getTaskDef(taskToSchedule.getName())) - .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); - return new TerminateWorkflowException(reason); - }); - } + TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()) + .orElseThrow(() -> { + String reason = String.format("Invalid task specified. 
Cannot find task by name %s in the task definitions", taskToSchedule.getName()); + return new TerminateWorkflowException(reason); + }); Map input = parametersUtils.getTaskInputV2(taskToSchedule.getInputParameters(), workflowInstance, taskId, taskDefinition); diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java index bfed855653..784c847a79 100644 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java @@ -24,6 +24,8 @@ import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.execution.ApplicationException; @@ -32,6 +34,7 @@ import java.util.List; import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import javax.inject.Inject; @@ -45,11 +48,11 @@ @Trace public class MetadataService { - private MetadataDAO metadata; + private MetadataDAO metadataDAO; @Inject - public MetadataService(MetadataDAO metadata) { - this.metadata = metadata; + public MetadataService(MetadataDAO metadataDAO) { + this.metadataDAO = metadataDAO; } /** @@ -62,7 +65,7 @@ public void registerTaskDef(List taskDefinitions) { taskDefinition.setCreateTime(System.currentTimeMillis()); taskDefinition.setUpdatedBy(null); taskDefinition.setUpdateTime(null); - metadata.createTaskDef(taskDefinition); + metadataDAO.createTaskDef(taskDefinition); } } @@ -71,13 +74,13 @@ public void registerTaskDef(List taskDefinitions) { * @param taskDefinition Task Definition to be updated */ public void updateTaskDef(TaskDef taskDefinition) { - TaskDef existing = metadata.getTaskDef(taskDefinition.getName()); + TaskDef existing = metadataDAO.getTaskDef(taskDefinition.getName()); if (existing == null) { throw new ApplicationException(Code.NOT_FOUND, "No such task by name " + taskDefinition.getName()); } taskDefinition.setUpdatedBy(WorkflowContext.get().getClientApp()); taskDefinition.setUpdateTime(System.currentTimeMillis()); - metadata.updateTaskDef(taskDefinition); + metadataDAO.updateTaskDef(taskDefinition); } /** @@ -85,7 +88,7 @@ public void updateTaskDef(TaskDef taskDefinition) { * @param taskType Remove task definition */ public void unregisterTaskDef(String taskType) { - metadata.removeTaskDef(taskType); + metadataDAO.removeTaskDef(taskType); } /** @@ -93,7 +96,7 @@ public void unregisterTaskDef(String taskType) { * @return List of all the registered tasks */ public List getTaskDefs() { - return metadata.getAllTaskDefs(); + return metadataDAO.getAllTaskDefs(); } /** @@ -102,7 +105,7 @@ public List getTaskDefs() { * @return Task Definition */ public TaskDef getTaskDef(String taskType) { - return metadata.getTaskDef(taskType); + return metadataDAO.getTaskDef(taskType); } /** @@ -110,7 +113,7 @@ public TaskDef getTaskDef(String taskType) { * @param def Workflow definition to be updated */ public void updateWorkflowDef(WorkflowDef def) { - metadata.update(def); + metadataDAO.update(def); } /** @@ -119,7 +122,7 @@ public void updateWorkflowDef(WorkflowDef def) { */ public void updateWorkflowDef(List wfs) { for (WorkflowDef wf : wfs) 
{ - metadata.update(wf); + metadataDAO.update(wf); } } @@ -133,7 +136,7 @@ public Optional getWorkflowDef(String name, Integer version) { if (version == null) { return getLatestWorkflow(name); } - return metadata.get(name, version); + return metadataDAO.get(name, version); } /** @@ -142,11 +145,11 @@ public Optional getWorkflowDef(String name, Integer version) { * @return Latest version of the workflow definition */ public Optional getLatestWorkflow(String name) { - return metadata.getLatest(name); + return metadataDAO.getLatest(name); } public List getWorkflowDefs() { - return metadata.getAll(); + return metadataDAO.getAll(); } public void registerWorkflowDef(WorkflowDef def) { @@ -156,7 +159,7 @@ public void registerWorkflowDef(WorkflowDef def) { if(def.getSchemaVersion() < 1 || def.getSchemaVersion() > 2) { def.setSchemaVersion(2); } - metadata.create(def); + metadataDAO.create(def); } /** @@ -166,7 +169,7 @@ public void registerWorkflowDef(WorkflowDef def) { */ public void addEventHandler(EventHandler eventHandler) { validateEvent(eventHandler); - metadata.addEventHandler(eventHandler); + metadataDAO.addEventHandler(eventHandler); } /** @@ -175,7 +178,7 @@ public void addEventHandler(EventHandler eventHandler) { */ public void updateEventHandler(EventHandler eventHandler) { validateEvent(eventHandler); - metadata.updateEventHandler(eventHandler); + metadataDAO.updateEventHandler(eventHandler); } /** @@ -183,7 +186,7 @@ public void updateEventHandler(EventHandler eventHandler) { * @param name Removes the event handler from the system */ public void removeEventHandlerStatus(String name) { - metadata.removeEventHandlerStatus(name); + metadataDAO.removeEventHandlerStatus(name); } /** @@ -191,7 +194,7 @@ public void removeEventHandlerStatus(String name) { * @return All the event handlers registered in the system */ public List getEventHandlers() { - return metadata.getEventHandlers(); + return metadataDAO.getEventHandlers(); } /** @@ -201,7 +204,7 @@ public List getEventHandlers() { * @return Returns the list of all the event handlers for a given event */ public List getEventHandlersForEvent(String event, boolean activeOnly) { - return metadata.getEventHandlersForEvent(event, activeOnly); + return metadataDAO.getEventHandlersForEvent(event, activeOnly); } private void validateEvent(EventHandler eh) { diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java index 46f5fbb6aa..dab5c4f4b2 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java @@ -1,13 +1,5 @@ package com.netflix.conductor.dao.mysql; -import java.sql.Connection; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.stream.Collectors; - -import javax.inject.Inject; -import javax.sql.DataSource; - import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -21,9 +13,19 @@ import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.metrics.Monitors; +import javax.inject.Inject; +import javax.sql.DataSource; +import java.sql.Connection; +import java.text.SimpleDateFormat; +import java.util.ArrayList; 
+import java.util.Comparator; +import java.util.Date; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Collectors; + public class MySQLExecutionDAO extends MySQLBaseDAO implements ExecutionDAO { private static final String ARCHIVED_FIELD = "archived"; @@ -31,13 +33,10 @@ public class MySQLExecutionDAO extends MySQLBaseDAO implements ExecutionDAO { private IndexDAO indexer; - private MetadataDAO metadataDAO; - @Inject - public MySQLExecutionDAO(IndexDAO indexer, MetadataDAO metadataDAO, ObjectMapper om, DataSource dataSource) { + public MySQLExecutionDAO(IndexDAO indexer, ObjectMapper om, DataSource dataSource) { super(om, dataSource); this.indexer = indexer; - this.metadataDAO = metadataDAO; } private static String dateStr(Long timeInMs) { @@ -126,8 +125,7 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = Optional.ofNullable(task.getTaskDefinition()) - .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); + TaskDef taskDef = task.getTaskDefinition(); if (taskDef == null) { return false; @@ -500,8 +498,7 @@ private void updateTask(Connection connection, Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = Optional.ofNullable(task.getTaskDefinition()) - .orElse(metadataDAO.getTaskDef(task.getTaskDefName())); + TaskDef taskDef = task.getTaskDefinition(); if (taskDef != null && taskDef.concurrencyLimit() > 0) { boolean inProgress = task.getStatus() != null && task.getStatus().equals(Task.Status.IN_PROGRESS); diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index fd701c4d94..1f0df0c3e1 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -15,11 +15,10 @@ */ package com.netflix.conductor.dao.dynomite; +import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.inject.Singleton; - -import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventExecution; import com.netflix.conductor.common.metadata.tasks.PollData; @@ -35,15 +34,14 @@ import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dyno.DynoProxy; import com.netflix.conductor.metrics.Monitors; - import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.inject.Inject; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Calendar; @@ -58,8 +56,6 @@ import java.util.Set; import java.util.stream.Collectors; -import javax.inject.Inject; - @Singleton @Trace public class RedisExecutionDAO extends BaseDynoDAO implements ExecutionDAO { @@ -89,18 +85,15 @@ public class RedisExecutionDAO extends BaseDynoDAO implements ExecutionDAO { private IndexDAO indexDAO; - private MetadataDAO metadataDA0; - private long taskPayloadThreshold; private long workflowInputPayloadThreshold; @Inject public RedisExecutionDAO(DynoProxy dynoClient, 
ObjectMapper objectMapper, - IndexDAO indexDAO, MetadataDAO metadataDA0, Configuration config) { + IndexDAO indexDAO, Configuration config) { super(dynoClient, objectMapper, config); this.indexDAO = indexDAO; - this.metadataDA0 = metadataDA0; this.taskPayloadThreshold = config.getLongProperty(WORKFLOW_DYNOMITE_TASK_PAYLOAD_THRESHOLD,5 * FileUtils.ONE_MB); this.workflowInputPayloadThreshold = config.getLongProperty(WORKFLOW_DYNOMITE_WORKFLOW_INPUT_THRESHOLD,5 * FileUtils.ONE_MB); } @@ -194,13 +187,7 @@ public void updateTask(Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef; - if (task.getWorkflowTask() != null) { - taskDef = Optional.ofNullable(task.getWorkflowTask().getTaskDefinition()) - .orElse(metadataDA0.getTaskDef(task.getTaskDefName())); - } else { - taskDef = metadataDA0.getTaskDef(task.getTaskDefName()); - } + TaskDef taskDef = task.getTaskDefinition(); if(taskDef != null && taskDef.concurrencyLimit() > 0) { @@ -252,7 +239,7 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = metadataDA0.getTaskDef(task.getTaskDefName()); + TaskDef taskDef = task.getTaskDefinition(); if(taskDef == null) { return false; } From b2f4caee277d9d9eb6e0d66c709c8a49675139fa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 3 Aug 2018 13:00:54 -0700 Subject: [PATCH 118/163] Refactored sub workflow task mapper in order to not use metadataDAO --- .../metadata/workflow/SubWorkflowParams.java | 7 ++-- .../conductor/core/config/CoreModule.java | 5 ++- .../core/execution/MetadataMapperService.java | 30 +++++++++++++++- .../core/execution/WorkflowExecutor.java | 2 +- .../mapper/SubWorkflowTaskMapper.java | 35 +++---------------- .../conductor/grpc/AbstractProtoMapper.java | 6 ++-- .../main/proto/model/subworkflowparams.proto | 3 +- 7 files changed, 43 insertions(+), 45 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java index 4380a8d268..cb9a8d99c5 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/SubWorkflowParams.java @@ -30,9 +30,8 @@ public class SubWorkflowParams { @ProtoField(id = 1) private String name; - //QQ why is this an object ?? 
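// Answering the "QQ" removed above: version was typed as Object (a
// google.protobuf.Value on the wire) apparently only so callers could round-trip
// it through toString()/Integer.parseInt, as the getSubWorkflowVersion method
// deleted later in this patch shows. Typing it as Integer also gives null a
// meaning: "resolve the latest registered definition at start time" (see
// MetadataMapperService.populateVersionForSubWorkflow in this patch).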
@ProtoField(id = 2) - private Object version; + private Integer version; /** * @return the name @@ -51,14 +50,14 @@ public void setName(String name) { /** * @return the version */ - public Object getVersion() { + public Integer getVersion() { return version; } /** * @param version the version to set */ - public void setVersion(Object version) { + public void setVersion(Integer version) { this.version = version; } diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index 2fbb0db142..bfd3c9f35b 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -47,7 +47,6 @@ import com.netflix.conductor.core.execution.tasks.SubWorkflow; import com.netflix.conductor.core.execution.tasks.SystemTaskWorkerCoordinator; import com.netflix.conductor.core.execution.tasks.Wait; -import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; @@ -137,8 +136,8 @@ public TaskMapper getWaitTaskMapper(ParametersUtils parametersUtils) { @Singleton @StringMapKey("SUB_WORKFLOW") @Named("TaskMappers") - public TaskMapper getSubWorkflowTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { - return new SubWorkflowTaskMapper(parametersUtils, metadataDAO); + public TaskMapper getSubWorkflowTaskMapper(ParametersUtils parametersUtils) { + return new SubWorkflowTaskMapper(parametersUtils); } @ProvidesIntoMap diff --git a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java index 8e778b9135..aa8f4de289 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java @@ -1,14 +1,21 @@ package com.netflix.conductor.core.execution; +import com.google.inject.Singleton; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.dao.MetadataDAO; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.util.List; +@Singleton public class MetadataMapperService { + public static final Logger logger = LoggerFactory.getLogger(MetadataMapperService.class); + private MetadataDAO metadataDAO; @Inject @@ -16,15 +23,36 @@ public MetadataMapperService(MetadataDAO metadataDAO) { this.metadataDAO = metadataDAO; } - public WorkflowDef populateTaskDefinitionsMetadata(WorkflowDef workflowDefinition) { + public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { List workflowTasks = workflowDefinition.all(); for (WorkflowTask workflowTask : workflowTasks) { + if (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); + } else if (workflowTask.getType().equals(WorkflowTask.Type.SUB_WORKFLOW.name())) { + populateVersionForSubWorkflow(workflowTask); } } return workflowDefinition; } + private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { + SubWorkflowParams subworkflowParams = workflowTask.getSubWorkflowParam(); + if (subworkflowParams.getVersion() == null) { + String subWorkflowName = 
subworkflowParams.getName(); + Integer subWorkflowVersion = + metadataDAO.getLatest(subWorkflowName) + .map(WorkflowDef::getVersion) + .orElseThrow( + () -> { + String reason = String.format("The Task %s defined as a sub-workflow has no workflow definition available ", subWorkflowName); + logger.error(reason); + return new TerminateWorkflowException(reason); + } + ); + subworkflowParams.setVersion(subWorkflowVersion); + } + } + } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 22572e7861..c21f859da6 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -270,7 +270,7 @@ public String startWorkflow( throw new ApplicationException(Code.INVALID_INPUT, "NULL input passed when starting workflow"); } - metadataMapperService.populateTaskDefinitionsMetadata(workflowDefinition); + metadataMapperService.populateTaskDefinitions(workflowDefinition); // Obtain the names of the tasks with missing definitions: // - Are not system tasks diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java index 378e6d8658..4d16ba9300 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java @@ -15,40 +15,32 @@ package com.netflix.conductor.core.execution.mapper; import com.google.common.annotations.VisibleForTesting; - import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.execution.tasks.SubWorkflow; -import com.netflix.conductor.dao.MetadataDAO; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.inject.Inject; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; -import javax.inject.Inject; - public class SubWorkflowTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(SubWorkflowTaskMapper.class); private ParametersUtils parametersUtils; - private MetadataDAO metadataDAO; - @Inject - public SubWorkflowTaskMapper(ParametersUtils parametersUtils, MetadataDAO metadataDAO) { + public SubWorkflowTaskMapper(ParametersUtils parametersUtils) { this.parametersUtils = parametersUtils; - this.metadataDAO = metadataDAO; } @Override @@ -64,8 +56,6 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { String subWorkflowName = resolvedParams.get("name").toString(); - Integer subWorkflowVersion = getSubWorkflowVersion(resolvedParams, subWorkflowName); - Task subWorkflowTask = new Task(); subWorkflowTask.setTaskType(SubWorkflow.NAME); subWorkflowTask.setTaskDefName(taskToSchedule.getName()); @@ -76,7 +66,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) { subWorkflowTask.setScheduledTime(System.currentTimeMillis()); 
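// No version lookup happens in the mapper any more:
// MetadataMapperService.populateVersionForSubWorkflow (above) has already filled
// subWorkflowParams.getVersion() from the latest registered definition whenever
// the authoring workflow left it unset, so it can be read directly below.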
subWorkflowTask.setEndTime(System.currentTimeMillis()); subWorkflowTask.getInputData().put("subWorkflowName", subWorkflowName); - subWorkflowTask.getInputData().put("subWorkflowVersion", subWorkflowVersion); + subWorkflowTask.getInputData().put("subWorkflowVersion", subWorkflowParams.getVersion()); subWorkflowTask.getInputData().put("workflowInput", taskMapperContext.getTaskInput()); subWorkflowTask.setTaskId(taskId); subWorkflowTask.setStatus(Task.Status.SCHEDULED); @@ -101,26 +91,11 @@ Map getSubWorkflowInputParameters(Workflow workflowInstance, Sub Map params = new HashMap<>(); params.put("name", subWorkflowParams.getName()); - Object version = subWorkflowParams.getVersion(); + Integer version = subWorkflowParams.getVersion(); if (version != null) { - params.put("version", version.toString()); + params.put("version", version); } return parametersUtils.getTaskInputV2(params, workflowInstance, null, null); } - @VisibleForTesting - Integer getSubWorkflowVersion(Map resolvedParams, String subWorkflowName) throws TerminateWorkflowException { - return Optional.ofNullable(resolvedParams.get("version")) - .map(Object::toString) - .map(Integer::parseInt) - .orElseGet( - () -> metadataDAO.getLatest(subWorkflowName) - .map(WorkflowDef::getVersion) - .orElseThrow(() -> { - String reason = String.format("The Task %s defined as a sub-workflow has no workflow definition available ", subWorkflowName); - logger.error(reason); - return new TerminateWorkflowException(reason); - })); - } - } diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 3cabbe1f13..86ef1b1908 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -785,7 +785,7 @@ public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { to.setName( from.getName() ); } if (from.getVersion() != null) { - to.setVersion( toProto( from.getVersion() ) ); + to.setVersion( from.getVersion() ); } return to.build(); } @@ -793,9 +793,7 @@ public SubWorkflowParamsPb.SubWorkflowParams toProto(SubWorkflowParams from) { public SubWorkflowParams fromProto(SubWorkflowParamsPb.SubWorkflowParams from) { SubWorkflowParams to = new SubWorkflowParams(); to.setName( from.getName() ); - if (from.hasVersion()) { - to.setVersion( fromProto( from.getVersion() ) ); - } + to.setVersion( from.getVersion() ); return to; } diff --git a/grpc/src/main/proto/model/subworkflowparams.proto b/grpc/src/main/proto/model/subworkflowparams.proto index 6720f2341b..4a2005c37f 100644 --- a/grpc/src/main/proto/model/subworkflowparams.proto +++ b/grpc/src/main/proto/model/subworkflowparams.proto @@ -1,7 +1,6 @@ syntax = "proto3"; package conductor.proto; -import "google/protobuf/struct.proto"; option java_package = "com.netflix.conductor.proto"; option java_outer_classname = "SubWorkflowParamsPb"; @@ -9,5 +8,5 @@ option go_package = "github.com/netflix/conductor/client/gogrpc/conductor/model" message SubWorkflowParams { string name = 1; - google.protobuf.Value version = 2; + int32 version = 2; } From f81b88d8a9ba4b8478eb5431f598e0892579ada3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 3 Aug 2018 16:37:11 -0700 Subject: [PATCH 119/163] Metadata access refactoring and addressing data access suggestions - Use of Optional instead of null checking when possible - Import statements sorted and deleted unused ones --- 
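The commit applies one pattern throughout: accessors that may find nothing return Optional, and call sites guard on presence instead of null-checking. A minimal self-contained sketch of that pattern follows; the *Sketch class and field names are illustrative, while getTaskDefinition() and the concurrencyLimit() guard mirror the diffs below.

    import java.util.Optional;

    class TaskSketch {
        WorkflowTaskSketch workflowTask; // null when the task carries no workflow metadata

        // Absence of a definition is explicit in the return type, not a null.
        Optional<TaskDefSketch> getTaskDefinition() {
            return workflowTask != null
                    ? Optional.ofNullable(workflowTask.getTaskDefinition())
                    : Optional.empty();
        }

        // Call sites compose a presence check with the limit check.
        boolean hasConcurrencyLimit() {
            Optional<TaskDefSketch> taskDefinition = getTaskDefinition();
            return taskDefinition.isPresent() && taskDefinition.get().concurrencyLimit() > 0;
        }
    }

    class WorkflowTaskSketch {
        TaskDefSketch taskDefinition; // may legitimately be null
        TaskDefSketch getTaskDefinition() { return taskDefinition; }
    }

    class TaskDefSketch {
        int concurrencyLimit;
        int concurrencyLimit() { return concurrencyLimit; }
    }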
.../conductor/common/metadata/tasks/Task.java | 13 ++++++------- .../common/metadata/workflow/WorkflowDef.java | 15 +++++++-------- .../common/metadata/workflow/WorkflowTask.java | 10 +++++----- .../conductor/core/execution/DeciderService.java | 11 ++++++----- .../core/execution/MetadataMapperService.java | 2 +- .../core/execution/WorkflowExecutor.java | 6 ++---- .../core/execution/mapper/DynamicTaskMapper.java | 3 +-- .../conductor/service/MetadataService.java | 10 ++-------- .../core/execution/TestDeciderService.java | 7 ++----- .../grpc/server/service/WorkflowServiceImpl.java | 10 ++++------ .../conductor/dao/mysql/MySQLExecutionDAO.java | 11 +++++++---- .../conductor/dao/dynomite/RedisExecutionDAO.java | 12 ++++++------ 12 files changed, 49 insertions(+), 61 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index 2c30236608..6dd899aaf1 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.Optional; @ProtoMessage public class Task { @@ -594,14 +595,12 @@ public void setOutputMessage(Any outputMessage) { } /** - * @return the task definition associated to the running task if available + * @return {@link Optional} containing the task definition if available */ - public TaskDef getTaskDefinition() { - TaskDef taskDefinition = null; - if (this.getWorkflowTask() != null) { - taskDefinition = this.getWorkflowTask().getTaskDefinition(); - } - return taskDefinition; + public Optional getTaskDefinition() { + return this.getWorkflowTask() != null ? 
+ Optional.ofNullable(this.getWorkflowTask().getTaskDefinition()) : + Optional.empty(); } public Task copy() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 70f5844456..00614b4f43 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -29,7 +29,6 @@ import com.github.vmg.protogen.annotations.ProtoField; import com.github.vmg.protogen.annotations.ProtoMessage; import com.netflix.conductor.common.metadata.Auditable; -import com.netflix.conductor.common.metadata.tasks.TaskDef; /** * @author Viren @@ -229,7 +228,7 @@ public WorkflowTask getNextTask(String taskReferenceName){ } public WorkflowTask getTaskByRefName(String taskReferenceName){ - Optional found = all().stream() + Optional found = collectTasks().stream() .filter(workflowTask -> workflowTask.getTaskReferenceName().equals(taskReferenceName)) .findFirst(); if(found.isPresent()){ @@ -237,13 +236,13 @@ public WorkflowTask getTaskByRefName(String taskReferenceName){ } return null; } - - public List all(){ - List all = new LinkedList<>(); - for(WorkflowTask workflowTask : tasks){ - all.addAll(workflowTask.all()); + + public List collectTasks() { + List tasks = new LinkedList<>(); + for (WorkflowTask workflowTask : this.tasks) { + tasks.addAll(workflowTask.collectTasks()); } - return all; + return tasks; } @Override diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index a580f72cbb..1f09b1deed 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -443,15 +443,15 @@ private Collection> children() { } - public List all() { - List all = new LinkedList<>(); - all.add(this); + public List collectTasks() { + List tasks = new LinkedList<>(); + tasks.add(this); for (List workflowTaskList : children()) { for (WorkflowTask workflowTask : workflowTaskList) { - all.addAll(workflowTask.all()); + tasks.addAll(workflowTask.collectTasks()); } } - return all; + return tasks; } public WorkflowTask next(String taskReferenceName, WorkflowTask parent) { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index b082200017..9160f6971e 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -42,6 +42,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -138,12 +139,12 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa executedTaskRefNames.remove(pendingTask.getReferenceTaskName()); } - TaskDef taskDefinition = pendingTask.getTaskDefinition(); + Optional taskDefinition = pendingTask.getTaskDefinition(); - if (taskDefinition != null) { - checkForTimeout(taskDefinition, pendingTask); + if (taskDefinition.isPresent()) { + checkForTimeout(taskDefinition.get(), pendingTask); // If the task has not been updated for "responseTimeout" then mark task as 
TIMED_OUT - if (isResponseTimedOut(taskDefinition, pendingTask)) { + if (isResponseTimedOut(taskDefinition.get(), pendingTask)) { timeoutTask(pendingTask); } } @@ -156,7 +157,7 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa if (workflowTask != null && workflowTask.isOptional()) { pendingTask.setStatus(COMPLETED_WITH_ERRORS); } else { - Task retryTask = retry(taskDefinition, workflowTask, pendingTask, workflow); + Task retryTask = retry(taskDefinition.get(), workflowTask, pendingTask, workflow); tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask); executedTaskRefNames.remove(retryTask.getReferenceTaskName()); outcome.tasksToBeUpdated.add(pendingTask); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java index aa8f4de289..37d496ec5c 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java @@ -24,7 +24,7 @@ public MetadataMapperService(MetadataDAO metadataDAO) { } public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { - List workflowTasks = workflowDefinition.all(); + List workflowTasks = workflowDefinition.collectTasks(); for (WorkflowTask workflowTask : workflowTasks) { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index c21f859da6..ba411be6b0 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -272,10 +272,8 @@ public String startWorkflow( metadataMapperService.populateTaskDefinitions(workflowDefinition); - // Obtain the names of the tasks with missing definitions: - // - Are not system tasks - // - Don't have embedded definitions - Set missingTaskDefinitionNames = workflowDefinition.all().stream() + // Obtain the names of the tasks with missing definitions + Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() .filter(workflowTask -> (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) .map(workflowTask -> workflowTask.getName()) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java index b554c3ed0e..df0318b870 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java @@ -120,12 +120,11 @@ String getDynamicTaskName(Map taskInput, String taskNameParam) t */ @VisibleForTesting TaskDef getDynamicTaskDefinition(WorkflowTask taskToSchedule) throws TerminateWorkflowException { //TODO this is a common pattern in code base can be moved to DAO - TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()) + return Optional.ofNullable(taskToSchedule.getTaskDefinition()) .orElseThrow(() -> { String reason = String.format("Invalid task specified. 
Cannot find task by name %s in the task definitions", taskToSchedule.getName()); return new TerminateWorkflowException(reason); }); - return taskDefinition; } } diff --git a/core/src/main/java/com/netflix/conductor/service/MetadataService.java b/core/src/main/java/com/netflix/conductor/service/MetadataService.java index 784c847a79..bc862148a9 100644 --- a/core/src/main/java/com/netflix/conductor/service/MetadataService.java +++ b/core/src/main/java/com/netflix/conductor/service/MetadataService.java @@ -19,26 +19,20 @@ package com.netflix.conductor.service; import com.google.common.base.Preconditions; - import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.events.EventHandler; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.MetadataDAO; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; - import javax.inject.Inject; import javax.inject.Singleton; +import java.util.List; +import java.util.Optional; /** * @author Viren diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index 17f8bc626b..f611891500 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -156,7 +156,6 @@ public void testGetTaskInputV2() throws Exception { assertNull(taskInput.get("taskOutputParam3")); assertNull(taskInput.get("nullValue")); assertEquals(workflow.getTasks().get(0).getStatus().name(), taskInput.get("task2Status")); //task2 and task3 are the tasks respectively - System.out.println(taskInput); } @Test @@ -221,8 +220,6 @@ public void testGetTaskInputV2Partial() throws Exception { assertEquals("The Doors", taskInput.get("secondName")); assertEquals("The Band is: The Doors-\ti-123abcdef990", taskInput.get("concatenatedName")); - System.out.println(new ObjectMapper().enable(SerializationFeature.INDENT_OUTPUT).writeValueAsString(taskInput)); - assertEquals("request id 001", taskInput.get("workflowInputParam")); assertEquals("http://location", taskInput.get("taskOutputParam")); assertNull(taskInput.get("taskOutputParam3")); @@ -265,7 +262,7 @@ public void testGetTaskInput() throws Exception { task.getOutputData().put("isPersonActive", true); workflow.getTasks().add(task); Map taskInput = parametersUtils.getTaskInput(ip, workflow, null, null); - System.out.println(taskInput.get("complexJson")); + assertNotNull(taskInput); assertTrue(taskInput.containsKey("workflowInputParam")); assertTrue(taskInput.containsKey("taskOutputParam")); @@ -628,7 +625,7 @@ public void testDecideSuccessfulWorkflow() throws Exception { DeciderOutcome deciderOutcome = deciderService.decide(workflow); assertNotNull(deciderOutcome); -System.out.println(workflow); + assertFalse(workflow.getTaskByRefName("s1").isRetried()); assertEquals(1, deciderOutcome.tasksToBeUpdated.size()); assertEquals("s1", 
deciderOutcome.tasksToBeUpdated.get(0).getReferenceTaskName()); diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java index 64f6413ac5..e11a59bc1c 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/service/WorkflowServiceImpl.java @@ -17,19 +17,16 @@ import com.netflix.conductor.proto.WorkflowPb; import com.netflix.conductor.service.ExecutionService; import com.netflix.conductor.service.MetadataService; - +import io.grpc.Status; +import io.grpc.stub.StreamObserver; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.inject.Inject; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import javax.inject.Inject; - -import io.grpc.Status; -import io.grpc.stub.StreamObserver; - public class WorkflowServiceImpl extends WorkflowServiceGrpc.WorkflowServiceImplBase { private static final Logger LOGGER = LoggerFactory.getLogger(TaskServiceImpl.class); private static final ProtoMapper PROTO_MAPPER = ProtoMapper.INSTANCE; @@ -54,6 +51,7 @@ public void startWorkflow(StartWorkflowRequestPb.StartWorkflowRequest pbRequest, final StartWorkflowRequest request = PROTO_MAPPER.fromProto(pbRequest); try { + // TODO When moving to Java 9: Use ifPresentOrElse(Consumer action, Runnable emptyAction) String id; if (request.getWorkflowDef() == null) { id = executor.startWorkflow( diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java index dab5c4f4b2..1d971ef626 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAO.java @@ -24,6 +24,7 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; +import java.util.Optional; import java.util.stream.Collectors; public class MySQLExecutionDAO extends MySQLBaseDAO implements ExecutionDAO { @@ -125,12 +126,14 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = task.getTaskDefinition(); - if (taskDef == null) { + Optional taskDefinition = task.getTaskDefinition(); + if (!taskDefinition.isPresent()) { return false; } + TaskDef taskDef = taskDefinition.get(); + int limit = taskDef.concurrencyLimit(); if (limit <= 0) { return false; @@ -498,9 +501,9 @@ private void updateTask(Connection connection, Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = task.getTaskDefinition(); + Optional taskDefinition = task.getTaskDefinition(); - if (taskDef != null && taskDef.concurrencyLimit() > 0) { + if (taskDefinition.isPresent() && taskDefinition.get().concurrencyLimit() > 0) { boolean inProgress = task.getStatus() != null && task.getStatus().equals(Task.Status.IN_PROGRESS); updateInProgressStatus(connection, task, inProgress); } diff --git a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java index 1f0df0c3e1..d5487fb5d7 100644 --- a/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java +++ 
b/redis-persistence/src/main/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAO.java @@ -187,9 +187,9 @@ public void updateTask(Task task) { task.setEndTime(System.currentTimeMillis()); } - TaskDef taskDef = task.getTaskDefinition(); + Optional taskDefinition = task.getTaskDefinition(); - if(taskDef != null && taskDef.concurrencyLimit() > 0) { + if(taskDefinition.isPresent() && taskDefinition.get().concurrencyLimit() > 0) { if(task.getStatus() != null && task.getStatus().equals(Status.IN_PROGRESS)) { dynoClient.sadd(nsKey(TASKS_IN_PROGRESS_STATUS, task.getTaskDefName()), task.getTaskId()); @@ -207,7 +207,7 @@ public void updateTask(Task task) { } String payload = toJson(task); - recordRedisDaoPayloadSize("updateTask", payload.length(), Optional.ofNullable(taskDef) + recordRedisDaoPayloadSize("updateTask", payload.length(), taskDefinition .map(TaskDef::getName) .orElse("n/a"), task.getWorkflowType()); //The payload is verified and @@ -239,11 +239,11 @@ public void updateTask(Task task) { @Override public boolean exceedsInProgressLimit(Task task) { - TaskDef taskDef = task.getTaskDefinition(); - if(taskDef == null) { + Optional taskDefinition = task.getTaskDefinition(); + if(!taskDefinition.isPresent()) { return false; } - int limit = taskDef.concurrencyLimit(); + int limit = taskDefinition.get().concurrencyLimit(); if(limit <= 0) { return false; } From c5ccac6df651cfde4a17cc815bc77fbe972e8830 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Sun, 5 Aug 2018 13:25:58 -0700 Subject: [PATCH 120/163] Updated tests in order to consider metadata prefetching at workflow execution level --- .../conductor/common/tasks/TestTask.java | 8 +- .../conductor/contribs/http/TestHttpTask.java | 14 +- .../core/execution/DeciderService.java | 2 +- .../core/execution/TestDeciderOutcomes.java | 63 ++++----- .../core/execution/TestDeciderService.java | 30 ++++- .../core/execution/TestWorkflowExecutor.java | 13 +- .../mapper/DynamicTaskMapperTest.java | 13 +- .../mapper/SimpleTaskMapperTest.java | 13 +- .../mapper/SubWorkflowTaskMapperTest.java | 45 +------ .../mapper/UserDefinedTaskMapperTest.java | 13 +- .../conductor/dao/ExecutionDAOTest.java | 17 ++- core/src/test/resources/conditional_flow.json | 126 +++++++++++++++++- .../dao/mysql/MySQLExecutionDAOTest.java | 17 +-- .../dao/dynomite/RedisExecutionDAOTest.java | 15 +-- .../integration/WorkflowServiceTest.java | 18 ++- 15 files changed, 235 insertions(+), 172 deletions(-) diff --git a/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java b/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java index 99fd8f6731..1485bf849a 100644 --- a/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java +++ b/common/src/test/java/com/netflix/conductor/common/tasks/TestTask.java @@ -19,7 +19,9 @@ package com.netflix.conductor.common.tasks; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; import java.util.Arrays; import java.util.Set; @@ -68,14 +70,16 @@ public void testTaskDefinitionIfAvailable() { Task task = new Task(); task.setStatus(Status.FAILED); assertEquals(Status.FAILED, task.getStatus()); + assertNull(task.getWorkflowTask()); - assertNull(task.getTaskDefinition()); + assertFalse(task.getTaskDefinition().isPresent()); WorkflowTask workflowTask = new WorkflowTask(); TaskDef taskDefinition = new TaskDef(); workflowTask.setTaskDefinition(taskDefinition); 
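/*
 * For illustration only — a minimal, self-contained sketch (hypothetical stub
 * classes, not Conductor's real Task/WorkflowTask/TaskDef) of the accessor shape
 * this test exercises after the migration: the TaskDef is prefetched onto the
 * WorkflowTask at workflow-execution level, so Task.getTaskDefinition() resolves
 * it locally and returns an empty Optional instead of null when nothing was
 * prefetched.
 */
import java.util.Optional;

class TaskDefStub { }

class WorkflowTaskStub {
    private TaskDefStub taskDefinition; // populated up front, e.g. at workflow start
    TaskDefStub getTaskDefinition() { return taskDefinition; }
    void setTaskDefinition(TaskDefStub taskDefinition) { this.taskDefinition = taskDefinition; }
}

class TaskStub {
    private WorkflowTaskStub workflowTask;
    void setWorkflowTask(WorkflowTaskStub workflowTask) { this.workflowTask = workflowTask; }

    // Empty when the task has no WorkflowTask or no definition was prefetched,
    // which is exactly what the assertFalse/assertTrue pair in this test checks.
    Optional<TaskDefStub> getTaskDefinition() {
        return Optional.ofNullable(workflowTask).map(WorkflowTaskStub::getTaskDefinition);
    }
}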
task.setWorkflowTask(workflowTask); - assertEquals(taskDefinition, task.getTaskDefinition()); + assertTrue(task.getTaskDefinition().isPresent()); + assertEquals(taskDefinition, task.getTaskDefinition().get()); } } diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index 50b8696531..68f2f18ca5 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -39,8 +39,6 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.dao.MetadataDAO; - import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; @@ -50,6 +48,9 @@ import org.junit.BeforeClass; import org.junit.Test; +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.PrintWriter; @@ -59,10 +60,6 @@ import java.util.Set; import java.util.stream.Collectors; -import javax.servlet.ServletException; -import javax.servlet.http.HttpServletRequest; -import javax.servlet.http.HttpServletResponse; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; @@ -296,7 +293,6 @@ public void testOptional() throws Exception { workflow.setWorkflowDefinition(def); workflow.getTasks().add(task); - MetadataDAO metadataDAO = mock(MetadataDAO.class); ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); @@ -306,10 +302,10 @@ public void testOptional() throws Exception { taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); - new DeciderService(metadataDAO, taskMappers).decide(workflow); + new DeciderService(taskMappers).decide(workflow); System.out.println(workflow.getTasks()); System.out.println(workflow.getStatus()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 9160f6971e..9d3144333b 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -157,7 +157,7 @@ private DeciderOutcome decide(final Workflow workflow, List preScheduledTa if (workflowTask != null && workflowTask.isOptional()) { pendingTask.setStatus(COMPLETED_WITH_ERRORS); } else { - Task retryTask = retry(taskDefinition.get(), workflowTask, pendingTask, workflow); + Task retryTask = retry(taskDefinition.orElse(null), workflowTask, pendingTask, workflow); 
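/*
 * A small, self-contained sketch (hypothetical stub type, not Conductor code) of
 * the Optional idioms the surrounding hunks converge on: guarding with
 * isPresent()/get(), transforming with map(...).orElse(...), and unwrapping with
 * orElse(null) where a legacy nullable parameter such as retry(...)'s TaskDef is
 * still expected. On Java 9+, Optional.ifPresentOrElse(action, emptyAction) could
 * replace the explicit guard, as the TODO in the gRPC service hunk above notes.
 */
import java.util.Optional;

public class OptionalIdiomsSketch {

    static class TaskDef {
        int concurrencyLimit() { return 1; }
        String getName() { return "sample"; }
    }

    // Guard style, mirroring exceedsInProgressLimit(...) in the DAO hunks.
    static boolean hasConcurrencyLimit(Optional<TaskDef> taskDefinition) {
        if (!taskDefinition.isPresent()) {
            return false;
        }
        return taskDefinition.get().concurrencyLimit() > 0;
    }

    // map/orElse style, mirroring the recordRedisDaoPayloadSize(...) call site.
    static String nameOrDefault(Optional<TaskDef> taskDefinition) {
        return taskDefinition.map(TaskDef::getName).orElse("n/a");
    }

    public static void main(String[] args) {
        System.out.println(hasConcurrencyLimit(Optional.of(new TaskDef()))); // true
        System.out.println(nameOrDefault(Optional.empty()));                 // n/a
        // orElse(null) bridges to APIs that still accept a nullable TaskDef.
        TaskDef maybeNull = Optional.<TaskDef>empty().orElse(null);
        System.out.println(maybeNull == null);                               // true
    }
}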
tasksToBeScheduled.put(retryTask.getReferenceTaskName(), retryTask); executedTaskRefNames.remove(retryTask.getReferenceTaskName()); outcome.tasksToBeUpdated.add(pendingTask); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index f7ff8f18f5..3d2ab661e5 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java @@ -38,8 +38,6 @@ import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; import com.netflix.conductor.core.execution.tasks.Join; -import com.netflix.conductor.dao.MetadataDAO; - import org.junit.Before; import org.junit.Test; @@ -55,9 +53,6 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; -import static org.mockito.Matchers.any; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; /** * @author Viren @@ -79,27 +74,20 @@ public class TestDeciderOutcomes { @Before public void init() throws Exception { - - MetadataDAO metadataDAO = mock(MetadataDAO.class); - TaskDef taskDef = new TaskDef(); - taskDef.setRetryCount(1); - taskDef.setName("mockTaskDef"); - taskDef.setResponseTimeoutSeconds(0); - when(metadataDAO.getTaskDef(any())).thenReturn(taskDef); ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); - this.deciderService = new DeciderService(metadataDAO, taskMappers); + this.deciderService = new DeciderService(taskMappers); } @Test @@ -137,14 +125,15 @@ public void testRetries() { WorkflowDef def = new WorkflowDef(); def.setName("test"); - WorkflowTask task = new WorkflowTask(); - task.setName("test_task"); - task.setType("USER_TASK"); - task.setTaskReferenceName("t0"); - task.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); - task.getInputParameters().put("requestId", "${workflow.input.requestId}"); + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName("test_task"); + workflowTask.setType("USER_TASK"); + workflowTask.setTaskReferenceName("t0"); + workflowTask.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); + workflowTask.getInputParameters().put("requestId", "${workflow.input.requestId}"); + workflowTask.setTaskDefinition(new TaskDef("test_task")); -
def.getTasks().add(task); + def.getTasks().add(workflowTask); def.setSchemaVersion(2); Workflow workflow = new Workflow(); @@ -155,7 +144,7 @@ public void testRetries() { assertNotNull(outcome); assertEquals(1, outcome.tasksToBeScheduled.size()); - assertEquals(task.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); + assertEquals(workflowTask.getTaskReferenceName(), outcome.tasksToBeScheduled.get(0).getReferenceTaskName()); String task1Id = outcome.tasksToBeScheduled.get(0).getTaskId(); assertEquals(task1Id, outcome.tasksToBeScheduled.get(0).getInputData().get("taskId")); @@ -204,6 +193,7 @@ public void testRetries() { wft.setWorkflowTaskType(Type.SIMPLE); wft.getInputParameters().put("requestId", "${workflow.input.requestId}"); wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); + wft.setTaskDefinition(new TaskDef("f" + i)); forks.add(wft); Map input = new HashMap<>(); input.put("k", "v"); @@ -255,11 +245,13 @@ public void testOptional() { task1.setTaskReferenceName("t0"); task1.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); task1.setOptional(true); + task1.setTaskDefinition(new TaskDef("task0")); WorkflowTask task2 = new WorkflowTask(); task2.setName("task1"); task2.setType("SIMPLE"); task2.setTaskReferenceName("t1"); + task2.setTaskDefinition(new TaskDef("task1")); def.getTasks().add(task1); def.getTasks().add(task2); @@ -327,14 +319,15 @@ public void testOptionalWithDynamicFork() throws Exception { Map> forkedInputs = new HashMap<>(); for (int i = 0; i < 3; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("f" + i); - wft.setTaskReferenceName("f" + i); - wft.setWorkflowTaskType(Type.SIMPLE); - wft.setOptional(true); - forks.add(wft); - - forkedInputs.put(wft.getTaskReferenceName(), new HashMap<>()); + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName("f" + i); + workflowTask.setTaskReferenceName("f" + i); + workflowTask.setWorkflowTaskType(Type.SIMPLE); + workflowTask.setOptional(true); + workflowTask.setTaskDefinition(new TaskDef("f" + i)); + forks.add(workflowTask); + + forkedInputs.put(workflowTask.getTaskReferenceName(), new HashMap<>()); } workflow.getInput().put("forks", forks); workflow.getInput().put("forkedInputs", forkedInputs); @@ -381,17 +374,19 @@ public void testDecisionCases() { even.setName("even"); even.setType("SIMPLE"); even.setTaskReferenceName("even"); + even.setTaskDefinition(new TaskDef("even")); WorkflowTask odd = new WorkflowTask(); odd.setName("odd"); odd.setType("SIMPLE"); odd.setTaskReferenceName("odd"); + odd.setTaskDefinition(new TaskDef("odd")); WorkflowTask defaultt = new WorkflowTask(); defaultt.setName("defaultt"); defaultt.setType("SIMPLE"); defaultt.setTaskReferenceName("defaultt"); - + defaultt.setTaskDefinition(new TaskDef("defaultt")); WorkflowTask decide = new WorkflowTask(); decide.setName("decide"); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index f611891500..57fca8795f 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -16,7 +16,6 @@ package com.netflix.conductor.core.execution; import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; import com.netflix.conductor.common.metadata.tasks.Task; import 
com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -45,7 +44,6 @@ import com.netflix.spectator.api.DefaultRegistry; import com.netflix.spectator.api.Registry; import com.netflix.spectator.api.Spectator; - import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; @@ -111,17 +109,17 @@ public void setup() { parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); - deciderService = new DeciderService(metadataDAO, taskMappers); + deciderService = new DeciderService(taskMappers); } @Test @@ -670,6 +668,12 @@ public void testDecideFailedTask() throws Exception { task.setExecuted(false); task.setStatus(Status.FAILED); + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setTaskReferenceName("s1"); + workflowTask.setName("junit_task_l1"); + workflowTask.setTaskDefinition(new TaskDef("junit_task_l1")); + task.setWorkflowTask(workflowTask); + workflow.getTasks().add(task); DeciderOutcome deciderOutcome = deciderService.decide(workflow); @@ -693,8 +697,10 @@ public void testGetTasksToBeScheduled() throws Exception { workflow.setStatus(WorkflowStatus.RUNNING); WorkflowTask workflowTask1 = new WorkflowTask(); + workflowTask1.setName("s1"); workflowTask1.setTaskReferenceName("s1"); workflowTask1.setType(Type.SIMPLE.name()); + workflowTask1.setTaskDefinition(new TaskDef("s1")); List tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask1, 0, null); assertNotNull(tasksToBeScheduled); @@ -702,8 +708,10 @@ public void testGetTasksToBeScheduled() throws Exception { assertEquals("s1", tasksToBeScheduled.get(0).getReferenceTaskName()); WorkflowTask workflowTask2 = new WorkflowTask(); + workflowTask2.setName("s2"); workflowTask2.setTaskReferenceName("s2"); workflowTask2.setType(Type.SIMPLE.name()); + workflowTask2.setTaskDefinition(new TaskDef("s2")); tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask2, 0, null); assertNotNull(tasksToBeScheduled); assertEquals(1, tasksToBeScheduled.size()); @@ -736,6 +744,7 @@ private WorkflowDef createConditionalWF() throws Exception { inputParams1.put("p2", "workflow.input.param2"); workflowTask1.setInputParameters(inputParams1); workflowTask1.setTaskReferenceName("t1"); + workflowTask1.setTaskDefinition(new TaskDef("junit_task_1")); WorkflowTask workflowTask2 = new WorkflowTask(); workflowTask2.setName("junit_task_2"); @@ -743,6 +752,7 @@ private WorkflowDef createConditionalWF() throws 
Exception { inputParams2.put("tp1", "workflow.input.param1"); workflowTask2.setInputParameters(inputParams2); workflowTask2.setTaskReferenceName("t2"); + workflowTask2.setTaskDefinition(new TaskDef("junit_task_2")); WorkflowTask workflowTask3 = new WorkflowTask(); workflowTask3.setName("junit_task_3"); @@ -750,6 +760,7 @@ private WorkflowDef createConditionalWF() throws Exception { inputParams2.put("tp3", "workflow.input.param2"); workflowTask3.setInputParameters(inputParams3); workflowTask3.setTaskReferenceName("t3"); + workflowTask3.setTaskDefinition(new TaskDef("junit_task_3")); WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setName("Conditional Workflow"); @@ -784,6 +795,7 @@ private WorkflowDef createConditionalWF() throws Exception { WorkflowTask notifyTask = new WorkflowTask(); notifyTask.setName("junit_task_4"); notifyTask.setTaskReferenceName("junit_task_4"); + notifyTask.setTaskDefinition(new TaskDef("junit_task_4")); WorkflowTask finalDecisionTask = new WorkflowTask(); finalDecisionTask.setName("finalcondition"); @@ -809,11 +821,13 @@ private WorkflowDef createLinearWorkflow() { workflowTask1.setName("junit_task_l1"); workflowTask1.setInputParameters(inputParams); workflowTask1.setTaskReferenceName("s1"); + workflowTask1.setTaskDefinition(new TaskDef("junit_task_l1")); WorkflowTask workflowTask2 = new WorkflowTask(); workflowTask2.setName("junit_task_l2"); workflowTask2.setInputParameters(inputParams); workflowTask2.setTaskReferenceName("s2"); + workflowTask2.setTaskDefinition(new TaskDef("junit_task_l2")); WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setSchemaVersion(2); @@ -886,6 +900,7 @@ private WorkflowDef createNestedWorkflow() { workflowTask.setName("junit_task_" + i); workflowTask.setInputParameters(inputParams); workflowTask.setTaskReferenceName("t" + i); + workflowTask.setTaskDefinition(new TaskDef("junit_task_" + i)); tasks.add(workflowTask); } @@ -939,4 +954,5 @@ private WorkflowDef createNestedWorkflow() { return workflowDef; } + } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java index 1b0168978c..f47facf9a1 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java @@ -83,17 +83,18 @@ public void init() { ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils, metadataDAO)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils, metadataDAO)); + taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); + taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); + taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); taskMappers.put("WAIT", new 
WaitTaskMapper(parametersUtils)); - DeciderService deciderService = new DeciderService(metadataDAO, taskMappers); - workflowExecutor = new WorkflowExecutor(deciderService, metadataDAO, executionDAO, queueDAO, config); + DeciderService deciderService = new DeciderService(taskMappers); + MetadataMapperService metadataMapperService = new MetadataMapperService(metadataDAO); + workflowExecutor = new WorkflowExecutor(deciderService, metadataDAO, executionDAO, queueDAO, metadataMapperService, config); } @Test diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java index 245d7fd327..b2dc7580b3 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java @@ -8,7 +8,6 @@ import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.MetadataDAO; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -18,7 +17,7 @@ import java.util.List; import java.util.Map; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyMap; import static org.mockito.Matchers.anyString; @@ -27,7 +26,6 @@ public class DynamicTaskMapperTest { - MetadataDAO metadataDAO; ParametersUtils parametersUtils; DynamicTaskMapper dynamicTaskMapper; @@ -36,9 +34,8 @@ public class DynamicTaskMapperTest { @Before public void setUp() throws Exception { - metadataDAO = mock(MetadataDAO.class); parametersUtils = mock(ParametersUtils.class); - dynamicTaskMapper = new DynamicTaskMapper(parametersUtils, metadataDAO); + dynamicTaskMapper = new DynamicTaskMapper(parametersUtils); } @Test @@ -49,11 +46,11 @@ public void getMappedTasks() throws Exception { workflowTask.setDynamicTaskNameParam("dynamicTaskName"); TaskDef taskDef = new TaskDef(); taskDef.setName("DynoTask"); + workflowTask.setTaskDefinition(taskDef); Map taskInput = new HashMap<>(); taskInput.put("dynamicTaskName", "DynoTask"); - when(metadataDAO.getTaskDef("DynoTask")).thenReturn(taskDef); when(parametersUtils.getTaskInput(anyMap(), any(Workflow.class), any(TaskDef.class), anyString())).thenReturn(taskInput); String taskId = IDGenerator.generate(); @@ -101,7 +98,7 @@ public void getDynamicTaskDefinition() throws Exception { workflowTask.setName("Foo"); TaskDef taskDef = new TaskDef(); taskDef.setName("Foo"); - when(metadataDAO.getTaskDef("Foo")).thenReturn(taskDef); + workflowTask.setTaskDefinition(taskDef); //when TaskDef dynamicTaskDefinition = dynamicTaskMapper.getDynamicTaskDefinition(workflowTask); @@ -116,8 +113,6 @@ public void getDynamicTaskDefinitionNull() { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName("Foo"); - when(metadataDAO.getTaskDef("Foo")).thenReturn(null); - expectedException.expect(TerminateWorkflowException.class); expectedException.expectMessage(String.format("Invalid task specified. 
Cannot find task by name %s in the task definitions", workflowTask.getName())); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java index 1e1b20a544..9736fae407 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java @@ -8,7 +8,6 @@ import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.MetadataDAO; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -17,14 +16,13 @@ import java.util.HashMap; import java.util.List; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class SimpleTaskMapperTest { ParametersUtils parametersUtils; - MetadataDAO metadataDAO; //subject SimpleTaskMapper simpleTaskMapper; @@ -35,8 +33,7 @@ public class SimpleTaskMapperTest { @Before public void setUp() throws Exception { parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - simpleTaskMapper = new SimpleTaskMapper(parametersUtils, metadataDAO); + simpleTaskMapper = new SimpleTaskMapper(parametersUtils); } @Test @@ -44,12 +41,11 @@ public void getMappedTasks() throws Exception { WorkflowTask taskToSchedule = new WorkflowTask(); taskToSchedule.setName("simple_task"); + taskToSchedule.setTaskDefinition(new TaskDef("simple_task")); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); - when(metadataDAO.getTaskDef("simple_task")).thenReturn(new TaskDef()); - WorkflowDef wd = new WorkflowDef(); Workflow w = new Workflow(); w.setWorkflowDefinition(wd); @@ -76,7 +72,6 @@ public void getMappedTasksException() throws Exception { TaskMapperContext taskMapperContext = new TaskMapperContext(w, taskToSchedule, new HashMap<>(), 0, retriedTaskId, taskId, null); - when(metadataDAO.getTaskDef("simple_task")).thenReturn(null); //then expectedException.expect(TerminateWorkflowException.class); expectedException.expectMessage(String.format("Invalid task specified. 
Cannot find task by name %s in the task definitions", taskToSchedule.getName())); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java index dbb0f52fa2..d53e2e4612 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapperTest.java @@ -20,7 +20,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Optional; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -44,7 +43,7 @@ public class SubWorkflowTaskMapperTest { public void setUp() throws Exception { parametersUtils = mock(ParametersUtils.class); metadataDAO = mock(MetadataDAO.class); - subWorkflowTaskMapper = new SubWorkflowTaskMapper(parametersUtils, metadataDAO); + subWorkflowTaskMapper = new SubWorkflowTaskMapper(parametersUtils); deciderService = mock(DeciderService.class); } @@ -57,13 +56,13 @@ public void getMappedTasks() throws Exception { WorkflowTask taskToSchedule = new WorkflowTask(); SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); subWorkflowParams.setName("Foo"); - subWorkflowParams.setVersion("2"); + subWorkflowParams.setVersion(2); taskToSchedule.setSubWorkflowParam(subWorkflowParams); Map taskInput = new HashMap<>(); Map subWorkflowParamMap = new HashMap<>(); subWorkflowParamMap.put("name","FooWorkFlow"); - subWorkflowParamMap.put("version","2"); + subWorkflowParamMap.put("version",2); when(parametersUtils.getTaskInputV2(anyMap(), any(Workflow.class), anyString(), any(TaskDef.class))) .thenReturn(subWorkflowParamMap); @@ -87,7 +86,7 @@ public void getSubWorkflowParams() throws Exception { WorkflowTask workflowTask = new WorkflowTask(); SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); subWorkflowParams.setName("Foo"); - subWorkflowParams.setVersion("2"); + subWorkflowParams.setVersion(2); workflowTask.setSubWorkflowParam(subWorkflowParams); assertEquals(subWorkflowParams, subWorkflowTaskMapper.getSubWorkflowParams(workflowTask)); @@ -105,40 +104,4 @@ public void getExceptionWhenNoSubWorkflowParamsPassed() throws Exception { subWorkflowTaskMapper.getSubWorkflowParams(workflowTask); } - - @Test - public void getSubWorkflowVersion() throws Exception { - Map subWorkflowParamMap = new HashMap<>(); - subWorkflowParamMap.put("name","FooWorkFlow"); - subWorkflowParamMap.put("version","2"); - - Integer version = subWorkflowTaskMapper.getSubWorkflowVersion(subWorkflowParamMap, "FooWorkFlow"); - - assertEquals(version, Integer.valueOf(2)); - } - - @Test - public void getSubworkflowVersionFromMeta() throws Exception { - Map subWorkflowParamMap = new HashMap<>(); - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("FooWorkFlow"); - workflowDef.setVersion(2); - when(metadataDAO.getLatest(any())).thenReturn(Optional.of(workflowDef)); - - Integer version = subWorkflowTaskMapper.getSubWorkflowVersion(subWorkflowParamMap, "FooWorkFlow"); - - assertEquals(version, Integer.valueOf(2)); - } - - @Test - public void getSubworkflowVersionFromMetaException() throws Exception { - Map subWorkflowParamMap = new HashMap<>(); - when(metadataDAO.getLatest(any())).thenReturn(Optional.empty()); - - expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage(String.format("The Task %s defined as a sub-workflow has no 
workflow definition available ", "FooWorkFlow")); - - subWorkflowTaskMapper.getSubWorkflowVersion(subWorkflowParamMap, "FooWorkFlow"); - } - } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java index c7f38a1d56..257d062716 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java @@ -8,7 +8,6 @@ import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.utils.IDGenerator; -import com.netflix.conductor.dao.MetadataDAO; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -17,16 +16,12 @@ import java.util.HashMap; import java.util.List; -import static org.junit.Assert.*; +import static org.junit.Assert.assertEquals; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class UserDefinedTaskMapperTest { ParametersUtils parametersUtils; - MetadataDAO metadataDAO; - - //subject UserDefinedTaskMapper userDefinedTaskMapper; @Rule @@ -35,8 +30,7 @@ public class UserDefinedTaskMapperTest { @Before public void setUp() throws Exception { parametersUtils = mock(ParametersUtils.class); - metadataDAO = mock(MetadataDAO.class); - userDefinedTaskMapper = new UserDefinedTaskMapper(parametersUtils, metadataDAO); + userDefinedTaskMapper = new UserDefinedTaskMapper(parametersUtils); } @Test @@ -45,9 +39,9 @@ public void getMappedTasks() throws Exception { WorkflowTask taskToSchedule = new WorkflowTask(); taskToSchedule.setName("user_task"); taskToSchedule.setType(WorkflowTask.Type.USER_DEFINED.name()); + taskToSchedule.setTaskDefinition(new TaskDef("user_task")); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); - when(metadataDAO.getTaskDef("user_task")).thenReturn(new TaskDef()); WorkflowDef wd = new WorkflowDef(); Workflow w = new Workflow(); @@ -71,7 +65,6 @@ public void getMappedTasksException() throws Exception { taskToSchedule.setType(WorkflowTask.Type.USER_DEFINED.name()); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); - when(metadataDAO.getTaskDef("user_task")).thenReturn(null); WorkflowDef wd = new WorkflowDef(); Workflow w = new Workflow(); diff --git a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java index 0b480b16c8..f38b2f2369 100644 --- a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java +++ b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java @@ -4,9 +4,9 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ApplicationException; - import org.apache.commons.lang3.builder.EqualsBuilder; import org.junit.Rule; import org.junit.Test; @@ -31,17 +31,19 @@ public abstract class ExecutionDAOTest { abstract protected ExecutionDAO getExecutionDAO(); - abstract protected MetadataDAO getMetadataDAO(); - @Rule public ExpectedException expected = 
ExpectedException.none(); @Test public void testTaskExceedsLimit() throws Exception { - TaskDef def = new TaskDef(); - def.setName("task1"); - def.setConcurrentExecLimit(1); - getMetadataDAO().createTaskDef(def); + TaskDef taskDefinition = new TaskDef(); + taskDefinition.setName("task1"); + taskDefinition.setConcurrentExecLimit(1); + + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName("task1"); + workflowTask.setTaskDefinition(taskDefinition); + workflowTask.setTaskDefinition(taskDefinition); List tasks = new LinkedList<>(); for (int i = 0; i < 15; i++) { @@ -54,6 +56,7 @@ public void testTaskExceedsLimit() throws Exception { task.setTaskDefName("task1"); tasks.add(task); task.setStatus(Task.Status.SCHEDULED); + task.setWorkflowTask(workflowTask); } getExecutionDAO().createTasks(tasks); diff --git a/core/src/test/resources/conditional_flow.json b/core/src/test/resources/conditional_flow.json index 2f057b756b..d3345892e2 100644 --- a/core/src/test/resources/conditional_flow.json +++ b/core/src/test/resources/conditional_flow.json @@ -31,13 +31,51 @@ "p2": "workflow.input.param2" }, "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "junit_task_1", + "description": "junit_task_1", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } }, { "name": "junit_task_3", "taskReferenceName": "t3", "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "junit_task_3", + "description": "junit_task_3", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } } ], "two": [ @@ -49,7 +87,26 @@ "tp3": "workflow.input.param2" }, "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "junit_task_2", + "description": "junit_task_2", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } } ] }, @@ -61,7 +118,26 @@ "name": "junit_task_3", "taskReferenceName": "t3", "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "junit_task_3", + "description": "junit_task_3", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } } ] }, @@ -74,7 +150,26 @@ "tp3": "workflow.input.param2" }, "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + 
"updatedBy": null, + "name": "junit_task_2", + "description": "junit_task_2", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } } ], "startDelay": 0 @@ -93,7 +188,26 @@ "name": "junit_task_4", "taskReferenceName": "junit_task_4", "type": "SIMPLE", - "startDelay": 0 + "startDelay": 0, + "taskDefinition": { + "ownerApp": null, + "createTime": null, + "updateTime": null, + "createdBy": null, + "updatedBy": null, + "name": "junit_task_4", + "description": "junit_task_4", + "retryCount": 1, + "timeoutSeconds": 0, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 3600, + "concurrentExecLimit": null, + "inputTemplate": {} + } } ] }, diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java index 3445494598..d6971ce719 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java @@ -20,19 +20,12 @@ public class MySQLExecutionDAOTest extends ExecutionDAOTest { private final MySQLDAOTestUtil testMySQL = new MySQLDAOTestUtil(); - private MySQLMetadataDAO metadata; - private MySQLExecutionDAO dao; + private MySQLExecutionDAO executionDAO; @Before public void setup() throws Exception { - metadata = new MySQLMetadataDAO( - testMySQL.getObjectMapper(), - testMySQL.getDataSource(), - testMySQL.getTestConfiguration() - ); - dao = new MySQLExecutionDAO( + executionDAO = new MySQLExecutionDAO( mock(IndexDAO.class), - metadata, testMySQL.getObjectMapper(), testMySQL.getDataSource() ); @@ -59,11 +52,7 @@ public void testPendingByCorrelationId() throws Exception { @Override public ExecutionDAO getExecutionDAO() { - return dao; + return executionDAO; } - @Override - public MetadataDAO getMetadataDAO() { - return metadata; - } } diff --git a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java index 8cdb9f6497..c93af8e43e 100644 --- a/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java +++ b/redis-persistence/src/test/java/com/netflix/conductor/dao/dynomite/RedisExecutionDAOTest.java @@ -25,21 +25,18 @@ import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.ExecutionDAOTest; import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.redis.JedisMock; import com.netflix.conductor.dyno.DynoProxy; - import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; +import redis.clients.jedis.JedisCommands; import java.util.Collections; import java.util.List; -import redis.clients.jedis.JedisCommands; - import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.mockito.Matchers.any; @@ -53,8 +50,6 @@ @RunWith(MockitoJUnitRunner.class) public class RedisExecutionDAOTest extends ExecutionDAOTest { - private RedisMetadataDAO metadataDAO; - 
private RedisExecutionDAO executionDAO; @Mock @@ -69,8 +64,7 @@ public void init() throws Exception { JedisCommands jedisMock = new JedisMock(); DynoProxy dynoClient = new DynoProxy(jedisMock); - metadataDAO = new RedisMetadataDAO(dynoClient, objectMapper, config); - executionDAO = new RedisExecutionDAO(dynoClient, objectMapper, mock(IndexDAO.class), metadataDAO, config); + executionDAO = new RedisExecutionDAO(dynoClient, objectMapper, mock(IndexDAO.class), config); // Ignore indexing in Redis tests. doNothing().when(indexDAO).indexTask(any(Task.class)); @@ -86,7 +80,6 @@ public void testCorrelateTaskToWorkflowInDS() throws Exception { TaskDef def = new TaskDef(); def.setName("task1"); def.setConcurrentExecLimit(1); - metadataDAO.createTaskDef(def); Task task = new Task(); task.setTaskId(taskId); @@ -111,8 +104,4 @@ protected ExecutionDAO getExecutionDAO() { return executionDAO; } - @Override - protected MetadataDAO getMetadataDAO() { - return metadataDAO; - } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index 563e5bd266..ab4cc9858f 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -38,6 +38,7 @@ import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.execution.ApplicationException; +import com.netflix.conductor.core.execution.MetadataMapperService; import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.core.execution.WorkflowSweeper; @@ -124,6 +125,9 @@ public class WorkflowServiceTest { @Inject private WorkflowExecutor workflowExecutor; + @Inject + private MetadataMapperService metadataMapperService; + private static boolean registered; private static List taskDefs; @@ -623,17 +627,17 @@ public void testDynamicForkJoinLegacy() throws Exception { Task t1 = workflowExecutionService.poll("junit_task_1", "test"); //assertTrue(ess.ackTaskRecieved(t1.getTaskId(), "test")); - DynamicForkJoinTaskList dtasks = new DynamicForkJoinTaskList(); + DynamicForkJoinTaskList dynamicForkJoinTasks = new DynamicForkJoinTaskList(); input = new HashMap(); input.put("k1", "v1"); - dtasks.add("junit_task_2", null, "xdt1", input); + dynamicForkJoinTasks.add("junit_task_2", null, "xdt1", input); HashMap input2 = new HashMap(); input2.put("k2", "v2"); - dtasks.add("junit_task_3", null, "xdt2", input2); + dynamicForkJoinTasks.add("junit_task_3", null, "xdt2", input2); - t1.getOutputData().put("dynamicTasks", dtasks); + t1.getOutputData().put("dynamicTasks", dynamicForkJoinTasks); t1.setStatus(COMPLETED); workflowExecutionService.updateTask(t1); @@ -1056,6 +1060,8 @@ private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { def.getTasks().add(fanout); def.getTasks().add(join); + metadataMapperService.populateTaskDefinitions(def) + metadataService.updateWorkflowDef(def); } @@ -1069,6 +1075,7 @@ private void createConditionalWF() throws Exception { ip1.put("p2", "workflow.input.param2"); wft1.setInputParameters(ip1); wft1.setTaskReferenceName("t1"); + wft1.setTaskDefinition(new TaskDef("junit_task_1")); WorkflowTask wft2 = new WorkflowTask(); wft2.setName("junit_task_2"); @@ -1076,6 +1083,7 @@ private void 
createConditionalWF() throws Exception { ip2.put("tp1", "workflow.input.param1"); wft2.setInputParameters(ip2); wft2.setTaskReferenceName("t2"); + wft2.setTaskDefinition(new TaskDef("junit_task_2")); WorkflowTask wft3 = new WorkflowTask(); wft3.setName("junit_task_3"); @@ -1083,6 +1091,7 @@ private void createConditionalWF() throws Exception { ip2.put("tp3", "workflow.input.param2"); wft3.setInputParameters(ip3); wft3.setTaskReferenceName("t3"); + wft3.setTaskDefinition(new TaskDef("junit_task_3")); WorkflowDef def2 = new WorkflowDef(); def2.setName(COND_TASK_WF); @@ -1117,6 +1126,7 @@ private void createConditionalWF() throws Exception { WorkflowTask notifyTask = new WorkflowTask(); notifyTask.setName("junit_task_4"); notifyTask.setTaskReferenceName("junit_task_4"); + notifyTask.setTaskDefinition(new TaskDef("junit_task_4")); WorkflowTask finalTask = new WorkflowTask(); finalTask.setName("finalcondition"); From 4b11285ebbcf45d00c048bee85a9483ed6a75d27 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Sun, 5 Aug 2018 18:29:01 -0700 Subject: [PATCH 121/163] Updated tests in order to support dynamic fork join cases --- .../metadata/workflow/WorkflowTask.java | 4 ++++ .../conductor/contribs/http/TestHttpTask.java | 6 +++++- .../conductor/core/config/CoreModule.java | 5 +++-- .../core/execution/WorkflowExecutor.java | 1 + .../mapper/ForkJoinDynamicTaskMapper.java | 19 +++++++++++++++---- .../MetadataMapperService.java | 5 +++-- .../core/execution/TestDeciderOutcomes.java | 10 +++++++++- .../core/execution/TestDeciderService.java | 2 +- .../core/execution/TestWorkflowExecutor.java | 3 ++- .../mapper/ForkJoinDynamicTaskMapperTest.java | 5 ++++- .../tests/integration/End2EndTests.java | 5 ++++- .../integration/WorkflowServiceTest.java | 6 ++++-- 12 files changed, 55 insertions(+), 16 deletions(-) rename core/src/main/java/com/netflix/conductor/core/{execution => metadata}/MetadataMapperService.java (92%) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 1f09b1deed..5453600f17 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -561,6 +561,10 @@ public WorkflowTask get(String taskReferenceName){ return null; } + + public boolean shouldPopulateDefinition() { + return getType().equals(WorkflowTask.Type.SIMPLE.name()) && getTaskDefinition() == null; + } @Override public String toString() { diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index 68f2f18ca5..7d7f7d1b9d 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -39,6 +39,7 @@ import com.netflix.conductor.core.execution.mapper.TaskMapper; import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; +import com.netflix.conductor.dao.MetadataDAO; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; @@ -88,6 +89,8 @@ public class TestHttpTask { private Workflow workflow = new Workflow(); + private MetadataDAO metadataDAO; + private static 
Server server; private static ObjectMapper objectMapper = new ObjectMapper(); @@ -121,6 +124,7 @@ public static void cleanup() { public void setup() { RestClientManager rcm = new RestClientManager(); Configuration config = mock(Configuration.class); + metadataDAO = mock(MetadataDAO.class); when(config.getServerId()).thenReturn("test_server_id"); httpTask = new HttpTask(rcm, config); } @@ -299,7 +303,7 @@ public void testOptional() throws Exception { taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO)); taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index bfd3c9f35b..08ea314104 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -47,6 +47,7 @@ import com.netflix.conductor.core.execution.tasks.SubWorkflow; import com.netflix.conductor.core.execution.tasks.SystemTaskWorkerCoordinator; import com.netflix.conductor.core.execution.tasks.Wait; +import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; @@ -112,8 +113,8 @@ public TaskMapper getJoinTaskMapper() { @StringMapKey("FORK_JOIN_DYNAMIC") @Singleton @Named("TaskMappers") - public TaskMapper getForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper objectMapper) { - return new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper); + public TaskMapper getForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper objectMapper, MetadataDAO metadataDAO) { + return new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO); } @ProvidesIntoMap diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index ba411be6b0..a58aeaa736 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -33,6 +33,7 @@ import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; +import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.dao.ExecutionDAO; diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java index c6d8acb3fe..3c32c2b5e6 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java @@ -15,10 +15,9 @@ */ package 
com.netflix.conductor.core.execution.mapper; -import com.google.common.annotations.VisibleForTesting; - import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; @@ -28,7 +27,7 @@ import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.utils.IDGenerator; - +import com.netflix.conductor.dao.MetadataDAO; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; @@ -54,12 +53,15 @@ public class ForkJoinDynamicTaskMapper implements TaskMapper { private ObjectMapper objectMapper; + private MetadataDAO metadataDAO; + private static final TypeReference> ListOfWorkflowTasks = new TypeReference>() { }; - public ForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper objectMapper) { + public ForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper objectMapper, MetadataDAO metadataDAO) { this.parametersUtils = parametersUtils; this.objectMapper = objectMapper; + this.metadataDAO = metadataDAO; } /** @@ -236,6 +238,11 @@ Pair, Map>> getDynamicForkTasksAn Map input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, null, null); Object dynamicForkTasksJson = input.get(dynamicForkTaskParam); List dynamicForkWorkflowTasks = objectMapper.convertValue(dynamicForkTasksJson, ListOfWorkflowTasks); + for (WorkflowTask workflowTask : dynamicForkWorkflowTasks) { + if (workflowTask.shouldPopulateDefinition()) { + workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); + } + } Object dynamicForkTasksInput = input.get(taskToSchedule.getDynamicForkTasksInputParamName()); if (!(dynamicForkTasksInput instanceof Map)) { throw new TerminateWorkflowException("Input to the dynamically forked tasks is not a map -> expecting a map of K,V but found " + dynamicForkTasksInput); @@ -275,6 +282,10 @@ Pair, Map>> getDynamicForkJoinTas dynamicForkJoinWorkflowTask.setTaskReferenceName(dynamicForkJoinTask.getReferenceName()); dynamicForkJoinWorkflowTask.setName(dynamicForkJoinTask.getTaskName()); dynamicForkJoinWorkflowTask.setType(dynamicForkJoinTask.getType()); + if (dynamicForkJoinWorkflowTask.shouldPopulateDefinition()) { + dynamicForkJoinWorkflowTask.setTaskDefinition( + metadataDAO.getTaskDef(dynamicForkJoinTask.getTaskName())); + } return dynamicForkJoinWorkflowTask; }) .collect(Collectors.toCollection(LinkedList::new)); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java similarity index 92% rename from core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java rename to core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index 37d496ec5c..cb1353f87e 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -1,9 +1,10 @@ -package com.netflix.conductor.core.execution; +package com.netflix.conductor.core.metadata; import com.google.inject.Singleton; import 
com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.dao.MetadataDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,7 +29,7 @@ public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { for (WorkflowTask workflowTask : workflowTasks) { - if (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null) { + if (workflowTask.shouldPopulateDefinition()) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); } else if (workflowTask.getType().equals(WorkflowTask.Type.SUB_WORKFLOW.name())) { populateVersionForSubWorkflow(workflowTask); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index 3d2ab661e5..e196335a72 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java @@ -38,8 +38,10 @@ import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; import com.netflix.conductor.core.execution.tasks.Join; +import com.netflix.conductor.dao.MetadataDAO; import org.junit.Before; import org.junit.Test; +import org.mockito.Mockito; import java.io.InputStream; import java.util.Arrays; @@ -53,6 +55,9 @@ import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotSame; import static org.junit.Assert.assertTrue; +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.when; /** * @author Viren @@ -60,6 +65,7 @@ */ public class TestDeciderOutcomes { + private MetadataDAO metadataDAO; private DeciderService deciderService; private static ObjectMapper objectMapper = new ObjectMapper(); @@ -74,13 +80,15 @@ public class TestDeciderOutcomes { @Before public void init() throws Exception { + metadataDAO = Mockito.mock(MetadataDAO.class); + when(metadataDAO.getTaskDef(anyString())).thenReturn(new TaskDef()); ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); taskMappers.put("DECISION", new DecisionTaskMapper()); taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO)); taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index 57fca8795f..13de9a49c1 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -112,7 +112,7 @@ 
public void setup() { taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO)); taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java index f47facf9a1..d1e2e6c731 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java @@ -38,6 +38,7 @@ import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; import com.netflix.conductor.core.execution.tasks.Wait; import com.netflix.conductor.core.execution.tasks.WorkflowSystemTask; +import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.MetadataDAO; @@ -86,7 +87,7 @@ public void init() { taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper)); + taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO)); taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java index e4b1a69e61..623a00b174 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java @@ -13,6 +13,7 @@ import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.utils.IDGenerator; +import com.netflix.conductor.dao.MetadataDAO; import org.apache.commons.lang3.tuple.Pair; import org.junit.Before; import org.junit.Rule; @@ -36,6 +37,7 @@ public class ForkJoinDynamicTaskMapperTest { + private MetadataDAO metadataDAO; private ParametersUtils parametersUtils; private ObjectMapper objectMapper; private DeciderService deciderService; @@ -47,11 +49,12 @@ public class ForkJoinDynamicTaskMapperTest { @Before public void setUp() throws Exception { + metadataDAO = Mockito.mock(MetadataDAO.class); parametersUtils = Mockito.mock(ParametersUtils.class); objectMapper = Mockito.mock(ObjectMapper.class); deciderService = Mockito.mock(DeciderService.class); - forkJoinDynamicTaskMapper = new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper); + forkJoinDynamicTaskMapper = new 
ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO); } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index cbfdfa0d03..6c69774ca3 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -49,6 +49,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -98,7 +99,9 @@ public static void teardown() throws Exception { public void testAll() throws Exception { List definitions = createAndRegisterTaskDefinitions("t", 5); - List found = taskClient.getTaskDef(); + List found = taskClient.getTaskDef().stream() + .filter(taskDefinition -> taskDefinition.getName().startsWith("t")) + .collect(Collectors.toList()); assertNotNull(found); assertEquals(definitions.size(), found.size()); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index ab4cc9858f..c3b3f01690 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -38,7 +38,7 @@ import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.core.execution.MetadataMapperService; +import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.WorkflowExecutor; import com.netflix.conductor.core.execution.WorkflowSweeper; @@ -1025,6 +1025,8 @@ private void createDynamicForkJoinWorkflowDefs() throws Exception { def.getTasks().add(join); def.getTasks().add(workflowTask4); + metadataMapperService.populateTaskDefinitions(def); + metadataService.updateWorkflowDef(def); } @@ -1060,7 +1062,7 @@ private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { def.getTasks().add(fanout); def.getTasks().add(join); - metadataMapperService.populateTaskDefinitions(def) + metadataMapperService.populateTaskDefinitions(def); metadataService.updateWorkflowDef(def); From a1e6fc12a20a41df9021774e0c6b61e2bbb80059 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Sun, 5 Aug 2018 19:55:23 -0700 Subject: [PATCH 122/163] Added tests for MetadataMapperService --- .../mapper/DynamicTaskMapperTest.java | 4 +- .../execution/mapper/EventTaskMapperTest.java | 2 - .../mapper/ForkJoinTaskMapperTest.java | 4 +- .../mapper/SimpleTaskMapperTest.java | 6 +- .../mapper/UserDefinedTaskMapperTest.java | 4 +- .../execution/mapper/WaitTaskMapperTest.java | 4 - .../metadata/MetadataMapperServiceTest.java | 131 ++++++++++++++++++ 7 files changed, 139 insertions(+), 16 deletions(-) create mode 100644 core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java index 
b2dc7580b3..8fd5dd7961 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapperTest.java @@ -26,8 +26,8 @@ public class DynamicTaskMapperTest { - ParametersUtils parametersUtils; - DynamicTaskMapper dynamicTaskMapper; + private ParametersUtils parametersUtils; + private DynamicTaskMapper dynamicTaskMapper; @Rule public ExpectedException expectedException = ExpectedException.none(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java index ce5398f3a7..0fa2810b85 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/EventTaskMapperTest.java @@ -22,8 +22,6 @@ public class EventTaskMapperTest { - - @Test public void getMappedTasks() throws Exception { ParametersUtils parametersUtils = Mockito.mock(ParametersUtils.class); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java index fd016e2b7b..70b8378f30 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java @@ -23,9 +23,9 @@ public class ForkJoinTaskMapperTest { - DeciderService deciderService; + private DeciderService deciderService; - ForkJoinTaskMapper forkJoinTaskMapper; + private ForkJoinTaskMapper forkJoinTaskMapper; @Rule public ExpectedException expectedException = ExpectedException.none(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java index 9736fae407..91fe3d0c53 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java @@ -22,10 +22,8 @@ public class SimpleTaskMapperTest { - ParametersUtils parametersUtils; - - //subject - SimpleTaskMapper simpleTaskMapper; + private ParametersUtils parametersUtils; + private SimpleTaskMapper simpleTaskMapper; @Rule public ExpectedException expectedException = ExpectedException.none(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java index 257d062716..2cb30381fe 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java @@ -21,8 +21,8 @@ public class UserDefinedTaskMapperTest { - ParametersUtils parametersUtils; - UserDefinedTaskMapper userDefinedTaskMapper; + private ParametersUtils parametersUtils; + private UserDefinedTaskMapper userDefinedTaskMapper; @Rule public ExpectedException expectedException = ExpectedException.none(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java index 6fcf7d9e17..3bc71d988d 100644 --- 
a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java @@ -16,7 +16,6 @@ public class WaitTaskMapperTest { - @Test public void getMappedTasks() throws Exception { @@ -41,9 +40,6 @@ public void getMappedTasks() throws Exception { assertEquals(1, mappedTasks.size()); assertEquals(Wait.NAME, mappedTasks.get(0).getTaskType()); - - - } } diff --git a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java new file mode 100644 index 0000000000..d09d23b59b --- /dev/null +++ b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java @@ -0,0 +1,131 @@ +package com.netflix.conductor.metadata; + +import com.google.common.collect.ImmutableList; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.core.metadata.MetadataMapperService; +import com.netflix.conductor.dao.MetadataDAO; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.runners.MockitoJUnitRunner; + +import java.util.List; + +import static junit.framework.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.verifyZeroInteractions; +import static org.mockito.Mockito.when; + +@RunWith(MockitoJUnitRunner.class) +public class MetadataMapperServiceTest { + + @Mock + private MetadataDAO metadataDAO; + + @InjectMocks + private MetadataMapperService metadataMapperService; + + @Test + public void testMetadataPopulationOnSimpleTask() { + String nameTaskDefinition = "task1"; + TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition); + WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); + + when(metadataDAO.getTaskDef(nameTaskDefinition)).thenReturn(taskDefinition); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask)); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + assertEquals(1, workflowDefinition.getTasks().size()); + WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0); + assertNotNull(populatedWorkflowTask.getTaskDefinition()); + verify(metadataDAO).getTaskDef(nameTaskDefinition); + } + + @Test + public void testNoMetadataPopulationOnEmbeddedTaskDefinition() { + String nameTaskDefinition = "task2"; + TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition); + WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); + workflowTask.setTaskDefinition(taskDefinition); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask)); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + assertEquals(1, workflowDefinition.getTasks().size()); + WorkflowTask populatedWorkflowTask = workflowDefinition.getTasks().get(0); + assertNotNull(populatedWorkflowTask.getTaskDefinition()); + verifyZeroInteractions(metadataDAO); + } + + @Test + public void testMetadataPopulationOnlyOnNecessaryWorkflowTasks() { + 
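// Given one workflow task that already carries an embedded task definition
// and one that does not, only the latter should be resolved through the
// MetadataDAO; the verify() / verifyNoMoreInteractions() assertions below
// pin down that exactly one lookup happens.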
String nameTaskDefinition1 = "task4"; + TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1); + WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1); + workflowTask1.setTaskDefinition(taskDefinition); + + String nameTaskDefinition2 = "task5"; + WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask1, workflowTask2)); + + when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(taskDefinition); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + assertEquals(2, workflowDefinition.getTasks().size()); + List workflowTasks = workflowDefinition.getTasks(); + assertNotNull(workflowTasks.get(0).getTaskDefinition()); + assertNotNull(workflowTasks.get(1).getTaskDefinition()); + + verify(metadataDAO).getTaskDef(nameTaskDefinition2); + verifyNoMoreInteractions(metadataDAO); + } + + @Test + public void testVersionPopulationForSubworkflowTaskIfNotAvailable() { + // TODO + } + + @Test + public void testNoVersionPopulationForSubworkflowTaskIfAvailable() { + // TODO + } + + + @Test + public void testExceptionWhenWorkflowDefinitionNotAvailable() { + // TODO + } + + + private WorkflowDef createWorkflowDefinition(String name) { + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName(name); + return workflowDefinition; + } + + private WorkflowTask createWorkflowTask(String name) { + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName(name); + workflowTask.setType(WorkflowTask.Type.SIMPLE.name()); + return workflowTask; + } + + private TaskDef createTaskDefinition(String name) { + TaskDef taskDefinition = new TaskDef(name); + return taskDefinition; + } + +} From 9cb5ecb695f629a240a6e0d8c38c2640d1f5b4f3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 6 Aug 2018 15:09:45 -0700 Subject: [PATCH 123/163] Internal review changes - Code style changes - Workflow termination logic change: latest workflow definition takes priority over embedded one - Test refactoring and getting rid of mocks that are not used - Better use of Optional --- .../conductor/common/metadata/tasks/Task.java | 6 +-- .../conductor/contribs/http/TestHttpTask.java | 26 ------------ .../core/execution/WorkflowExecutor.java | 40 +++++++++---------- .../mapper/ForkJoinDynamicTaskMapper.java | 6 +-- .../execution/mapper/SimpleTaskMapper.java | 2 +- .../mapper/SubWorkflowTaskMapper.java | 3 +- .../core/metadata/MetadataMapperService.java | 19 +++++---- .../mapper/SimpleTaskMapperTest.java | 2 +- 8 files changed, 38 insertions(+), 66 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index 6dd899aaf1..ec04f4e887 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -598,9 +598,9 @@ public void setOutputMessage(Any outputMessage) { * @return {@link Optional} containing the task definition if available */ public Optional getTaskDefinition() { - return this.getWorkflowTask() != null ? 
- Optional.ofNullable(this.getWorkflowTask().getTaskDefinition()) : - Optional.empty(); + return Optional.ofNullable(this.getWorkflowTask()) + .map(workflowTask -> Optional.ofNullable(workflowTask.getTaskDefinition())) + .orElse(Optional.empty()); } public Task copy() { diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index 7d7f7d1b9d..dda28a16b7 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -26,20 +26,8 @@ import com.netflix.conductor.contribs.http.HttpTask.Input; import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.execution.DeciderService; -import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; -import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper; -import com.netflix.conductor.core.execution.mapper.EventTaskMapper; -import com.netflix.conductor.core.execution.mapper.ForkJoinDynamicTaskMapper; -import com.netflix.conductor.core.execution.mapper.ForkJoinTaskMapper; -import com.netflix.conductor.core.execution.mapper.JoinTaskMapper; -import com.netflix.conductor.core.execution.mapper.SimpleTaskMapper; -import com.netflix.conductor.core.execution.mapper.SubWorkflowTaskMapper; import com.netflix.conductor.core.execution.mapper.TaskMapper; -import com.netflix.conductor.core.execution.mapper.UserDefinedTaskMapper; -import com.netflix.conductor.core.execution.mapper.WaitTaskMapper; -import com.netflix.conductor.dao.MetadataDAO; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.AbstractHandler; @@ -89,8 +77,6 @@ public class TestHttpTask { private Workflow workflow = new Workflow(); - private MetadataDAO metadataDAO; - private static Server server; private static ObjectMapper objectMapper = new ObjectMapper(); @@ -124,7 +110,6 @@ public static void cleanup() { public void setup() { RestClientManager rcm = new RestClientManager(); Configuration config = mock(Configuration.class); - metadataDAO = mock(MetadataDAO.class); when(config.getServerId()).thenReturn("test_server_id"); httpTask = new HttpTask(rcm, config); } @@ -297,18 +282,7 @@ public void testOptional() throws Exception { workflow.setWorkflowDefinition(def); workflow.getTasks().add(task); - ParametersUtils parametersUtils = new ParametersUtils(); Map taskMappers = new HashMap<>(); - taskMappers.put("DECISION", new DecisionTaskMapper()); - taskMappers.put("DYNAMIC", new DynamicTaskMapper(parametersUtils)); - taskMappers.put("FORK_JOIN", new ForkJoinTaskMapper()); - taskMappers.put("JOIN", new JoinTaskMapper()); - taskMappers.put("FORK_JOIN_DYNAMIC", new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO)); - taskMappers.put("USER_DEFINED", new UserDefinedTaskMapper(parametersUtils)); - taskMappers.put("SIMPLE", new SimpleTaskMapper(parametersUtils)); - taskMappers.put("SUB_WORKFLOW", new SubWorkflowTaskMapper(parametersUtils)); - taskMappers.put("EVENT", new EventTaskMapper(parametersUtils)); - taskMappers.put("WAIT", new WaitTaskMapper(parametersUtils)); new DeciderService(taskMappers).decide(workflow); System.out.println(workflow.getTasks()); diff --git 
a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index a58aeaa736..b1baecc146 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -17,7 +17,6 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; - import com.netflix.conductor.annotations.Trace; import com.netflix.conductor.common.metadata.tasks.PollData; import com.netflix.conductor.common.metadata.tasks.Task; @@ -40,11 +39,11 @@ import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; import com.netflix.conductor.metrics.Monitors; - import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.inject.Inject; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -56,8 +55,6 @@ import java.util.function.Predicate; import java.util.stream.Collectors; -import javax.inject.Inject; - import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.netflix.conductor.common.metadata.tasks.Task.Status.CANCELED; @@ -79,23 +76,23 @@ public class WorkflowExecutor { private static final Logger logger = LoggerFactory.getLogger(WorkflowExecutor.class); - private MetadataDAO metadataDAO; + private final MetadataDAO metadataDAO; - private ExecutionDAO executionDAO; + private final ExecutionDAO executionDAO; - private QueueDAO queueDAO; + private final QueueDAO queueDAO; - private DeciderService deciderService; + private final DeciderService deciderService; - private Configuration config; + private final Configuration config; - private ParametersUtils parametersUtils = new ParametersUtils(); + private final MetadataMapperService metadataMapperService; - private int activeWorkerLastPollnSecs; + private final ParametersUtils parametersUtils = new ParametersUtils(); - public static final String DECIDER_QUEUE = "_deciderQueue"; + private int activeWorkerLastPollInSecs; - public MetadataMapperService metadataMapperService; + public static final String DECIDER_QUEUE = "_deciderQueue"; @Inject @@ -113,7 +110,7 @@ public WorkflowExecutor( this.queueDAO = queueDAO; this.config = config; this.metadataMapperService = metadataMapperService; - activeWorkerLastPollnSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); + activeWorkerLastPollInSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); } /** @@ -245,7 +242,7 @@ public String startWorkflow( ); } - private final Predicate validateLastPolledTime = pd -> pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollnSecs * 1000); + private final Predicate validateLastPolledTime = pd -> pd.getLastPollTime() > System.currentTimeMillis() - (activeWorkerLastPollInSecs * 1000); private final Predicate isSystemTask = task -> SystemTaskType.is(task.getTaskType()); @@ -580,12 +577,13 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo try { - WorkflowDef latestFailureWorkflow = Optional.of(workflow.getWorkflowDefinition()).orElse( - metadataDAO.getLatest(failureWorkflow) - .orElseThrow(() -> - new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) - ) - ); + WorkflowDef latestFailureWorkflow = + metadataDAO.getLatest(failureWorkflow).orElse( + 
Optional.ofNullable(workflow.getWorkflowDefinition()) + .orElseThrow(() -> + new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) + ) + ); String failureWFId = startWorkflow( latestFailureWorkflow, diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java index 3c32c2b5e6..d94a83371b 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java @@ -49,11 +49,11 @@ public class ForkJoinDynamicTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(ForkJoinDynamicTaskMapper.class); - private ParametersUtils parametersUtils; + private final ParametersUtils parametersUtils; - private ObjectMapper objectMapper; + private final ObjectMapper objectMapper; - private MetadataDAO metadataDAO; + private final MetadataDAO metadataDAO; private static final TypeReference> ListOfWorkflowTasks = new TypeReference>() { }; diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java index b694196650..4e09834fa4 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java @@ -67,7 +67,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter TaskDef taskDefinition = Optional.ofNullable(taskToSchedule.getTaskDefinition()) .orElseThrow(() -> { - String reason = String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName()); + String reason = String.format("Invalid task. 
Task %s does not have a definition", taskToSchedule.getName()); return new TerminateWorkflowException(reason); }); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java index 4d16ba9300..84a9501f85 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SubWorkflowTaskMapper.java @@ -25,13 +25,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.inject.Inject; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; +import javax.inject.Inject; + public class SubWorkflowTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(SubWorkflowTaskMapper.class); diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index cb1353f87e..446af3c24e 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -25,16 +25,15 @@ public MetadataMapperService(MetadataDAO metadataDAO) { } public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { - List workflowTasks = workflowDefinition.collectTasks(); - for (WorkflowTask workflowTask : workflowTasks) { - - - if (workflowTask.shouldPopulateDefinition()) { - workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); - } else if (workflowTask.getType().equals(WorkflowTask.Type.SUB_WORKFLOW.name())) { - populateVersionForSubWorkflow(workflowTask); - } - } + workflowDefinition.collectTasks().stream().forEach( + workflowTask -> { + if (workflowTask.shouldPopulateDefinition()) { + workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); + } else if (workflowTask.getType().equals(WorkflowTask.Type.SUB_WORKFLOW.name())) { + populateVersionForSubWorkflow(workflowTask); + } + } + ); return workflowDefinition; } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java index 91fe3d0c53..59fea819b1 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapperTest.java @@ -72,7 +72,7 @@ public void getMappedTasksException() throws Exception { //then expectedException.expect(TerminateWorkflowException.class); - expectedException.expectMessage(String.format("Invalid task specified. Cannot find task by name %s in the task definitions", taskToSchedule.getName())); + expectedException.expectMessage(String.format("Invalid task. 
Task %s does not have a definition", taskToSchedule.getName())); //when simpleTaskMapper.getMappedTasks(taskMapperContext); From aeac7c2e06463d6019541e2c716038881d227432 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 6 Aug 2018 16:45:01 -0700 Subject: [PATCH 124/163] Moved out WorkflowTask.Type to TaskType - Several classes use the type, so it no longer belongs solely to the WorkflowTask class --- .../metadata/workflow/TestWorkflowTask.java | 6 +- .../conductor/common/metadata/tasks/Task.java | 3 +- .../workflow/DynamicForkJoinTask.java | 3 +- .../common/metadata/workflow/TaskType.java | 29 ++++++++++ .../metadata/workflow/WorkflowTask.java | 49 ++++------------ .../common/workflow/TestWorkflowDef.java | 6 +- .../common/workflow/TestWorkflowTask.java | 6 +- .../conductor/contribs/http/TestHttpTask.java | 4 +- .../core/execution/DeciderService.java | 8 +-- .../core/execution/WorkflowExecutor.java | 9 +-- .../execution/mapper/DecisionTaskMapper.java | 5 +- .../execution/mapper/DynamicTaskMapper.java | 3 +- .../mapper/ForkJoinDynamicTaskMapper.java | 15 ++--- .../execution/mapper/ForkJoinTaskMapper.java | 9 +-- .../core/execution/mapper/JoinTaskMapper.java | 5 +- .../execution/mapper/SimpleTaskMapper.java | 5 +- .../mapper/UserDefinedTaskMapper.java | 7 ++- .../core/execution/mapper/WaitTaskMapper.java | 3 +- .../core/metadata/MetadataMapperService.java | 4 +- .../core/execution/TestDeciderOutcomes.java | 12 ++-- .../core/execution/TestDeciderService.java | 26 ++++----- .../core/execution/TestWorkflowDef.java | 6 +- .../core/execution/TestWorkflowExecutor.java | 8 +-- .../mapper/DecisionTaskMapperTest.java | 9 +-- .../mapper/ForkJoinDynamicTaskMapperTest.java | 17 +++--- .../mapper/ForkJoinTaskMapperTest.java | 9 +-- .../execution/mapper/JoinTaskMapperTest.java | 3 +- .../mapper/UserDefinedTaskMapperTest.java | 7 ++- .../execution/mapper/WaitTaskMapperTest.java | 3 +- .../core/execution/tasks/TestEvent.java | 4 +- .../metadata/MetadataMapperServiceTest.java | 3 +- .../conductor/grpc/AbstractProtoMapper.java | 36 ------------ grpc/src/main/proto/model/workflowtask.proto | 12 ---- .../tests/integration/End2EndGrpcTests.java | 6 +- .../tests/integration/End2EndTests.java | 8 +-- .../integration/WorkflowServiceTest.java | 56 +++++++++---------- 36 files changed, 188 insertions(+), 216 deletions(-) create mode 100644 common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java diff --git a/client/src/test/java/com/netflix/conductor/client/metadata/workflow/TestWorkflowTask.java b/client/src/test/java/com/netflix/conductor/client/metadata/workflow/TestWorkflowTask.java index d1a2f9da73..d5f786b9fc 100644 --- a/client/src/test/java/com/netflix/conductor/client/metadata/workflow/TestWorkflowTask.java +++ b/client/src/test/java/com/netflix/conductor/client/metadata/workflow/TestWorkflowTask.java @@ -17,11 +17,11 @@ import static org.junit.Assert.*; +import com.netflix.conductor.common.metadata.workflow.TaskType; import org.junit.Test; import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; /** * * @@ -45,7 +45,7 @@ public void test() throws Exception { assertEquals(task.getType(), read.getType()); task = new WorkflowTask(); - task.setWorkflowTaskType(Type.SUB_WORKFLOW); + task.setWorkflowTaskType(TaskType.SUB_WORKFLOW); task.setName("name"); json = om.writeValueAsString(task); @@ -54,6 +54,6 @@ public void test() 
throws Exception { assertNotNull(read); assertEquals(task.getName(), read.getName()); assertEquals(task.getType(), read.getType()); - assertEquals(Type.SUB_WORKFLOW.name(), read.getType()); + assertEquals(TaskType.SUB_WORKFLOW.name(), read.getType()); } } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index ec04f4e887..f262812dc3 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -17,6 +17,7 @@ import com.google.protobuf.Any; import com.github.vmg.protogen.annotations.*; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import java.util.HashMap; @@ -173,7 +174,7 @@ public Task() { /** * @return Type of the task - * @see WorkflowTask.Type + * @see TaskType */ public String getTaskType() { return taskType; diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java index 94d8aaaec4..655f6bad1a 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/DynamicForkJoinTask.java @@ -19,7 +19,6 @@ import java.util.Map; import com.github.vmg.protogen.annotations.*; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; @ProtoMessage public class DynamicForkJoinTask { @@ -37,7 +36,7 @@ public class DynamicForkJoinTask { private Map input = new HashMap<>(); @ProtoField(id = 5) - private String type = Type.SIMPLE.name(); + private String type = TaskType.SIMPLE.name(); public DynamicForkJoinTask() { } diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java new file mode 100644 index 0000000000..b81891a041 --- /dev/null +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java @@ -0,0 +1,29 @@ +package com.netflix.conductor.common.metadata.workflow; + +import com.github.vmg.protogen.annotations.ProtoEnum; + +import java.util.HashSet; +import java.util.Set; + +@ProtoEnum +public enum TaskType { + SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, JOIN, SUB_WORKFLOW, EVENT, WAIT, USER_DEFINED; + + private static Set systemTasks = new HashSet<>(); + static { + systemTasks.add(TaskType.SIMPLE.name()); + systemTasks.add(TaskType.DYNAMIC.name()); + systemTasks.add(TaskType.FORK_JOIN.name()); + systemTasks.add(TaskType.FORK_JOIN_DYNAMIC.name()); + systemTasks.add(TaskType.DECISION.name()); + systemTasks.add(TaskType.JOIN.name()); + systemTasks.add(TaskType.SUB_WORKFLOW.name()); + systemTasks.add(TaskType.EVENT.name()); + systemTasks.add(TaskType.WAIT.name()); + //Do NOT add USER_DEFINED here... 
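// Illustrative note (sketch, not part of the patch): callers resolve raw
// type strings through isSystemTask(...) so that custom type names fall
// back to USER_DEFINED instead of TaskType.valueOf(...) throwing
// IllegalArgumentException; the type string below is a hypothetical example.
//
//     String type = "MY_CUSTOM_TASK";
//     TaskType taskType = TaskType.isSystemTask(type)
//             ? TaskType.valueOf(type)
//             : TaskType.USER_DEFINED;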
+ } + + public static boolean isSystemTask(String name) { + return systemTasks.contains(name); + } +} diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 5453600f17..0dbd47b333 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -23,14 +23,12 @@ import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; /** * @author Viren @@ -39,29 +37,6 @@ @ProtoMessage public class WorkflowTask { - @ProtoEnum - public enum Type { - SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, JOIN, SUB_WORKFLOW, EVENT, WAIT, USER_DEFINED; - - private static Set systemTasks = new HashSet<>(); - static { - systemTasks.add(Type.SIMPLE.name()); - systemTasks.add(Type.DYNAMIC.name()); - systemTasks.add(Type.FORK_JOIN.name()); - systemTasks.add(Type.FORK_JOIN_DYNAMIC.name()); - systemTasks.add(Type.DECISION.name()); - systemTasks.add(Type.JOIN.name()); - systemTasks.add(Type.SUB_WORKFLOW.name()); - systemTasks.add(Type.EVENT.name()); - systemTasks.add(Type.WAIT.name()); - //Do NOT add USER_DEFINED here... - } - - public static boolean isSystemTask(String name) { - return systemTasks.contains(name); - } - } - @ProtoField(id = 1) private String name; @@ -77,7 +52,7 @@ public static boolean isSystemTask(String name) { private Map inputParameters = new HashMap(); @ProtoField(id = 5) - private String type = Type.SIMPLE.name(); + private String type = TaskType.SIMPLE.name(); @ProtoField(id = 6) private String dynamicTaskNameParam; @@ -202,7 +177,7 @@ public String getType() { return type; } - public void setWorkflowTaskType(Type type) { + public void setWorkflowTaskType(TaskType type) { this.type = type.name(); } @@ -423,9 +398,9 @@ public void setOptional(boolean optional) { private Collection> children() { Collection> workflowTaskLists = new LinkedList<>(); - Type taskType = Type.USER_DEFINED; - if (Type.isSystemTask(type)) { - taskType = Type.valueOf(type); + TaskType taskType = TaskType.USER_DEFINED; + if (TaskType.isSystemTask(type)) { + taskType = TaskType.valueOf(type); } switch (taskType) { @@ -455,9 +430,9 @@ public List collectTasks() { } public WorkflowTask next(String taskReferenceName, WorkflowTask parent) { - Type taskType = Type.USER_DEFINED; - if (Type.isSystemTask(type)) { - taskType = Type.valueOf(type); + TaskType taskType = TaskType.USER_DEFINED; + if (TaskType.isSystemTask(type)) { + taskType = TaskType.valueOf(type); } switch (taskType) { @@ -520,9 +495,9 @@ public boolean has(String taskReferenceName){ return true; } - Type tt = Type.USER_DEFINED; - if(Type.isSystemTask(type)) { - tt = Type.valueOf(type); + TaskType tt = TaskType.USER_DEFINED; + if(TaskType.isSystemTask(type)) { + tt = TaskType.valueOf(type); } switch(tt){ @@ -563,7 +538,7 @@ public WorkflowTask get(String taskReferenceName){ } public boolean shouldPopulateDefinition() { - return getType().equals(WorkflowTask.Type.SIMPLE.name()) && getTaskDefinition() == null; + return getType().equals(TaskType.SIMPLE.name()) && getTaskDefinition() == null; } @Override diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowDef.java 
b/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowDef.java index 21903f39b6..26693e3c1a 100644 --- a/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowDef.java +++ b/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowDef.java @@ -27,11 +27,11 @@ import java.util.List; import java.util.Map; +import com.netflix.conductor.common.metadata.workflow.TaskType; import org.junit.Test; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; /** * @author Viren @@ -61,7 +61,7 @@ public void test() throws Exception { wf.setDescription(COND_TASK_WF); WorkflowTask subCaseTask = new WorkflowTask(); - subCaseTask.setType(Type.DECISION.name()); + subCaseTask.setType(TaskType.DECISION.name()); subCaseTask.setCaseValueParam("case2"); subCaseTask.setName("case2"); subCaseTask.setTaskReferenceName("case2"); @@ -72,7 +72,7 @@ public void test() throws Exception { WorkflowTask caseTask = new WorkflowTask(); - caseTask.setType(Type.DECISION.name()); + caseTask.setType(TaskType.DECISION.name()); caseTask.setCaseValueParam("case"); caseTask.setName("case"); caseTask.setTaskReferenceName("case"); diff --git a/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowTask.java b/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowTask.java index c38e0e0fbd..46eb8353c6 100644 --- a/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowTask.java +++ b/common/src/test/java/com/netflix/conductor/common/workflow/TestWorkflowTask.java @@ -21,10 +21,10 @@ import static org.junit.Assert.*; +import com.netflix.conductor.common.metadata.workflow.TaskType; import org.junit.Test; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; /** * @author Viren @@ -35,10 +35,10 @@ public class TestWorkflowTask { @Test public void test() { WorkflowTask wt = new WorkflowTask(); - wt.setWorkflowTaskType(Type.DECISION); + wt.setWorkflowTaskType(TaskType.DECISION); assertNotNull(wt.getType()); - assertEquals(Type.DECISION.name(), wt.getType()); + assertEquals(TaskType.DECISION.name(), wt.getType()); } @Test diff --git a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java index dda28a16b7..b98d1a969d 100644 --- a/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java +++ b/contribs/src/test/java/com/netflix/conductor/contribs/http/TestHttpTask.java @@ -19,9 +19,9 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.contribs.http.HttpTask.Input; import com.netflix.conductor.core.config.Configuration; @@ -272,7 +272,7 @@ public void testOptional() throws Exception { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setOptional(true); workflowTask.setName("HTTP"); - 
workflowTask.setWorkflowTaskType(Type.USER_DEFINED); + workflowTask.setWorkflowTaskType(TaskType.USER_DEFINED); workflowTask.setTaskReferenceName("t1"); WorkflowDef def = new WorkflowDef(); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 9d3144333b..4137c95dde 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -22,9 +22,9 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.execution.mapper.TaskMapper; @@ -442,10 +442,10 @@ public List getTasksToBeScheduled(Workflow workflowInstance, Map input = parametersUtils.getTaskInput(taskToSchedule.getInputParameters(), workflowInstance, null, null); - Type taskType = Type.USER_DEFINED; + TaskType taskType = TaskType.USER_DEFINED; String type = taskToSchedule.getType(); - if (Type.isSystemTask(type)) { - taskType = Type.valueOf(type); + if (TaskType.isSystemTask(type)) { + taskType = TaskType.valueOf(type); } // get in progress tasks for this workflow instance diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index b1baecc146..8a022bd86f 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -23,6 +23,7 @@ import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SkipTaskRequest; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -273,7 +274,7 @@ public String startWorkflow( // Obtain the names of the tasks with missing definitions Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() .filter(workflowTask -> - (workflowTask.getType().equals(WorkflowTask.Type.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) + (workflowTask.getType().equals(TaskType.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) .map(workflowTask -> workflowTask.getName()) .collect(Collectors.toSet()); @@ -422,7 +423,7 @@ public void retry(String workflowId) { // Reschedule the cancelled task but if the join is cancelled set that to in progress cancelledTasks.forEach(task -> { - if (task.getTaskType().equalsIgnoreCase(WorkflowTask.Type.JOIN.toString())) { + if (task.getTaskType().equalsIgnoreCase(TaskType.JOIN.toString())) { task.setStatus(IN_PROGRESS); executionDAO.updateTask(task); } else { @@ -1000,7 +1001,7 @@ public void setTaskDomains(List tasks, Workflow wf) { String[] 
domains = domainstr.split(","); tasks.forEach(task -> { // Filter out SystemTask - if (!WorkflowTask.Type.isSystemTask(task.getTaskType())) { + if (!TaskType.isSystemTask(task.getTaskType())) { // Check which domain worker is polling // Set the task domain task.setDomain(getActiveDomain(task.getTaskType(), domains)); @@ -1009,7 +1010,7 @@ public void setTaskDomains(List tasks, Workflow wf) { } else { tasks.forEach(task -> { - if (!WorkflowTask.Type.isSystemTask(task.getTaskType())) { + if (!TaskType.isSystemTask(task.getTaskType())) { String taskDomainstr = taskToDomain.get(task.getTaskType()); if (taskDomainstr != null) { task.setDomain(getActiveDomain(task.getTaskType(), taskDomainstr.split(","))); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java index 572ebfe510..f6627ad733 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapper.java @@ -18,6 +18,7 @@ import com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -34,7 +35,7 @@ /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#DECISION} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#DECISION} * to a List {@link Task} starting with Task of type {@link SystemTaskType#DECISION} which is marked as IN_PROGRESS, * followed by the list of {@link Task} based on the case expression evaluation in the Decision task. */ @@ -43,7 +44,7 @@ public class DecisionTaskMapper implements TaskMapper { Logger logger = LoggerFactory.getLogger(DecisionTaskMapper.class); /** - * This method gets the list of tasks that need to scheduled when the the task to scheduled is of type {@link WorkflowTask.Type#DECISION}. + * This method gets the list of tasks that need to scheduled when the the task to scheduled is of type {@link TaskType#DECISION}. 
* * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId * @return List of tasks in the following order: diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java index df0318b870..0618dfb23c 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/DynamicTaskMapper.java @@ -19,6 +19,7 @@ import com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -34,7 +35,7 @@ /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#DYNAMIC} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#DYNAMIC} * to a {@link Task} based on definition derived from the dynamic task name defined in {@link WorkflowTask#getInputParameters()} */ public class DynamicTaskMapper implements TaskMapper { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java index d94a83371b..f548b0af83 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java @@ -20,6 +20,7 @@ import com.google.common.annotations.VisibleForTesting; import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -41,7 +42,7 @@ import java.util.stream.Collectors; /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#FORK_JOIN_DYNAMIC} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#FORK_JOIN_DYNAMIC} * to a LinkedList of {@link Task} beginning with a {@link SystemTaskType#FORK}, followed by the user defined dynamic tasks and * a {@link SystemTaskType#JOIN} at the end */ @@ -65,7 +66,7 @@ public ForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper o } /** - * This method gets the list of tasks that need to scheduled when the the task to scheduled is of type {@link WorkflowTask.Type#FORK_JOIN_DYNAMIC}. + * This method gets the list of tasks that need to scheduled when the the task to scheduled is of type {@link TaskType#FORK_JOIN_DYNAMIC}. * Creates a Fork Task, followed by the Dynamic tasks and a final JOIN task. *
<p>The definitions of the dynamic forks that need to be scheduled are available in the {@link WorkflowTask#getInputParameters()} * which are accessed using the {@link TaskMapperContext#getTaskToSchedule()}. The dynamic fork task definitions are referred by a key value either by * {@link WorkflowTask#getDynamicForkTasksParam()} or by {@link WorkflowTask#getDynamicForkJoinTasksParam()} * </p> * <ul> * <li>If the input parameter representing the Dynamic fork tasks is available as part of {@link WorkflowTask#getDynamicForkJoinTasksParam()} then * the input for the dynamic tasks is available in the payload of the tasks definition. * </li> * <li> - * A check is performed that the next following task in the {@link WorkflowDef} is a {@link WorkflowTask.Type#JOIN} + * A check is performed that the next following task in the {@link WorkflowDef} is a {@link TaskType#JOIN} * </li> * </ul> * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId * @return a LinkedList of {@link Task} beginning with a {@link SystemTaskType#FORK}, followed by the user defined dynamic tasks and a {@link SystemTaskType#JOIN} at the end * @throws TerminateWorkflowException In case of: * <ul> * <li> - * When the task after {@link WorkflowTask.Type#FORK_JOIN_DYNAMIC} is not a {@link WorkflowTask.Type#JOIN} + * When the task after {@link TaskType#FORK_JOIN_DYNAMIC} is not a {@link TaskType#JOIN} * </li> * <li> * When the input parameters for the dynamic tasks are not of type {@link Map} * </li> * </ul> */ @@ -150,7 +151,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter .getWorkflowDefinition() .getNextTask(taskToSchedule.getTaskReferenceName()); - if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(WorkflowTask.Type.JOIN.name())) { + if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) { throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task. Check the blueprint"); } @@ -168,7 +169,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter * This method creates a FORK task and adds the list of dynamic fork tasks keyed by "forkedTaskDefs" and * their names keyed by "forkedTasks" into {@link Task#getInputData()} * - * @param taskToSchedule A {@link WorkflowTask} representing {@link WorkflowTask.Type#FORK_JOIN_DYNAMIC} + * @param taskToSchedule A {@link WorkflowTask} representing {@link TaskType#FORK_JOIN_DYNAMIC} * @param workflowInstance: A instance of the {@link Workflow} which represents the workflow being executed. * @param taskId: The string representation of {@link java.util.UUID} which will be set as the taskId. * @param dynForkTasks: The list of dynamic forked tasks, the reference names of these tasks will be added to the forkDynamicTask @@ -200,7 +201,7 @@ Task createDynamicForkTask(WorkflowTask taskToSchedule, Workflow workflowInstanc * at the end to add a join task to be scheduled after all the fork tasks * * @param workflowInstance: A instance of the {@link Workflow} which represents the workflow being executed. - * @param joinWorkflowTask: A instance of {@link WorkflowTask} which is of type {@link WorkflowTask.Type#JOIN} + * @param joinWorkflowTask: A instance of {@link WorkflowTask} which is of type {@link TaskType#JOIN} * @param joinInput: The input which is set in the {@link Task#setInputData(Map)} * @return: a new instance of {@link Task} representing a {@link SystemTaskType#JOIN} */ diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java index 6a6d368bea..3f4b85ab9c 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapper.java @@ -17,6 +17,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -30,7 +31,7 @@ import java.util.Map; /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#FORK_JOIN} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#FORK_JOIN} * to a LinkedList of {@link Task} beginning with a completed {@link SystemTaskType#FORK}, followed by the user defined fork tasks */ public class ForkJoinTaskMapper implements TaskMapper { @@ -38,7 +39,7 @@ public class ForkJoinTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(ForkJoinTaskMapper.class); /** - * This method gets the list of tasks that need to scheduled when the the task to
scheduled is of type {@link WorkflowTask.Type#FORK_JOIN}. + * This method gets the list of tasks that need to scheduled when the the task to scheduled is of type {@link TaskType#FORK_JOIN}. * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId * @return List of tasks in the following order: @@ -50,7 +51,7 @@ public class ForkJoinTaskMapper implements TaskMapper { * Might be any kind of task, but in most cases is a UserDefinedTask with {@link Task.Status#SCHEDULED} *
*
    - * @throws TerminateWorkflowException When the task after {@link WorkflowTask.Type#FORK_JOIN} is not a {@link WorkflowTask.Type#JOIN} + * @throws TerminateWorkflowException When the task after {@link TaskType#FORK_JOIN} is not a {@link TaskType#JOIN} */ @Override public List getMappedTasks(TaskMapperContext taskMapperContext) throws TerminateWorkflowException { @@ -92,7 +93,7 @@ public List getMappedTasks(TaskMapperContext taskMapperContext) throws Ter .getWorkflowDefinition() .getNextTask(taskToSchedule.getTaskReferenceName()); - if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(WorkflowTask.Type.JOIN.name())) { + if (joinWorkflowTask == null || !joinWorkflowTask.getType().equals(TaskType.JOIN.name())) { throw new TerminateWorkflowException("Dynamic join definition is not followed by a join task. Check the blueprint"); } return tasksToBeScheduled; diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java index 5f11beac8d..991ec6d80d 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapper.java @@ -17,6 +17,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -30,7 +31,7 @@ import java.util.Map; /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#JOIN} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#JOIN} * to a {@link Task} of type {@link SystemTaskType#JOIN} */ public class JoinTaskMapper implements TaskMapper { @@ -38,7 +39,7 @@ public class JoinTaskMapper implements TaskMapper { public static final Logger logger = LoggerFactory.getLogger(JoinTaskMapper.class); /** - * This method maps {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#JOIN} to a {@link Task} of type {@link SystemTaskType#JOIN} + * This method maps {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#JOIN} to a {@link Task} of type {@link SystemTaskType#JOIN} * with a status of {@link Task.Status#IN_PROGRESS} * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java index 4e09834fa4..21b05123fb 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/SimpleTaskMapper.java @@ -19,6 +19,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -34,7 +35,7 @@ /** - * An implementation of 
{@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#SIMPLE} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#SIMPLE} * to a {@link Task} with status {@link Task.Status#SCHEDULED}. NOTE: There is not type defined for simples task. */ public class SimpleTaskMapper implements TaskMapper { @@ -48,7 +49,7 @@ public SimpleTaskMapper(ParametersUtils parametersUtils) { } /** - * This method maps a {@link WorkflowTask} of type {@link WorkflowTask.Type#SIMPLE} + * This method maps a {@link WorkflowTask} of type {@link TaskType#SIMPLE} * to a {@link Task} * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java index 8a1c948c8e..01e712507b 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapper.java @@ -18,6 +18,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -32,8 +33,8 @@ import java.util.Optional; /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#USER_DEFINED} - * to a {@link Task} of type {@link WorkflowTask.Type#USER_DEFINED} with {@link Task.Status#SCHEDULED} + * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#USER_DEFINED} + * to a {@link Task} of type {@link TaskType#USER_DEFINED} with {@link Task.Status#SCHEDULED} */ public class UserDefinedTaskMapper implements TaskMapper { @@ -46,7 +47,7 @@ public UserDefinedTaskMapper(ParametersUtils parametersUtils) { } /** - * This method maps a {@link WorkflowTask} of type {@link WorkflowTask.Type#USER_DEFINED} + * This method maps a {@link WorkflowTask} of type {@link TaskType#USER_DEFINED} * to a {@link Task} in a {@link Task.Status#SCHEDULED} state * * @param taskMapperContext: A wrapper class containing the {@link WorkflowTask}, {@link WorkflowDef}, {@link Workflow} and a string representation of the TaskId diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java index b880f9b295..9bd6921854 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapper.java @@ -17,6 +17,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.ParametersUtils; @@ -30,7 +31,7 @@ /** - * An implementation of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link WorkflowTask.Type#WAIT} + * An implementation 
of {@link TaskMapper} to map a {@link WorkflowTask} of type {@link TaskType#WAIT} * to a {@link Task} of type {@link Wait} with {@link Task.Status#IN_PROGRESS} */ public class WaitTaskMapper implements TaskMapper { diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index 446af3c24e..a0b062d852 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -2,6 +2,7 @@ import com.google.inject.Singleton; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.execution.TerminateWorkflowException; @@ -10,7 +11,6 @@ import org.slf4j.LoggerFactory; import javax.inject.Inject; -import java.util.List; @Singleton public class MetadataMapperService { @@ -29,7 +29,7 @@ public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { workflowTask -> { if (workflowTask.shouldPopulateDefinition()) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); - } else if (workflowTask.getType().equals(WorkflowTask.Type.SUB_WORKFLOW.name())) { + } else if (workflowTask.getType().equals(TaskType.SUB_WORKFLOW.name())) { populateVersionForSubWorkflow(workflowTask); } } diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java index e196335a72..13a9c242de 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderOutcomes.java @@ -21,9 +21,9 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.DeciderService.DeciderOutcome; import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; @@ -175,7 +175,7 @@ public void testRetries() { WorkflowTask fork = new WorkflowTask(); fork.setName("fork0"); - fork.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC); + fork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); fork.setTaskReferenceName("fork0"); fork.setDynamicForkTasksInputParamName("forkedInputs"); fork.setDynamicForkTasksParam("forks"); @@ -198,7 +198,7 @@ public void testRetries() { WorkflowTask wft = new WorkflowTask(); wft.setName("f" + i); wft.setTaskReferenceName("f" + i); - wft.setWorkflowTaskType(Type.SIMPLE); + wft.setWorkflowTaskType(TaskType.SIMPLE); wft.getInputParameters().put("requestId", "${workflow.input.requestId}"); wft.getInputParameters().put("taskId", "${CPEWF_TASK_ID}"); wft.setTaskDefinition(new TaskDef("f" + i)); @@ -304,7 +304,7 @@ public void testOptionalWithDynamicFork() throws Exception { WorkflowTask task1 = new WorkflowTask(); 
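// Editor's note -- illustrative aside, not part of the patch. The mechanical change
// repeated across these hunks swaps the nested WorkflowTask.Type enum for the
// top-level TaskType enum (both under com.netflix.conductor.common.metadata.workflow),
// so only the import and the qualifier change at each call site. A minimal sketch of
// the two idioms, assuming TaskType exposes the same constants the nested enum did:
WorkflowTask sketch = new WorkflowTask();
sketch.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);          // was WorkflowTask.Type.FORK_JOIN_DYNAMIC
boolean isJoin = TaskType.JOIN.name().equals(sketch.getType());  // String-typed call sites compare via name()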
task1.setName("fork0"); - task1.setWorkflowTaskType(Type.FORK_JOIN_DYNAMIC); + task1.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); task1.setTaskReferenceName("fork0"); task1.setDynamicForkTasksInputParamName("forkedInputs"); task1.setDynamicForkTasksParam("forks"); @@ -330,7 +330,7 @@ public void testOptionalWithDynamicFork() throws Exception { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName("f" + i); workflowTask.setTaskReferenceName("f" + i); - workflowTask.setWorkflowTaskType(Type.SIMPLE); + workflowTask.setWorkflowTaskType(TaskType.SIMPLE); workflowTask.setOptional(true); workflowTask.setTaskDefinition(new TaskDef("f" + i)); forks.add(workflowTask); @@ -398,7 +398,7 @@ public void testDecisionCases() { WorkflowTask decide = new WorkflowTask(); decide.setName("decide"); - decide.setWorkflowTaskType(Type.DECISION); + decide.setWorkflowTaskType(TaskType.DECISION); decide.setTaskReferenceName("d0"); decide.getInputParameters().put("Id", "${workflow.input.Id}"); decide.getInputParameters().put("location", "${workflow.input.location}"); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java index 13de9a49c1..567dc8df7b 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestDeciderService.java @@ -21,9 +21,9 @@ import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.common.utils.JsonMapperProvider; @@ -329,7 +329,7 @@ public void testGetNextTask() { WorkflowTask taskAfterT3 = def.getNextTask("t3"); assertNotNull(taskAfterT3); - assertEquals(Type.DECISION.name(), taskAfterT3.getType()); + assertEquals(TaskType.DECISION.name(), taskAfterT3.getType()); assertEquals("d1", taskAfterT3.getTaskReferenceName()); WorkflowTask taskAfterT4 = def.getNextTask("t4"); @@ -699,7 +699,7 @@ public void testGetTasksToBeScheduled() throws Exception { WorkflowTask workflowTask1 = new WorkflowTask(); workflowTask1.setName("s1"); workflowTask1.setTaskReferenceName("s1"); - workflowTask1.setType(Type.SIMPLE.name()); + workflowTask1.setType(TaskType.SIMPLE.name()); workflowTask1.setTaskDefinition(new TaskDef("s1")); List tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask1, 0, null); @@ -710,7 +710,7 @@ public void testGetTasksToBeScheduled() throws Exception { WorkflowTask workflowTask2 = new WorkflowTask(); workflowTask2.setName("s2"); workflowTask2.setTaskReferenceName("s2"); - workflowTask2.setType(Type.SIMPLE.name()); + workflowTask2.setType(TaskType.SIMPLE.name()); workflowTask2.setTaskDefinition(new TaskDef("s2")); tasksToBeScheduled = deciderService.getTasksToBeScheduled(workflow, workflowTask2, 0, null); assertNotNull(tasksToBeScheduled); @@ -768,7 +768,7 @@ private WorkflowDef createConditionalWF() throws Exception { workflowDef.setInputParameters(Arrays.asList("param1", 
"param2")); WorkflowTask decisionTask2 = new WorkflowTask(); - decisionTask2.setType(Type.DECISION.name()); + decisionTask2.setType(TaskType.DECISION.name()); decisionTask2.setCaseValueParam("case"); decisionTask2.setName("conditional2"); decisionTask2.setTaskReferenceName("conditional2"); @@ -780,7 +780,7 @@ private WorkflowDef createConditionalWF() throws Exception { WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setCaseValueParam("case"); decisionTask.setName("conditional"); decisionTask.setTaskReferenceName("conditional"); @@ -800,7 +800,7 @@ private WorkflowDef createConditionalWF() throws Exception { WorkflowTask finalDecisionTask = new WorkflowTask(); finalDecisionTask.setName("finalcondition"); finalDecisionTask.setTaskReferenceName("tf"); - finalDecisionTask.setType(Type.DECISION.name()); + finalDecisionTask.setType(TaskType.DECISION.name()); finalDecisionTask.setCaseValueParam("finalCase"); Map fi = new HashMap<>(); fi.put("finalCase", "workflow.input.finalCase"); @@ -905,7 +905,7 @@ private WorkflowDef createNestedWorkflow() { } WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setName("Decision"); decisionTask.setTaskReferenceName("d1"); decisionTask.setDefaultCase(Collections.singletonList(tasks.get(8))); @@ -917,26 +917,26 @@ private WorkflowDef createNestedWorkflow() { WorkflowDef subWorkflowDef = createLinearWorkflow(); WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(Type.SUB_WORKFLOW.name()); + subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); subWorkflowParams.setName(subWorkflowDef.getName()); subWorkflow.setSubWorkflowParam(subWorkflowParams); subWorkflow.setTaskReferenceName("sw1"); WorkflowTask forkTask2 = new WorkflowTask(); - forkTask2.setType(Type.FORK_JOIN.name()); + forkTask2.setType(TaskType.FORK_JOIN.name()); forkTask2.setName("second fork"); forkTask2.setTaskReferenceName("fork2"); forkTask2.getForkTasks().add(Arrays.asList(tasks.get(2), tasks.get(4))); forkTask2.getForkTasks().add(Arrays.asList(tasks.get(3), decisionTask)); WorkflowTask joinTask2 = new WorkflowTask(); - joinTask2.setType(Type.JOIN.name()); + joinTask2.setType(TaskType.JOIN.name()); joinTask2.setTaskReferenceName("join2"); joinTask2.setJoinOn(Arrays.asList("t4", "d1")); WorkflowTask forkTask1 = new WorkflowTask(); - forkTask1.setType(Type.FORK_JOIN.name()); + forkTask1.setType(TaskType.FORK_JOIN.name()); forkTask1.setTaskReferenceName("fork1"); forkTask1.getForkTasks().add(Collections.singletonList(tasks.get(1))); forkTask1.getForkTasks().add(Arrays.asList(forkTask2, joinTask2)); @@ -944,7 +944,7 @@ private WorkflowDef createNestedWorkflow() { WorkflowTask joinTask1 = new WorkflowTask(); - joinTask1.setType(Type.JOIN.name()); + joinTask1.setType(TaskType.JOIN.name()); joinTask1.setTaskReferenceName("join1"); joinTask1.setJoinOn(Arrays.asList("t1", "fork2")); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java index 0eb5b9caff..5282837cdc 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowDef.java @@ -31,7 +31,7 @@ import 
com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +import com.netflix.conductor.common.metadata.workflow.TaskType; /** * @author Viren @@ -50,7 +50,7 @@ public void test(){ WorkflowTask task3 = create("decision_task_1"); def.getTasks().add(task3); - task3.setType(Type.DECISION.name()); + task3.setType(TaskType.DECISION.name()); task3.getDecisionCases().put("Case1", Arrays.asList(create("case_1_task_1"), create("case_1_task_2"))); task3.getDecisionCases().put("Case2", Arrays.asList(create("case_2_task_1"), create("case_2_task_2"))); task3.getDecisionCases().put("Case3", Arrays.asList(deciderTask("decision_task_2", toMap("Case31", "case31_task_1", "case_31_task_2"), Arrays.asList("case3_def_task")))); @@ -98,7 +98,7 @@ private WorkflowTask create(String name){ private WorkflowTask deciderTask(String name, Map> decisions, List defaultTasks){ WorkflowTask task = create(name); - task.setType(Type.DECISION.name()); + task.setType(TaskType.DECISION.name()); decisions.entrySet().forEach(e -> { List tasks = new LinkedList<>(); e.getValue().forEach(taskName -> tasks.add(create(taskName))); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java index d1e2e6c731..88dbf5c24a 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/TestWorkflowExecutor.java @@ -23,7 +23,7 @@ import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.execution.mapper.DecisionTaskMapper; import com.netflix.conductor.core.execution.mapper.DynamicTaskMapper; @@ -137,15 +137,15 @@ public void start(Workflow workflow, Task task, WorkflowExecutor executor) throw List tasks = new LinkedList<>(); WorkflowTask taskToSchedule = new WorkflowTask(); - taskToSchedule.setWorkflowTaskType(Type.USER_DEFINED); + taskToSchedule.setWorkflowTaskType(TaskType.USER_DEFINED); taskToSchedule.setType("HTTP"); WorkflowTask taskToSchedule2 = new WorkflowTask(); - taskToSchedule2.setWorkflowTaskType(Type.USER_DEFINED); + taskToSchedule2.setWorkflowTaskType(TaskType.USER_DEFINED); taskToSchedule2.setType("HTTP2"); WorkflowTask wait = new WorkflowTask(); - wait.setWorkflowTaskType(Type.WAIT); + wait.setWorkflowTaskType(TaskType.WAIT); wait.setType("WAIT"); wait.setTaskReferenceName("wait"); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java index 0e12f8b966..ed56968e17 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/DecisionTaskMapperTest.java @@ -2,6 +2,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import 
com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -79,7 +80,7 @@ public void getMappedTasks() throws Exception { //Decision task instance WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(WorkflowTask.Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setName("Decision"); decisionTask.setTaskReferenceName("decisionTask"); decisionTask.setDefaultCase(Arrays.asList(task1)); @@ -131,7 +132,7 @@ public void getMappedTasks() throws Exception { public void getEvaluatedCaseValue() throws Exception { WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(WorkflowTask.Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setName("Decision"); decisionTask.setTaskReferenceName("decisionTask"); decisionTask.setInputParameters(ip1); @@ -169,7 +170,7 @@ public void getEvaluatedCaseValueUsingExpression() throws Exception { //Decision task instance WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(WorkflowTask.Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setName("Decision"); decisionTask.setTaskReferenceName("decisionTask"); decisionTask.setDefaultCase(Arrays.asList(task1)); @@ -215,7 +216,7 @@ public void getEvaluatedCaseValueException() { //Decision task instance WorkflowTask decisionTask = new WorkflowTask(); - decisionTask.setType(WorkflowTask.Type.DECISION.name()); + decisionTask.setType(TaskType.DECISION.name()); decisionTask.setName("Decision"); decisionTask.setTaskReferenceName("decisionTask"); decisionTask.setDefaultCase(Arrays.asList(task1)); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java index 623a00b174..b5a1c2ac60 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapperTest.java @@ -5,6 +5,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -71,7 +72,7 @@ public void getMappedTasksException() throws Exception { workflowInstance.setWorkflowDefinition(def); WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); @@ -80,7 +81,7 @@ public void getMappedTasksException() throws Exception { WorkflowTask join = new WorkflowTask(); - join.setType(WorkflowTask.Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("dynamictask_join"); def.getTasks().add(dynamicForkJoinToSchedule); @@ -142,7 +143,7 @@ public void 
getMappedTasks() throws Exception { workflowInstance.setWorkflowDefinition(def); WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); @@ -151,7 +152,7 @@ public void getMappedTasks() throws Exception { WorkflowTask join = new WorkflowTask(); - join.setType(WorkflowTask.Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("dynamictask_join"); def.getTasks().add(dynamicForkJoinToSchedule); @@ -212,7 +213,7 @@ public void getMappedTasks() throws Exception { public void getDynamicForkJoinTasksAndInput() throws Exception { //Given WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); @@ -250,7 +251,7 @@ public void getDynamicForkJoinTasksAndInput() throws Exception { public void getDynamicForkJoinTasksAndInputException() throws Exception { //Given WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkJoinTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); @@ -287,7 +288,7 @@ public void getDynamicForkJoinTasksAndInputException() throws Exception { public void getDynamicForkTasksAndInput() throws Exception { //Given WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); @@ -330,7 +331,7 @@ public void getDynamicForkTasksAndInputException() throws Exception { //Given WorkflowTask dynamicForkJoinToSchedule = new WorkflowTask(); - dynamicForkJoinToSchedule.setType(WorkflowTask.Type.FORK_JOIN_DYNAMIC.name()); + dynamicForkJoinToSchedule.setType(TaskType.FORK_JOIN_DYNAMIC.name()); dynamicForkJoinToSchedule.setTaskReferenceName("dynamicfanouttask"); dynamicForkJoinToSchedule.setDynamicForkTasksParam("dynamicTasks"); dynamicForkJoinToSchedule.setDynamicForkTasksInputParamName("dynamicTasksInput"); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java index 70b8378f30..53c56965ca 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java +++ 
b/core/src/test/java/com/netflix/conductor/core/execution/mapper/ForkJoinTaskMapperTest.java @@ -1,6 +1,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -46,7 +47,7 @@ public void getMappedTasks() throws Exception { def.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask forkTask = new WorkflowTask(); - forkTask.setType(WorkflowTask.Type.FORK_JOIN.name()); + forkTask.setType(TaskType.FORK_JOIN.name()); forkTask.setTaskReferenceName("forktask"); WorkflowTask wft1 = new WorkflowTask(); @@ -80,7 +81,7 @@ public void getMappedTasks() throws Exception { def.getTasks().add(forkTask); WorkflowTask join = new WorkflowTask(); - join.setType(WorkflowTask.Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("forktask_join"); join.setJoinOn(Arrays.asList("t3","t2")); @@ -120,7 +121,7 @@ public void getMappedTasksException() throws Exception { def.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask forkTask = new WorkflowTask(); - forkTask.setType(WorkflowTask.Type.FORK_JOIN.name()); + forkTask.setType(TaskType.FORK_JOIN.name()); forkTask.setTaskReferenceName("forktask"); WorkflowTask wft1 = new WorkflowTask(); @@ -154,7 +155,7 @@ public void getMappedTasksException() throws Exception { def.getTasks().add(forkTask); WorkflowTask join = new WorkflowTask(); - join.setType(WorkflowTask.Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("forktask_join"); join.setJoinOn(Arrays.asList("t3","t2")); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java index e6ade3bf4e..5a224b611e 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/JoinTaskMapperTest.java @@ -1,6 +1,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -20,7 +21,7 @@ public class JoinTaskMapperTest { public void getMappedTasks() throws Exception { WorkflowTask taskToSchedule = new WorkflowTask(); - taskToSchedule.setType(WorkflowTask.Type.JOIN.name()); + taskToSchedule.setType(TaskType.JOIN.name()); taskToSchedule.setJoinOn(Arrays.asList("task1, task2")); String taskId = IDGenerator.generate(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java index 2cb30381fe..d0859155e1 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/UserDefinedTaskMapperTest.java @@ -2,6 +2,7 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import 
com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -38,7 +39,7 @@ public void getMappedTasks() throws Exception { //Given WorkflowTask taskToSchedule = new WorkflowTask(); taskToSchedule.setName("user_task"); - taskToSchedule.setType(WorkflowTask.Type.USER_DEFINED.name()); + taskToSchedule.setType(TaskType.USER_DEFINED.name()); taskToSchedule.setTaskDefinition(new TaskDef("user_task")); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); @@ -54,7 +55,7 @@ public void getMappedTasks() throws Exception { //Then assertEquals(1, mappedTasks.size()); - assertEquals(WorkflowTask.Type.USER_DEFINED.name(), mappedTasks.get(0).getTaskType()); + assertEquals(TaskType.USER_DEFINED.name(), mappedTasks.get(0).getTaskType()); } @Test @@ -62,7 +63,7 @@ public void getMappedTasksException() throws Exception { //Given WorkflowTask taskToSchedule = new WorkflowTask(); taskToSchedule.setName("user_task"); - taskToSchedule.setType(WorkflowTask.Type.USER_DEFINED.name()); + taskToSchedule.setType(TaskType.USER_DEFINED.name()); String taskId = IDGenerator.generate(); String retriedTaskId = IDGenerator.generate(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java index 3bc71d988d..f4b49bb3ab 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/mapper/WaitTaskMapperTest.java @@ -1,6 +1,7 @@ package com.netflix.conductor.core.execution.mapper; import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; @@ -22,7 +23,7 @@ public void getMappedTasks() throws Exception { //Given WorkflowTask taskToSchedule = new WorkflowTask(); taskToSchedule.setName("Wait_task"); - taskToSchedule.setType(WorkflowTask.Type.WAIT.name()); + taskToSchedule.setType(TaskType.WAIT.name()); String taskId = IDGenerator.generate(); ParametersUtils parametersUtils = new ParametersUtils(); diff --git a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java index 48efcc5e54..293a0cc835 100644 --- a/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java +++ b/core/src/test/java/com/netflix/conductor/core/execution/tasks/TestEvent.java @@ -17,8 +17,8 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.events.EventQueues; import com.netflix.conductor.core.events.MockQueueProvider; @@ -104,7 +104,7 @@ public void testSinkParam() { Task task = new Task(); task.setReferenceTaskName("event"); task.getInputData().put("sink", sink); - 
task.setTaskType(WorkflowTask.Type.EVENT.name()); + task.setTaskType(TaskType.EVENT.name()); workflow.getTasks().add(task); Event event = new Event(); diff --git a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java index d09d23b59b..5abda96dd1 100644 --- a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java +++ b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java @@ -2,6 +2,7 @@ import com.google.common.collect.ImmutableList; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.metadata.MetadataMapperService; @@ -119,7 +120,7 @@ private WorkflowDef createWorkflowDefinition(String name) { private WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); - workflowTask.setType(WorkflowTask.Type.SIMPLE.name()); + workflowTask.setType(TaskType.SIMPLE.name()); return workflowTask; } diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index 86ef1b1908..cd29b9c539 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -931,42 +931,6 @@ public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { return to; } - public WorkflowTaskPb.WorkflowTask.Type toProto(WorkflowTask.Type from) { - WorkflowTaskPb.WorkflowTask.Type to; - switch (from) { - case SIMPLE: to = WorkflowTaskPb.WorkflowTask.Type.SIMPLE; break; - case DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.DYNAMIC; break; - case FORK_JOIN: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN; break; - case FORK_JOIN_DYNAMIC: to = WorkflowTaskPb.WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; - case DECISION: to = WorkflowTaskPb.WorkflowTask.Type.DECISION; break; - case JOIN: to = WorkflowTaskPb.WorkflowTask.Type.JOIN; break; - case SUB_WORKFLOW: to = WorkflowTaskPb.WorkflowTask.Type.SUB_WORKFLOW; break; - case EVENT: to = WorkflowTaskPb.WorkflowTask.Type.EVENT; break; - case WAIT: to = WorkflowTaskPb.WorkflowTask.Type.WAIT; break; - case USER_DEFINED: to = WorkflowTaskPb.WorkflowTask.Type.USER_DEFINED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - - public WorkflowTask.Type fromProto(WorkflowTaskPb.WorkflowTask.Type from) { - WorkflowTask.Type to; - switch (from) { - case SIMPLE: to = WorkflowTask.Type.SIMPLE; break; - case DYNAMIC: to = WorkflowTask.Type.DYNAMIC; break; - case FORK_JOIN: to = WorkflowTask.Type.FORK_JOIN; break; - case FORK_JOIN_DYNAMIC: to = WorkflowTask.Type.FORK_JOIN_DYNAMIC; break; - case DECISION: to = WorkflowTask.Type.DECISION; break; - case JOIN: to = WorkflowTask.Type.JOIN; break; - case SUB_WORKFLOW: to = WorkflowTask.Type.SUB_WORKFLOW; break; - case EVENT: to = WorkflowTask.Type.EVENT; break; - case WAIT: to = WorkflowTask.Type.WAIT; break; - case USER_DEFINED: to = WorkflowTask.Type.USER_DEFINED; break; - default: throw new IllegalArgumentException("Unexpected enum constant: " + from); - } - return to; - } - public TaskSummaryPb.TaskSummary toProto(TaskSummary from) { 
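// Editor's note -- aside, not part of the patch. The two switch-based mappers deleted
// above existed only to bridge the Java enum to the Type enum that workflowtask.proto
// declared; once the proto hunk below drops that enum, protobuf codegen stops emitting
// WorkflowTaskPb.WorkflowTask.Type and the handwritten mappings are dead code. The task
// type presumably round-trips as a plain string field from here on, e.g. (accessor name
// is hypothetical):
//     proto.getType().equals(TaskType.SIMPLE.name())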
TaskSummaryPb.TaskSummary.Builder to = TaskSummaryPb.TaskSummary.newBuilder(); if (from.getWorkflowId() != null) { diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto index 22c2c69874..b88df17a17 100644 --- a/grpc/src/main/proto/model/workflowtask.proto +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -13,18 +13,6 @@ message WorkflowTask { message WorkflowTaskList { repeated WorkflowTask tasks = 1; } - enum Type { - SIMPLE = 0; - DYNAMIC = 1; - FORK_JOIN = 2; - FORK_JOIN_DYNAMIC = 3; - DECISION = 4; - JOIN = 5; - SUB_WORKFLOW = 6; - EVENT = 7; - WAIT = 8; - USER_DEFINED = 9; - } string name = 1; string task_reference_name = 2; string description = 3; diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 26ba401cb9..83271886f6 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -31,7 +31,7 @@ import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; @@ -116,12 +116,12 @@ public void testAll() throws Exception { def.setName("test"); WorkflowTask t0 = new WorkflowTask(); t0.setName("t0"); - t0.setWorkflowTaskType(Type.SIMPLE); + t0.setWorkflowTaskType(TaskType.SIMPLE); t0.setTaskReferenceName("t0"); WorkflowTask t1 = new WorkflowTask(); t1.setName("t1"); - t1.setWorkflowTaskType(Type.SIMPLE); + t1.setWorkflowTaskType(TaskType.SIMPLE); t1.setTaskReferenceName("t1"); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 6c69774ca3..3551ae33a9 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -28,9 +28,9 @@ import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; @@ -109,12 +109,12 @@ public void testAll() throws Exception { def.setName("test"); WorkflowTask t0 = new WorkflowTask(); t0.setName("t0"); - t0.setWorkflowTaskType(Type.SIMPLE); + t0.setWorkflowTaskType(TaskType.SIMPLE); t0.setTaskReferenceName("t0"); WorkflowTask t1 = new WorkflowTask(); t1.setName("t1"); - t1.setWorkflowTaskType(Type.SIMPLE); + 
t1.setWorkflowTaskType(TaskType.SIMPLE); t1.setTaskReferenceName("t1"); @@ -286,7 +286,7 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { private WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); - workflowTask.setWorkflowTaskType(Type.SIMPLE); + workflowTask.setWorkflowTaskType(TaskType.SIMPLE); workflowTask.setTaskReferenceName(name); return workflowTask; } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index c3b3f01690..b81fcd2bbc 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -31,9 +31,9 @@ import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask.Type; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; import com.netflix.conductor.core.WorkflowContext; @@ -295,7 +295,7 @@ public void testTaskDefTemplate() throws Exception { templateWf.setName("template_workflow"); WorkflowTask wft = new WorkflowTask(); wft.setName(templatedTask.getName()); - wft.setWorkflowTaskType(Type.SIMPLE); + wft.setWorkflowTaskType(TaskType.SIMPLE); wft.setTaskReferenceName("t0"); templateWf.getTasks().add(wft); templateWf.setSchemaVersion(2); @@ -815,7 +815,7 @@ private void createForkJoinWorkflow() throws Exception { workflowDef.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask fanoutTask = new WorkflowTask(); - fanoutTask.setType(Type.FORK_JOIN.name()); + fanoutTask.setType(TaskType.FORK_JOIN.name()); fanoutTask.setTaskReferenceName("fanouttask"); WorkflowTask workflowTask1 = new WorkflowTask(); @@ -849,7 +849,7 @@ private void createForkJoinWorkflow() throws Exception { workflowDef.getTasks().add(fanoutTask); WorkflowTask joinTask = new WorkflowTask(); - joinTask.setType(Type.JOIN.name()); + joinTask.setType(TaskType.JOIN.name()); joinTask.setTaskReferenceName("fanouttask_join"); joinTask.setJoinOn(Arrays.asList("t3", "t2")); @@ -868,7 +868,7 @@ private void createForkJoinWorkflowWithZeroRetry() throws Exception { def.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask fanout = new WorkflowTask(); - fanout.setType(Type.FORK_JOIN.name()); + fanout.setType(TaskType.FORK_JOIN.name()); fanout.setTaskReferenceName("fanouttask"); WorkflowTask wft1 = new WorkflowTask(); @@ -902,7 +902,7 @@ private void createForkJoinWorkflowWithZeroRetry() throws Exception { def.getTasks().add(fanout); WorkflowTask join = new WorkflowTask(); - join.setType(Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("fanouttask_join"); join.setJoinOn(Arrays.asList("t3", "t2")); @@ -936,7 +936,7 @@ private void createForkJoinNestedWorkflow() throws Exception { } WorkflowTask d1 = new WorkflowTask(); - d1.setType(Type.DECISION.name()); + d1.setType(TaskType.DECISION.name()); 
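// Editor's note -- illustrative aside, not part of the patch. d1 is the DECISION task
// nested inside this fork/join blueprint; the minimal wiring such a task needs after
// the rename looks like the sketch below (names and case keys are examples only):
WorkflowTask sketchDecision = new WorkflowTask();
sketchDecision.setType(TaskType.DECISION.name());  // String setter fed by the enum constant
sketchDecision.setCaseValueParam("case");          // input key whose value selects the branch
sketchDecision.getDecisionCases().put("caseA", Collections.singletonList(new WorkflowTask()));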
d1.setName("Decision"); d1.setTaskReferenceName("d1"); d1.setInputParameters(ip1); @@ -948,26 +948,26 @@ private void createForkJoinNestedWorkflow() throws Exception { d1.setDecisionCases(decisionCases); WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(Type.SUB_WORKFLOW.name()); + subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); SubWorkflowParams sw = new SubWorkflowParams(); sw.setName(LINEAR_WORKFLOW_T1_T2); subWorkflow.setSubWorkflowParam(sw); subWorkflow.setTaskReferenceName("sw1"); WorkflowTask fork2 = new WorkflowTask(); - fork2.setType(Type.FORK_JOIN.name()); + fork2.setType(TaskType.FORK_JOIN.name()); fork2.setName("fork2"); fork2.setTaskReferenceName("fork2"); fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14])); fork2.getForkTasks().add(Arrays.asList(tasks[13], d1)); WorkflowTask join2 = new WorkflowTask(); - join2.setType(Type.JOIN.name()); + join2.setType(TaskType.JOIN.name()); join2.setTaskReferenceName("join2"); join2.setJoinOn(Arrays.asList("t14", "t20")); WorkflowTask fork1 = new WorkflowTask(); - fork1.setType(Type.FORK_JOIN.name()); + fork1.setType(TaskType.FORK_JOIN.name()); fork1.setTaskReferenceName("fork1"); fork1.getForkTasks().add(Arrays.asList(tasks[11])); fork1.getForkTasks().add(Arrays.asList(fork2, join2)); @@ -975,7 +975,7 @@ private void createForkJoinNestedWorkflow() throws Exception { WorkflowTask join1 = new WorkflowTask(); - join1.setType(Type.JOIN.name()); + join1.setType(TaskType.JOIN.name()); join1.setTaskReferenceName("join1"); join1.setJoinOn(Arrays.asList("t11", "join2", "sw1")); @@ -1005,7 +1005,7 @@ private void createDynamicForkJoinWorkflowDefs() throws Exception { workflowTask1.setTaskReferenceName("dt1"); WorkflowTask fanout = new WorkflowTask(); - fanout.setType(Type.FORK_JOIN_DYNAMIC.name()); + fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); fanout.setTaskReferenceName("dynamicfanouttask"); fanout.setDynamicForkTasksParam("dynamicTasks"); fanout.setDynamicForkTasksInputParamName("dynamicTasksInput"); @@ -1013,7 +1013,7 @@ private void createDynamicForkJoinWorkflowDefs() throws Exception { fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); WorkflowTask join = new WorkflowTask(); - join.setType(Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("dynamicfanouttask_join"); WorkflowTask workflowTask4 = new WorkflowTask(); @@ -1048,14 +1048,14 @@ private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { wft1.setTaskReferenceName("dt1"); WorkflowTask fanout = new WorkflowTask(); - fanout.setType(Type.FORK_JOIN_DYNAMIC.name()); + fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); fanout.setTaskReferenceName("dynamicfanouttask"); fanout.setDynamicForkJoinTasksParam("dynamicTasks"); fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); WorkflowTask join = new WorkflowTask(); - join.setType(Type.JOIN.name()); + join.setType(TaskType.JOIN.name()); join.setTaskReferenceName("dynamicfanouttask_join"); def.getTasks().add(wft1); @@ -1101,7 +1101,7 @@ private void createConditionalWF() throws Exception { def2.setInputParameters(Arrays.asList("param1", "param2")); WorkflowTask c2 = new WorkflowTask(); - c2.setType(Type.DECISION.name()); + c2.setType(TaskType.DECISION.name()); c2.setCaseValueParam("case"); c2.setName("conditional2"); c2.setTaskReferenceName("conditional2"); @@ -1113,7 +1113,7 @@ private void createConditionalWF() throws 
Exception { WorkflowTask condition = new WorkflowTask(); - condition.setType(Type.DECISION.name()); + condition.setType(TaskType.DECISION.name()); condition.setCaseValueParam("case"); condition.setName("conditional"); condition.setTaskReferenceName("conditional"); @@ -1133,7 +1133,7 @@ private void createConditionalWF() throws Exception { WorkflowTask finalTask = new WorkflowTask(); finalTask.setName("finalcondition"); finalTask.setTaskReferenceName("tf"); - finalTask.setType(Type.DECISION.name()); + finalTask.setType(TaskType.DECISION.name()); finalTask.setCaseValueParam("finalCase"); Map fi = new HashMap<>(); fi.put("finalCase", "workflow.input.finalCase"); @@ -3350,7 +3350,7 @@ public void testSubWorkflow() throws Exception { assertNotNull(es); assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(Type.SUB_WORKFLOW.name().toString())).findAny().get(); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); assertNotNull(task); assertNotNull(task.getOutputData()); assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId")); @@ -3416,7 +3416,7 @@ public void testSubWorkflowFailure() throws Exception { es = workflowExecutionService.getExecutionStatus(wfId, true); assertNotNull(es); assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(Type.SUB_WORKFLOW.name().toString())).findAny().get(); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); assertNotNull(task); assertNotNull(task.getOutputData()); assertNotNull(task.getOutputData().get("subWorkflowId")); @@ -3480,7 +3480,7 @@ public void testSubWorkflowFailureInverse() throws Exception { es = workflowExecutionService.getExecutionStatus(wfId, true); assertNotNull(es); assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(Type.SUB_WORKFLOW.name().toString())).findAny().get(); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); assertNotNull(task); assertNotNull(task.getOutputData()); assertNotNull(task.getOutputData().get("subWorkflowId")); @@ -3510,7 +3510,7 @@ public void testWait() throws Exception { workflowDef.setSchemaVersion(2); WorkflowTask waitWorkflowTask = new WorkflowTask(); - waitWorkflowTask.setWorkflowTaskType(Type.WAIT); + waitWorkflowTask.setWorkflowTaskType(TaskType.WAIT); waitWorkflowTask.setName("wait"); waitWorkflowTask.setTaskReferenceName("wait0"); @@ -3529,7 +3529,7 @@ public void testWait() throws Exception { assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); Task waitTask = workflow.getTasks().get(0); - assertEquals(WorkflowTask.Type.WAIT.name(), waitTask.getTaskType()); + assertEquals(TaskType.WAIT.name(), waitTask.getTaskType()); waitTask.setStatus(COMPLETED); workflowExecutor.updateTask(new TaskResult(waitTask)); @@ -3557,7 +3557,7 @@ public void testEventWorkflow() throws Exception { workflowDef.setSchemaVersion(2); WorkflowTask eventWorkflowTask = new WorkflowTask(); - eventWorkflowTask.setWorkflowTaskType(Type.EVENT); + eventWorkflowTask.setWorkflowTaskType(TaskType.EVENT); eventWorkflowTask.setName("eventX"); eventWorkflowTask.setTaskReferenceName("wait0"); eventWorkflowTask.setSink("conductor"); @@ -3576,7 +3576,7 @@ public void testEventWorkflow() throws Exception { 
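// Editor's note -- aside, not part of the patch. On executed Task instances the type
// survives only as a String, which is why these assertions compare against the enum
// name, e.g. assertEquals(TaskType.EVENT.name(), eventTask.getTaskType()). Note that
// name() already returns a String, so the trailing .toString() retained in the
// sub-workflow hunks above (TaskType.SUB_WORKFLOW.name().toString()) is redundant,
// though harmless.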
assertNotNull(workflow); Task eventTask = workflow.getTasks().get(0); - assertEquals(Type.EVENT.name(), eventTask.getTaskType()); + assertEquals(TaskType.EVENT.name(), eventTask.getTaskType()); assertEquals(COMPLETED, eventTask.getStatus()); assertTrue(!eventTask.getOutputData().isEmpty()); assertNotNull(eventTask.getOutputData().get("event_produced")); @@ -3735,7 +3735,7 @@ private void createSubWorkflow() throws Exception { WorkflowTask wft2 = new WorkflowTask(); wft2.setName("subWorkflowTask"); - wft2.setType(Type.SUB_WORKFLOW.name()); + wft2.setType(TaskType.SUB_WORKFLOW.name()); SubWorkflowParams swp = new SubWorkflowParams(); swp.setName(LINEAR_WORKFLOW_T1_T2); wft2.setSubWorkflowParam(swp); @@ -3822,7 +3822,7 @@ private void createWorkflowDefForDomain() { wft1.setTaskReferenceName("t1"); WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(Type.SUB_WORKFLOW.name()); + subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); SubWorkflowParams sw = new SubWorkflowParams(); sw.setName(LINEAR_WORKFLOW_T1_T2); subWorkflow.setSubWorkflowParam(sw); From d9677335c2c5acc01f38a80b3756bdf8b0e539a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 6 Aug 2018 17:00:38 -0700 Subject: [PATCH 125/163] Refactored CoreModule in order to use constants for @StringMapKey annotations --- .../conductor/core/config/CoreModule.java | 59 ++++++++++++------- 1 file changed, 37 insertions(+), 22 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index 08ea314104..d7e3a070d4 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -56,6 +56,21 @@ */ public class CoreModule extends AbstractModule { + private static final String CONDUCTOR_QUALIFIER = "conductor"; + private static final String PROVIDER_EVENT_QUEUE = "EventQueueProviders"; + + private static final String TASK_MAPPERS_QUALIFIER = "TaskMappers"; + private static final String TASK_TYPE_DECISION = "DECISION"; + private static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; + private static final String TASK_TYPE_JOIN = "JOIN"; + private static final String TASK_TYPE_FORK_JOIN_DYNAMIC = "FORK_JOIN_DYNAMIC"; + private static final String TASK_TYPE_EVENT = "EVENT"; + private static final String TASK_TYPE_WAIT = "WAIT"; + private static final String TASK_TYPE_SUB_WORKFLOW = "SUB_WORKFLOW"; + private static final String TASK_TYPE_FORK_JOIN = "FORK_JOIN"; + private static final String TASK_TYPE_USER_DEFINED = "USER_DEFINED"; + private static final String TASK_TYPE_SIMPLE = "SIMPLE"; + @Override protected void configure() { install(MultibindingsScanner.asModule()); @@ -76,33 +91,33 @@ public ParametersUtils getParameterUtils() { @ProvidesIntoMap - @StringMapKey("conductor") + @StringMapKey(CONDUCTOR_QUALIFIER) @Singleton - @Named("EventQueueProviders") + @Named(PROVIDER_EVENT_QUEUE) public EventQueueProvider getDynoEventQueueProvider(QueueDAO queueDAO, Configuration configuration) { return new DynoEventQueueProvider(queueDAO, configuration); } @ProvidesIntoMap - @StringMapKey("DECISION") + @StringMapKey(TASK_TYPE_DECISION) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getDecisionTaskMapper() { return new DecisionTaskMapper(); } @ProvidesIntoMap - @StringMapKey("DYNAMIC") + @StringMapKey(TASK_TYPE_DYNAMIC) @Singleton - @Named("TaskMappers") + 
@Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getDynamicTaskMapper(ParametersUtils parametersUtils) { return new DynamicTaskMapper(parametersUtils); } @ProvidesIntoMap - @StringMapKey("JOIN") + @StringMapKey(TASK_TYPE_JOIN) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getJoinTaskMapper() { return new JoinTaskMapper(); } @@ -110,57 +125,57 @@ public TaskMapper getJoinTaskMapper() { @ProvidesIntoMap - @StringMapKey("FORK_JOIN_DYNAMIC") + @StringMapKey(TASK_TYPE_FORK_JOIN_DYNAMIC) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getForkJoinDynamicTaskMapper(ParametersUtils parametersUtils, ObjectMapper objectMapper, MetadataDAO metadataDAO) { return new ForkJoinDynamicTaskMapper(parametersUtils, objectMapper, metadataDAO); } @ProvidesIntoMap - @StringMapKey("EVENT") + @StringMapKey(TASK_TYPE_EVENT) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getEventTaskMapper(ParametersUtils parametersUtils) { return new EventTaskMapper(parametersUtils); } @ProvidesIntoMap - @StringMapKey("WAIT") + @StringMapKey(TASK_TYPE_WAIT) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getWaitTaskMapper(ParametersUtils parametersUtils) { return new WaitTaskMapper(parametersUtils); } @ProvidesIntoMap + @StringMapKey(TASK_TYPE_SUB_WORKFLOW) @Singleton - @StringMapKey("SUB_WORKFLOW") - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getSubWorkflowTaskMapper(ParametersUtils parametersUtils) { return new SubWorkflowTaskMapper(parametersUtils); } @ProvidesIntoMap + @StringMapKey(TASK_TYPE_FORK_JOIN) @Singleton - @StringMapKey("FORK_JOIN") - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getForkJoinTaskMapper() { return new ForkJoinTaskMapper(); } @ProvidesIntoMap - @StringMapKey("USER_DEFINED") + @StringMapKey(TASK_TYPE_USER_DEFINED) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getUserDefinedTaskMapper(ParametersUtils parametersUtils) { return new UserDefinedTaskMapper(parametersUtils); } @ProvidesIntoMap - @StringMapKey("SIMPLE") + @StringMapKey(TASK_TYPE_SIMPLE) @Singleton - @Named("TaskMappers") + @Named(TASK_MAPPERS_QUALIFIER) public TaskMapper getSimpleTaskMapper(ParametersUtils parametersUtils) { return new SimpleTaskMapper(parametersUtils); } From e2ea36ebd3f9660278e29d7632ddb9d5daedd050 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 6 Aug 2018 18:51:32 -0700 Subject: [PATCH 126/163] Added missing tests for MetadataMapperService - Tests related to version population for a subworkflow task --- .../metadata/MetadataMapperServiceTest.java | 81 +++++++++++++++++-- 1 file changed, 75 insertions(+), 6 deletions(-) diff --git a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java index 5abda96dd1..daa93994f4 100644 --- a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java +++ b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java @@ -2,9 +2,11 @@ import com.google.common.collect.ImmutableList; import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import 
com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.dao.MetadataDAO; import org.junit.Test; @@ -14,6 +16,7 @@ import org.mockito.runners.MockitoJUnitRunner; import java.util.List; +import java.util.Optional; import static junit.framework.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -95,19 +98,86 @@ public void testMetadataPopulationOnlyOnNecessaryWorkflowTasks() { } @Test - public void testVersionPopulationForSubworkflowTaskIfNotAvailable() { - // TODO + public void testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable() { + String nameTaskDefinition = "taskSubworkflow6"; + String workflowDefinitionName = "subworkflow"; + Integer version = 3; + + WorkflowDef subWorkflowDefinition = createWorkflowDefinition("workflowDefinitionName"); + subWorkflowDefinition.setVersion(version); + + WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); + workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); + SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); + subWorkflowParams.setName(workflowDefinitionName); + workflowTask.setSubWorkflowParam(subWorkflowParams); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask)); + + when(metadataDAO.getLatest(workflowDefinitionName)).thenReturn(Optional.of(subWorkflowDefinition)); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + assertEquals(1, workflowDefinition.getTasks().size()); + List workflowTasks = workflowDefinition.getTasks(); + SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam(); + + assertEquals(workflowDefinitionName, params.getName()); + assertEquals(version, params.getVersion()); + + verify(metadataDAO).getLatest(workflowDefinitionName); + verifyNoMoreInteractions(metadataDAO); } @Test public void testNoVersionPopulationForSubworkflowTaskIfAvailable() { - // TODO + String nameTaskDefinition = "taskSubworkflow7"; + String workflowDefinitionName = "subworkflow"; + Integer version = 2; + + WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); + workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); + SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); + subWorkflowParams.setName(workflowDefinitionName); + subWorkflowParams.setVersion(version); + workflowTask.setSubWorkflowParam(subWorkflowParams); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask)); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + assertEquals(1, workflowDefinition.getTasks().size()); + List workflowTasks = workflowDefinition.getTasks(); + SubWorkflowParams params = workflowTasks.get(0).getSubWorkflowParam(); + + assertEquals(workflowDefinitionName, params.getName()); + assertEquals(version, params.getVersion()); + + verifyZeroInteractions(metadataDAO); } - @Test + @Test(expected = TerminateWorkflowException.class) public void testExceptionWhenWorkflowDefinitionNotAvailable() { - // TODO + String nameTaskDefinition = "taskSubworkflow8"; + String workflowDefinitionName = "subworkflow"; + + WorkflowTask workflowTask = createWorkflowTask(nameTaskDefinition); + workflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); + SubWorkflowParams subWorkflowParams = new 
SubWorkflowParams(); + subWorkflowParams.setName(workflowDefinitionName); + workflowTask.setSubWorkflowParam(subWorkflowParams); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask)); + + when(metadataDAO.getLatest(workflowDefinitionName)).thenReturn(Optional.empty()); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + + verify(metadataDAO).getLatest(workflowDefinitionName); } @@ -128,5 +198,4 @@ private TaskDef createTaskDefinition(String name) { TaskDef taskDefinition = new TaskDef(name); return taskDefinition; } - } From f0e8a0129104a8dfb9b06631ae9c6545261192aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 7 Aug 2018 00:15:30 -0700 Subject: [PATCH 127/163] Code improvements related to MetadataMapperService and CoreModule constants - MetadataMapperService contains helper method for determining if a task should be populated - CoreModule uses constants from TaskType in order to specify the value for @StringMapKey on each of the taskMappers --- .../common/metadata/workflow/TaskType.java | 17 +++++++++++++ .../metadata/workflow/WorkflowTask.java | 4 ---- .../conductor/core/config/CoreModule.java | 24 +++++++++---------- .../mapper/ForkJoinDynamicTaskMapper.java | 5 ++-- .../core/metadata/MetadataMapperService.java | 7 +++++- 5 files changed, 38 insertions(+), 19 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java index b81891a041..9bc9089647 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java @@ -7,8 +7,25 @@ @ProtoEnum public enum TaskType { + SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, JOIN, SUB_WORKFLOW, EVENT, WAIT, USER_DEFINED; + /** + * TaskType constants representing each of the possible enumeration values. + * Motivation: to not have any hardcoded TaskType enum value across the code. 
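The String constants being introduced here exist because annotation attributes in Java must be compile-time constant expressions: @StringMapKey(TASK_TYPE_DECISION) compiles, whereas @StringMapKey(TaskType.DECISION.name()) does not. A self-contained sketch of how a module such as CoreModule consumes such a constant through Guice's map binder (simplified and assumed: String stands in for TaskMapper, and the wiring mirrors the guice-multibindings 4.x API already used in this series):

    import com.google.inject.AbstractModule;
    import com.google.inject.Guice;
    import com.google.inject.Injector;
    import com.google.inject.Key;
    import com.google.inject.Singleton;
    import com.google.inject.TypeLiteral;
    import com.google.inject.multibindings.MultibindingsScanner;
    import com.google.inject.multibindings.ProvidesIntoMap;
    import com.google.inject.multibindings.StringMapKey;
    import com.google.inject.name.Named;
    import com.google.inject.name.Names;

    import java.util.Map;

    class MapperModule extends AbstractModule {
        static final String TASK_TYPE_DECISION = "DECISION"; // constant, so usable in an annotation

        @Override
        protected void configure() {
            install(MultibindingsScanner.asModule()); // discovers the @ProvidesIntoMap method below
        }

        @ProvidesIntoMap
        @StringMapKey(TASK_TYPE_DECISION)
        @Singleton
        @Named("TaskMappers")
        public String decisionMapper() {
            return "decision-mapper"; // a TaskMapper in the real module
        }

        public static void main(String[] args) {
            Injector injector = Guice.createInjector(new MapperModule());
            Map<String, String> mappers = injector.getInstance(
                    Key.get(new TypeLiteral<Map<String, String>>() {}, Names.named("TaskMappers")));
            System.out.println(mappers.get(TASK_TYPE_DECISION)); // decision-mapper
        }
    }
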
+ * Example of use: CoreModule + */ + public static final String TASK_TYPE_DECISION = "DECISION"; + public static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; + public static final String TASK_TYPE_JOIN = "JOIN"; + public static final String TASK_TYPE_FORK_JOIN_DYNAMIC = "FORK_JOIN_DYNAMIC"; + public static final String TASK_TYPE_EVENT = "EVENT"; + public static final String TASK_TYPE_WAIT = "WAIT"; + public static final String TASK_TYPE_SUB_WORKFLOW = "SUB_WORKFLOW"; + public static final String TASK_TYPE_FORK_JOIN = "FORK_JOIN"; + public static final String TASK_TYPE_USER_DEFINED = "USER_DEFINED"; + public static final String TASK_TYPE_SIMPLE = "SIMPLE"; + private static Set systemTasks = new HashSet<>(); static { systemTasks.add(TaskType.SIMPLE.name()); diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 0dbd47b333..d87654c30c 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -536,10 +536,6 @@ public WorkflowTask get(String taskReferenceName){ return null; } - - public boolean shouldPopulateDefinition() { - return getType().equals(TaskType.SIMPLE.name()) && getTaskDefinition() == null; - } @Override public String toString() { diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index d7e3a070d4..19b537637f 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -18,6 +18,17 @@ */ package com.netflix.conductor.core.config; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_DECISION; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_DYNAMIC; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_EVENT; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_FORK_JOIN; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_FORK_JOIN_DYNAMIC; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_JOIN; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_SIMPLE; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_SUB_WORKFLOW; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_USER_DEFINED; +import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_WAIT; + import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.AbstractModule; import com.google.inject.Provides; @@ -50,7 +61,6 @@ import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.dao.QueueDAO; - /** * @author Viren */ @@ -60,16 +70,7 @@ public class CoreModule extends AbstractModule { private static final String PROVIDER_EVENT_QUEUE = "EventQueueProviders"; private static final String TASK_MAPPERS_QUALIFIER = "TaskMappers"; - private static final String TASK_TYPE_DECISION = "DECISION"; - private static final String TASK_TYPE_DYNAMIC = "DYNAMIC"; - private static final String TASK_TYPE_JOIN = "JOIN"; - private static final String TASK_TYPE_FORK_JOIN_DYNAMIC = "FORK_JOIN_DYNAMIC"; - private static final String TASK_TYPE_EVENT = "EVENT"; - private static 
final String TASK_TYPE_WAIT = "WAIT"; - private static final String TASK_TYPE_SUB_WORKFLOW = "SUB_WORKFLOW"; - private static final String TASK_TYPE_FORK_JOIN = "FORK_JOIN"; - private static final String TASK_TYPE_USER_DEFINED = "USER_DEFINED"; - private static final String TASK_TYPE_SIMPLE = "SIMPLE"; + @Override protected void configure() { @@ -123,7 +124,6 @@ public TaskMapper getJoinTaskMapper() { } - @ProvidesIntoMap @StringMapKey(TASK_TYPE_FORK_JOIN_DYNAMIC) @Singleton diff --git a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java index f548b0af83..45089d24ba 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/mapper/ForkJoinDynamicTaskMapper.java @@ -27,6 +27,7 @@ import com.netflix.conductor.core.execution.ParametersUtils; import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.TerminateWorkflowException; +import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.core.utils.IDGenerator; import com.netflix.conductor.dao.MetadataDAO; import org.apache.commons.lang3.tuple.ImmutablePair; @@ -240,7 +241,7 @@ Pair, Map>> getDynamicForkTasksAn Object dynamicForkTasksJson = input.get(dynamicForkTaskParam); List dynamicForkWorkflowTasks = objectMapper.convertValue(dynamicForkTasksJson, ListOfWorkflowTasks); for (WorkflowTask workflowTask : dynamicForkWorkflowTasks) { - if (workflowTask.shouldPopulateDefinition()) { + if (MetadataMapperService.shouldPopulateDefinition(workflowTask)) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); } } @@ -283,7 +284,7 @@ Pair, Map>> getDynamicForkJoinTas dynamicForkJoinWorkflowTask.setTaskReferenceName(dynamicForkJoinTask.getReferenceName()); dynamicForkJoinWorkflowTask.setName(dynamicForkJoinTask.getTaskName()); dynamicForkJoinWorkflowTask.setType(dynamicForkJoinTask.getType()); - if (dynamicForkJoinWorkflowTask.shouldPopulateDefinition()) { + if (MetadataMapperService.shouldPopulateDefinition(dynamicForkJoinWorkflowTask)) { dynamicForkJoinWorkflowTask.setTaskDefinition( metadataDAO.getTaskDef(dynamicForkJoinTask.getTaskName())); } diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index a0b062d852..f2b8673d85 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -27,7 +27,7 @@ public MetadataMapperService(MetadataDAO metadataDAO) { public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { workflowDefinition.collectTasks().stream().forEach( workflowTask -> { - if (workflowTask.shouldPopulateDefinition()) { + if (shouldPopulateDefinition(workflowTask)) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); } else if (workflowTask.getType().equals(TaskType.SUB_WORKFLOW.name())) { populateVersionForSubWorkflow(workflowTask); @@ -55,4 +55,9 @@ private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { } } + public static boolean shouldPopulateDefinition(WorkflowTask workflowTask) { + return workflowTask.getType().equals(TaskType.SIMPLE.name()) && + workflowTask.getTaskDefinition() 
== null; + } + } From c8f28aff5fe3aaca43eab595626d4ccd64f14892 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 7 Aug 2018 10:59:30 -0700 Subject: [PATCH 128/163] Made qualifiers under CoreModule public --- .../conductor/common/metadata/workflow/TaskType.java | 2 +- .../com/netflix/conductor/contribs/ContribsModule.java | 4 +++- .../com/netflix/conductor/contribs/NatsModule.java | 4 +++- .../netflix/conductor/contribs/NatsStreamModule.java | 6 +++--- .../com/netflix/conductor/core/config/CoreModule.java | 10 ++++------ .../com/netflix/conductor/core/events/EventQueues.java | 6 ++++-- 6 files changed, 18 insertions(+), 14 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java index 9bc9089647..4e11f6a3c4 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java @@ -12,7 +12,7 @@ public enum TaskType { /** * TaskType constants representing each of the possible enumeration values. - * Motivation: to not have any hardcoded TaskType enum value across the code. + * Motivation: to not have any hardcoded/inline strings used in the code. * Example of use: CoreModule */ public static final String TASK_TYPE_DECISION = "DECISION"; diff --git a/contribs/src/main/java/com/netflix/conductor/contribs/ContribsModule.java b/contribs/src/main/java/com/netflix/conductor/contribs/ContribsModule.java index 6ebdc932b0..e5c1e8b1dc 100644 --- a/contribs/src/main/java/com/netflix/conductor/contribs/ContribsModule.java +++ b/contribs/src/main/java/com/netflix/conductor/contribs/ContribsModule.java @@ -18,6 +18,8 @@ */ package com.netflix.conductor.contribs; +import static com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER; + import java.util.HashMap; import java.util.Map; @@ -55,7 +57,7 @@ protected void configure() { @ProvidesIntoMap @StringMapKey("sqs") @Singleton - @Named("EventQueueProviders") + @Named(EVENT_QUEUE_PROVIDERS_QUALIFIER) public EventQueueProvider getSQSEventQueueProvider(AmazonSQSClient amazonSQSClient) { return new SQSEventQueueProvider(amazonSQSClient); } diff --git a/contribs/src/main/java/com/netflix/conductor/contribs/NatsModule.java b/contribs/src/main/java/com/netflix/conductor/contribs/NatsModule.java index 2f09cd1c21..81f6ab02a1 100644 --- a/contribs/src/main/java/com/netflix/conductor/contribs/NatsModule.java +++ b/contribs/src/main/java/com/netflix/conductor/contribs/NatsModule.java @@ -18,6 +18,8 @@ */ package com.netflix.conductor.contribs; +import static com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER; + import com.google.inject.AbstractModule; import com.google.inject.Singleton; import com.google.inject.multibindings.ProvidesIntoMap; @@ -45,7 +47,7 @@ protected void configure() { @ProvidesIntoMap @StringMapKey("nats") @Singleton - @Named("EventQueueProviders") + @Named(EVENT_QUEUE_PROVIDERS_QUALIFIER) public EventQueueProvider getNATSEventQueueProvider(Configuration configuration) { return new NATSEventQueueProvider(configuration); } diff --git a/contribs/src/main/java/com/netflix/conductor/contribs/NatsStreamModule.java b/contribs/src/main/java/com/netflix/conductor/contribs/NatsStreamModule.java index 0ff8ac311d..b589588462 100644 --- a/contribs/src/main/java/com/netflix/conductor/contribs/NatsStreamModule.java +++ 
b/contribs/src/main/java/com/netflix/conductor/contribs/NatsStreamModule.java @@ -18,6 +18,8 @@ */ package com.netflix.conductor.contribs; +import static com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER; + import com.google.inject.AbstractModule; import com.google.inject.Singleton; import com.google.inject.multibindings.ProvidesIntoMap; @@ -26,8 +28,6 @@ import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.EventQueueProvider; import com.netflix.conductor.core.events.nats.NATSStreamEventQueueProvider; -import com.netflix.conductor.core.events.queue.dyno.DynoEventQueueProvider; -import com.netflix.conductor.dao.QueueDAO; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -47,7 +47,7 @@ protected void configure() { @ProvidesIntoMap @StringMapKey("nats_stream") @Singleton - @Named("EventQueueProviders") + @Named(EVENT_QUEUE_PROVIDERS_QUALIFIER) public EventQueueProvider geNATSStreamEventQueueProvider(Configuration configuration) { return new NATSStreamEventQueueProvider(configuration); } diff --git a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java index 19b537637f..e2f0762b65 100644 --- a/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java +++ b/core/src/main/java/com/netflix/conductor/core/config/CoreModule.java @@ -28,6 +28,7 @@ import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_SUB_WORKFLOW; import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_USER_DEFINED; import static com.netflix.conductor.common.metadata.workflow.TaskType.TASK_TYPE_WAIT; +import static com.netflix.conductor.core.events.EventQueues.EVENT_QUEUE_PROVIDERS_QUALIFIER; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.inject.AbstractModule; @@ -66,11 +67,8 @@ */ public class CoreModule extends AbstractModule { - private static final String CONDUCTOR_QUALIFIER = "conductor"; - private static final String PROVIDER_EVENT_QUEUE = "EventQueueProviders"; - - private static final String TASK_MAPPERS_QUALIFIER = "TaskMappers"; - + public static final String CONDUCTOR_QUALIFIER = "conductor"; + public static final String TASK_MAPPERS_QUALIFIER = "TaskMappers"; @Override protected void configure() { @@ -94,7 +92,7 @@ public ParametersUtils getParameterUtils() { @ProvidesIntoMap @StringMapKey(CONDUCTOR_QUALIFIER) @Singleton - @Named(PROVIDER_EVENT_QUEUE) + @Named(EVENT_QUEUE_PROVIDERS_QUALIFIER) public EventQueueProvider getDynoEventQueueProvider(QueueDAO queueDAO, Configuration configuration) { return new DynoEventQueueProvider(queueDAO, configuration); } diff --git a/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java b/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java index 8ea9c2455e..3527471b16 100644 --- a/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java +++ b/core/src/main/java/com/netflix/conductor/core/events/EventQueues.java @@ -36,13 +36,15 @@ * Static holders for internal event queues */ public class EventQueues { - + + public static final String EVENT_QUEUE_PROVIDERS_QUALIFIER = "EventQueueProviders"; + private static Logger logger = LoggerFactory.getLogger(EventQueues.class); private static ParametersUtils parametersUtils = new ParametersUtils(); @Inject - @Named("EventQueueProviders") + @Named(EVENT_QUEUE_PROVIDERS_QUALIFIER) public static Map providers; //TODO this is a leaky abstraction, when the 
static injection is moved to singleton this will be fixed private EventQueues() { From 8377ed738e975ba8030971d1cba7e1c25a20d310 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 8 Aug 2018 12:11:12 -0700 Subject: [PATCH 129/163] Deleted logic related to terminateWorkflow - Assumed wrong execution flow, moving back to existing behaviour --- .../conductor/core/execution/WorkflowExecutor.java | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 8a022bd86f..cb075ce2c7 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -578,12 +578,9 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo try { - WorkflowDef latestFailureWorkflow = - metadataDAO.getLatest(failureWorkflow).orElse( - Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElseThrow(() -> - new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) - ) + WorkflowDef latestFailureWorkflow = metadataDAO.getLatest(failureWorkflow) + .orElseThrow(() -> + new RuntimeException("Failure Workflow Definition not found for: " + failureWorkflow) ); String failureWFId = startWorkflow( From 0131e4b91589948e7cf40784fffefc3926622fc7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Sat, 11 Aug 2018 18:32:11 -0700 Subject: [PATCH 130/163] Fixed javadoc comment for IndexDAO.setup --- core/src/main/java/com/netflix/conductor/dao/IndexDAO.java | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java index a28973df8a..1c872cdb62 100644 --- a/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java +++ b/core/src/main/java/com/netflix/conductor/dao/IndexDAO.java @@ -33,9 +33,7 @@ public interface IndexDAO { /** - * This method should return an unique identifier of the indexed doc - * @param workflow Workflow to be indexed - * + * Setup method in charge of initializing/populating the index. 
*/ void setup() throws Exception; @@ -47,8 +45,6 @@ public interface IndexDAO { void indexWorkflow(Workflow workflow); /** - * - * /** * This method should return an unique identifier of the indexed doc * @param workflow Workflow to be indexed * @return CompletableFuture of type void From 709fb365592fdb11921e2b5d4781803974eca909 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Sun, 12 Aug 2018 18:02:42 -0700 Subject: [PATCH 131/163] Updated ElasticSearchConfiguration to contain TaskLogIndexName property - Updated search dao v5 as well, so creation of metadata only happens during the setup --- .../dao/es5/index/ElasticSearchDAOV5.java | 18 ++++-------------- .../ElasticSearchConfiguration.java | 7 +++++++ 2 files changed, 11 insertions(+), 14 deletions(-) diff --git a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java index e5f9bcb14f..078bd4b98f 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/dao/es5/index/ElasticSearchDAOV5.java @@ -28,12 +28,12 @@ import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.WorkflowSummary; import com.netflix.conductor.common.utils.RetryUtil; -import com.netflix.conductor.core.config.Configuration; import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.ApplicationException.Code; import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.es5.index.query.parser.Expression; +import com.netflix.conductor.elasticsearch.ElasticSearchConfiguration; import com.netflix.conductor.elasticsearch.query.parser.ParserException; import com.netflix.conductor.metrics.Monitors; import org.apache.commons.io.IOUtils; @@ -135,21 +135,11 @@ public class ElasticSearchDAOV5 implements IndexDAO { } @Inject - public ElasticSearchDAOV5(Client elasticSearchClient, Configuration config, ObjectMapper objectMapper) { + public ElasticSearchDAOV5(Client elasticSearchClient, ElasticSearchConfiguration config, ObjectMapper objectMapper) { this.objectMapper = objectMapper; this.elasticSearchClient = elasticSearchClient; - this.indexName = config.getProperty("workflow.elasticsearch.index.name", null); - this.logIndexPrefix = config.getProperty("workflow.elasticsearch.tasklog.index.name", "task_log"); - - try { - - initIndex(); - updateIndexName(); - Executors.newScheduledThreadPool(1).scheduleAtFixedRate(() -> updateIndexName(), 0, 1, TimeUnit.HOURS); - - } catch (Exception e) { - logger.error(e.getMessage(), e); - } + this.indexName = config.getIndexName(); + this.logIndexPrefix = config.getTasklogIndexName(); int corePoolSize = 6; int maximumPoolSize = 12; diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index e77b82c4f4..9a3086a750 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -19,6 +19,9 @@ public interface ElasticSearchConfiguration extends Configuration { String ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.index.name"; 
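The interface being extended here follows a small, repeatable configuration pattern: one constant for the property name, one for its default value, and a default accessor tying them together, so a concrete configuration only has to implement the raw lookup. A reduced sketch of the pattern (ExampleConfiguration and the property names are illustrative):

    interface ExampleConfiguration {
        String FOO_PROPERTY_NAME = "workflow.example.foo";
        String FOO_DEFAULT_VALUE = "bar";

        // The only method a concrete configuration must supply.
        String getProperty(String name, String defaultValue);

        default String getFoo() {
            return getProperty(FOO_PROPERTY_NAME, FOO_DEFAULT_VALUE);
        }
    }

    class SystemPropertyConfiguration implements ExampleConfiguration {
        // Backed by JVM system properties here; any key-value store works.
        public String getProperty(String name, String defaultValue) {
            return System.getProperty(name, defaultValue);
        }
    }
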
String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; + String TASK_LOG_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.tasklog.index.name"; + String TASK_LOG_INDEX__NAME_DEFAULT_VALUE = "task_log"; + String EMBEDDED_DATA_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.path"; String EMBEDDED_DATA_PATH_DEFAULT_VALUE = "path.data"; @@ -56,6 +59,10 @@ default String getIndexName() { return getProperty(ELASTIC_SEARCH_INDEX_NAME_PROPERTY_NAME, ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE); } + default String getTasklogIndexName() { + return getProperty(TASK_LOG_INDEX_NAME_PROPERTY_NAME, TASK_LOG_INDEX__NAME_DEFAULT_VALUE); + } + default String getEmbeddedDataPath() { return getProperty(EMBEDDED_DATA_PATH_PROPERTY_NAME, EMBEDDED_DATA_PATH_DEFAULT_VALUE); } From e524cce44db3ef291071adbeabe5d732f73ddb23 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 13 Aug 2018 18:15:11 -0700 Subject: [PATCH 132/163] Updated constant name for task log index name --- .../conductor/elasticsearch/ElasticSearchConfiguration.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java index 9a3086a750..e457bac141 100644 --- a/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java +++ b/es5-persistence/src/main/java/com/netflix/conductor/elasticsearch/ElasticSearchConfiguration.java @@ -20,7 +20,7 @@ public interface ElasticSearchConfiguration extends Configuration { String ELASTIC_SEARCH_INDEX_NAME_DEFAULT_VALUE = "conductor"; String TASK_LOG_INDEX_NAME_PROPERTY_NAME = "workflow.elasticsearch.tasklog.index.name"; - String TASK_LOG_INDEX__NAME_DEFAULT_VALUE = "task_log"; + String TASK_LOG_INDEX_NAME_DEFAULT_VALUE = "task_log"; String EMBEDDED_DATA_PATH_PROPERTY_NAME = "workflow.elasticsearch.embedded.data.path"; String EMBEDDED_DATA_PATH_DEFAULT_VALUE = "path.data"; @@ -60,7 +60,7 @@ default String getIndexName() { } default String getTasklogIndexName() { - return getProperty(TASK_LOG_INDEX_NAME_PROPERTY_NAME, TASK_LOG_INDEX__NAME_DEFAULT_VALUE); + return getProperty(TASK_LOG_INDEX_NAME_PROPERTY_NAME, TASK_LOG_INDEX_NAME_DEFAULT_VALUE); } default String getEmbeddedDataPath() { From a3a4ab5f70a03e1cbfd0aafeedb766fd2baab57e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 20 Aug 2018 12:58:47 -0700 Subject: [PATCH 133/163] MetadataMapperService contains checks for missing task definitions - As part of the populateTaskDefinitions work --- .../core/metadata/MetadataMapperService.java | 33 ++++++++++++++++++- .../metadata/MetadataMapperServiceTest.java | 20 +++++++++++ 2 files changed, 52 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index f2b8673d85..a0210ccad0 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -5,19 +5,30 @@ import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.core.WorkflowContext; +import 
com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.dao.MetadataDAO; +import com.netflix.conductor.metrics.Monitors; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; +import java.util.Set; +import java.util.stream.Collectors; +/** + * Populates metadata definitions within workflow objects. + * Benefits of loading and populating metadata definitions upfront could be: + * - Immutable definitions within a workflow execution with the added benefit of guaranteeing consistency at runtime. + * - Stress is reduced on the storage layer + */ @Singleton public class MetadataMapperService { public static final Logger logger = LoggerFactory.getLogger(MetadataMapperService.class); - private MetadataDAO metadataDAO; + private final MetadataDAO metadataDAO; @Inject public MetadataMapperService(MetadataDAO metadataDAO) { @@ -25,6 +36,8 @@ public MetadataMapperService(MetadataDAO metadataDAO) { } public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { + + // Populate definitions on the workflow definition workflowDefinition.collectTasks().stream().forEach( workflowTask -> { if (shouldPopulateDefinition(workflowTask)) { @@ -34,6 +47,9 @@ public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { } } ); + + checkForMissingDefinitions(workflowDefinition); + return workflowDefinition; } @@ -55,6 +71,21 @@ private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { } } + private void checkForMissingDefinitions(WorkflowDef workflowDefinition) { + + // Obtain the names of the tasks with missing definitions + Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() + .filter(workflowTask -> shouldPopulateDefinition(workflowTask)) + .map(workflowTask -> workflowTask.getName()) + .collect(Collectors.toSet()); + + if (!missingTaskDefinitionNames.isEmpty()) { + logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefinitionNames); + Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); + throw new ApplicationException(ApplicationException.Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefinitionNames); + } + } + public static boolean shouldPopulateDefinition(WorkflowTask workflowTask) { return workflowTask.getType().equals(TaskType.SIMPLE.name()) && workflowTask.getTaskDefinition() == null; diff --git a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java index daa93994f4..956f8e0bda 100644 --- a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java +++ b/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java @@ -6,6 +6,7 @@ import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.dao.MetadataDAO; @@ -97,6 +98,25 @@ public void testMetadataPopulationOnlyOnNecessaryWorkflowTasks() { 
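The checkForMissingDefinitions method added above uses a collect-then-fail shape: gather every offending task name first, then raise one exception naming them all, so a caller learns about every missing definition in a single failure rather than one failure per attempt. The same shape in isolation (DefinitionCheck and requireAllDefined are hypothetical names, plain JDK types):

    import java.util.Map;
    import java.util.Set;
    import java.util.stream.Collectors;

    class DefinitionCheck {
        // Collects all null-valued entries before failing, so the error
        // message lists every missing definition at once.
        static void requireAllDefined(Map<String, Object> definitionsByName) {
            Set<String> missing = definitionsByName.entrySet().stream()
                    .filter(entry -> entry.getValue() == null)
                    .map(Map.Entry::getKey)
                    .collect(Collectors.toSet());
            if (!missing.isEmpty()) {
                throw new IllegalArgumentException(
                        "Cannot find the task definitions for: " + missing);
            }
        }
    }
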
verifyNoMoreInteractions(metadataDAO); } + @Test(expected = ApplicationException.class) + public void testMetadataPopulationMissingDefinitions() { + String nameTaskDefinition1 = "task4"; + WorkflowTask workflowTask1 = createWorkflowTask(nameTaskDefinition1); + + String nameTaskDefinition2 = "task5"; + WorkflowTask workflowTask2 = createWorkflowTask(nameTaskDefinition2); + + TaskDef taskDefinition = createTaskDefinition(nameTaskDefinition1); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testMetadataPopulation"); + workflowDefinition.setTasks(ImmutableList.of(workflowTask1, workflowTask2)); + + when(metadataDAO.getTaskDef(nameTaskDefinition1)).thenReturn(taskDefinition); + when(metadataDAO.getTaskDef(nameTaskDefinition2)).thenReturn(null); + + metadataMapperService.populateTaskDefinitions(workflowDefinition); + } + @Test public void testVersionPopulationForSubworkflowTaskIfVersionIsNotAvailable() { String nameTaskDefinition = "taskSubworkflow6"; From c2c47c27d47f6413b2ca72c52c3efbe2ce3e350d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 20 Aug 2018 13:00:46 -0700 Subject: [PATCH 134/163] Code review changes: javadoc, tasktype minor refactoring --- .../conductor/common/metadata/tasks/Task.java | 3 +- .../common/metadata/workflow/TaskType.java | 38 ++++++++++++------- .../core/execution/WorkflowExecutor.java | 13 +------ 3 files changed, 26 insertions(+), 28 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java index 6878127826..5bf7f29973 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/Task.java @@ -613,8 +613,7 @@ public void setOutputMessage(Any outputMessage) { */ public Optional getTaskDefinition() { return Optional.ofNullable(this.getWorkflowTask()) - .map(workflowTask -> Optional.ofNullable(workflowTask.getTaskDefinition())) - .orElse(Optional.empty()); + .map(workflowTask -> workflowTask.getTaskDefinition()); } public Task copy() { diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java index 4e11f6a3c4..866b3b3ea6 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/TaskType.java @@ -8,7 +8,16 @@ @ProtoEnum public enum TaskType { - SIMPLE, DYNAMIC, FORK_JOIN, FORK_JOIN_DYNAMIC, DECISION, JOIN, SUB_WORKFLOW, EVENT, WAIT, USER_DEFINED; + SIMPLE(true), + DYNAMIC(true), + FORK_JOIN(true), + FORK_JOIN_DYNAMIC(true), + DECISION(true), + JOIN(true), + SUB_WORKFLOW(true), + EVENT(true), + WAIT(true), + USER_DEFINED(false); /** * TaskType constants representing each of the possible enumeration values. 
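The Task.getTaskDefinition simplification above relies on a property of Optional.map that is easy to miss: a null result from the mapper is wrapped into an empty Optional automatically, so the explicit Optional.ofNullable(...).orElse(Optional.empty()) detour was redundant. A runnable check of the equivalence (nullableLookup is a stand-in for getTaskDefinition, not Conductor code):

    import java.util.Optional;

    class OptionalMapDemo {
        static String nullableLookup(String key) {
            return "id".equals(key) ? "42" : null; // may return null, like getTaskDefinition()
        }

        public static void main(String[] args) {
            Optional<String> verbose = Optional.of("id")
                    .map(k -> Optional.ofNullable(nullableLookup(k)))
                    .orElse(Optional.empty());
            Optional<String> concise = Optional.of("id")
                    .map(OptionalMapDemo::nullableLookup);
            System.out.println(verbose.equals(concise)); // true, for present and absent values alike
        }
    }
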
@@ -26,21 +35,22 @@ public enum TaskType { public static final String TASK_TYPE_USER_DEFINED = "USER_DEFINED"; public static final String TASK_TYPE_SIMPLE = "SIMPLE"; - private static Set systemTasks = new HashSet<>(); - static { - systemTasks.add(TaskType.SIMPLE.name()); - systemTasks.add(TaskType.DYNAMIC.name()); - systemTasks.add(TaskType.FORK_JOIN.name()); - systemTasks.add(TaskType.FORK_JOIN_DYNAMIC.name()); - systemTasks.add(TaskType.DECISION.name()); - systemTasks.add(TaskType.JOIN.name()); - systemTasks.add(TaskType.SUB_WORKFLOW.name()); - systemTasks.add(TaskType.EVENT.name()); - systemTasks.add(TaskType.WAIT.name()); - //Do NOT add USER_DEFINED here... + private boolean isSystemTask; + + TaskType(boolean isSystemTask) { + this.isSystemTask = isSystemTask; } + /* + * TODO: Update code to use only enums rather than Strings. + * This method is only used as a helper until the transition is done. + */ public static boolean isSystemTask(String name) { - return systemTasks.contains(name); + try { + TaskType taskType = TaskType.valueOf(name); + return taskType.isSystemTask; + } catch (IllegalArgumentException iae) { + return false; + } } } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 9af535a15e..c99239b95c 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -278,18 +278,6 @@ public String startWorkflow( metadataMapperService.populateTaskDefinitions(workflowDefinition); - // Obtain the names of the tasks with missing definitions - Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() - .filter(workflowTask -> - (workflowTask.getType().equals(TaskType.SIMPLE.name()) && workflowTask.getTaskDefinition() == null)) - .map(workflowTask -> workflowTask.getName()) - .collect(Collectors.toSet()); - - if (!missingTaskDefinitionNames.isEmpty()) { - logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefinitionNames); - Monitors.recordWorkflowStartError(workflowDefinition.getName(), WorkflowContext.get().getClientApp()); - throw new ApplicationException(Code.INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefinitionNames); - } //A random UUID is assigned to the work flow instance String workflowId = IDGenerator.generate(); @@ -510,6 +498,7 @@ void completeWorkflow(Workflow wf) { @VisibleForTesting Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { + // TODO: Update to use ServiceUtils once this is merged with dev // FIXME: Add messages. 
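The FIXME above is straightforward to discharge, since both Guava precondition helpers accept an error-message argument. A sketch of the two checks that follow, with messages attached (the wording is an assumption, not taken from the codebase; Guava and commons-lang3 are already dependencies here):

    import static com.google.common.base.Preconditions.checkArgument;
    import static com.google.common.base.Preconditions.checkNotNull;

    import org.apache.commons.lang3.StringUtils;

    class LookupPreconditions {
        // Mirrors the checkNotNull/checkArgument pair below, with
        // human-readable failure messages.
        static void validateName(String workflowName) {
            checkNotNull(workflowName, "Workflow name cannot be null");
            checkArgument(StringUtils.isNotBlank(workflowName),
                    "Workflow name cannot be blank");
        }
    }
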
checkNotNull(workflowName); checkArgument(StringUtils.isNotBlank(workflowName)); From 70f13a57b5b2a4263eb2316cefa1ff0b8d2fe17a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 23 Aug 2018 21:37:16 -0700 Subject: [PATCH 135/163] Added scenarios for backwards compatibility tests - Scenarios represent different situations a running workflow might get into --- .../scenarios/ConditionalTaskWF.json | 178 +++++ .../scenarios/ConditionalTaskWF2.json | 259 +++++++ .../scenarios/ConditionalTaskWF3.json | 259 +++++++ .../scenarios/ConditionalTaskWF4.json | 171 +++++ .../scenarios/DynamicFanInOutTest.json | 56 ++ .../scenarios/DynamicFanInOutTestLegacy.json | 56 ++ .../scenarios/FanInOutNestedTest.json | 685 ++++++++++++++++++ .../integration/scenarios/FanInOutTest.json | 207 ++++++ .../integration/scenarios/FanInOutTest_2.json | 207 ++++++ .../integration/scenarios/RTOWF.json | 60 ++ .../scenarios/WorkflowWithSubWorkflow.json | 61 ++ .../concurrentWorkflowExecutions.json | 60 ++ .../integration/scenarios/empty_workflow.json | 14 + .../integration/scenarios/forkJoinNested.json | 628 ++++++++++++++++ .../integration/scenarios/junit_test_wf.json | 91 +++ .../integration/scenarios/junit_test_wf3.json | 60 ++ .../junit_test_wf_non_restartable.json | 60 ++ .../scenarios/junit_test_wf_sw.json | 60 ++ .../integration/scenarios/longRunningWf.json | 60 ++ .../integration/scenarios/retry.json | 60 ++ ...impleWorkflowFailureWithTerminalError.json | 60 ++ .../simpleWorkflowWithTaskSpecificDomain.json | 65 ++ .../simpleWorkflowWithTasksInOneDomain.json | 64 ++ .../scenarios/template_workflow.json | 75 ++ .../integration/scenarios/test_event.json | 101 +++ .../integration/scenarios/test_wait.json | 46 ++ .../integration/scenarios/timeout.json | 61 ++ 27 files changed, 3764 insertions(+) create mode 100644 test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json create mode 100644 test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json create mode 100644 test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json create mode 100644 test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json create mode 100644 test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json create mode 100644 test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json create mode 100644 test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json create mode 100644 test-harness/src/test/resources/integration/scenarios/FanInOutTest.json create mode 100644 test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json create mode 100644 test-harness/src/test/resources/integration/scenarios/RTOWF.json create mode 100644 test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json create mode 100644 test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json create mode 100644 test-harness/src/test/resources/integration/scenarios/empty_workflow.json create mode 100644 test-harness/src/test/resources/integration/scenarios/forkJoinNested.json create mode 100644 test-harness/src/test/resources/integration/scenarios/junit_test_wf.json create mode 100644 test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json create mode 100644 test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json create mode 100644 
test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json create mode 100644 test-harness/src/test/resources/integration/scenarios/longRunningWf.json create mode 100644 test-harness/src/test/resources/integration/scenarios/retry.json create mode 100644 test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json create mode 100644 test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json create mode 100644 test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json create mode 100644 test-harness/src/test/resources/integration/scenarios/template_workflow.json create mode 100644 test-harness/src/test/resources/integration/scenarios/test_event.json create mode 100644 test-harness/src/test/resources/integration/scenarios/test_wait.json create mode 100644 test-harness/src/test/resources/integration/scenarios/timeout.json diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json new file mode 100644 index 0000000000..e05646d326 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json @@ -0,0 +1,178 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990536238, + "updateTime": 1534990536390, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "xxx" + }, + "referenceTaskName": "conditional", + "retryCount": 0, + "seq": 1, + "correlationId": "testCaseStatements: 1534990536230", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534990536269, + "startTime": 1534990536275, + "endTime": 1534990536389, + "updateTime": 1534990536389, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "8f65b5f2-da29-417f-a211-c86a9031d63a", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "xxx" + ] + }, + "workflowTask": { + "name": "conditional", + "taskReferenceName": "conditional", + "inputParameters": { + "case": "workflow.input.param1" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "nested": [ + { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + } + ], + "three": [ + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + 
"tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 6 + }, + { + "taskType": "junit_task_2", + "status": "SCHEDULED", + "inputData": { + "tp1": "xxx", + "tp3": "two" + }, + "referenceTaskName": "t2", + "retryCount": 0, + "seq": 2, + "correlationId": "testCaseStatements: 1534990536230", + "pollCount": 0, + "taskDefName": "junit_task_2", + "scheduledTime": 1534990536275, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990536275, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "5c00a173-d931-4f68-b3ba-c182265bc3db", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "xxx", + "param2": "two" + }, + "workflowType": "ConditionalTaskWF", + "version": 1, + "correlationId": "testCaseStatements: 1534990536230", + "schemaVersion": 1, + "startTime": 1534990536238 +} diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json new file mode 100644 index 0000000000..ced7ddab93 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json @@ -0,0 +1,259 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534999254315, + "updateTime": 1534999254327, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "nested" + }, + "referenceTaskName": "conditional", + "retryCount": 0, + "seq": 1, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534999254316, + "startTime": 1534999254317, + "endTime": 1534999254326, + "updateTime": 1534999254326, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "455c0446-edfb-492b-946d-89ec2fa34fe6", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "nested" + ] + }, + "workflowTask": { + "name": "conditional", + "taskReferenceName": "conditional", + "inputParameters": { + "case": "workflow.input.param1" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "nested": [ + { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } 
+ ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + } + ], + "three": [ + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 1 + }, + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "one" + }, + "referenceTaskName": "conditional2", + "retryCount": 0, + "seq": 2, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534999254316, + "startTime": 1534999254318, + "endTime": 1534999254327, + "updateTime": 1534999254327, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "d8547bbd-39a2-415a-81bf-9783f563bdbe", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "one" + ] + }, + "workflowTask": { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 2 + }, + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "nested", + "p2": "one" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 3, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534999254317, + "startTime": 0, + "endTime": 0, + "updateTime": 1534999254317, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "64423d29-e6e4-4a69-a5f3-a64c62f552b0", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "nested", + "param2": "one" + 
}, + "workflowType": "ConditionalTaskWF", + "version": 1, + "correlationId": "testCaseStatements: 1534999254108", + "schemaVersion": 1, + "startTime": 1534999254315 +} diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json new file mode 100644 index 0000000000..c636c9aac9 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json @@ -0,0 +1,259 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534999254353, + "updateTime": 1534999254358, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "nested" + }, + "referenceTaskName": "conditional", + "retryCount": 0, + "seq": 1, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534999254354, + "startTime": 1534999254355, + "endTime": 1534999254357, + "updateTime": 1534999254357, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "5264732a-968b-4c93-a4b6-c6ad086dfd02", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "nested" + ] + }, + "workflowTask": { + "name": "conditional", + "taskReferenceName": "conditional", + "inputParameters": { + "case": "workflow.input.param1" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "nested": [ + { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + } + ], + "three": [ + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 1 + }, + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "two" + }, + "referenceTaskName": "conditional2", + "retryCount": 0, + "seq": 2, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534999254355, + "startTime": 1534999254355, + "endTime": 1534999254358, + "updateTime": 1534999254358, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + 
"responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "c42112d0-409b-4651-950e-4068e40d2f61", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "two" + ] + }, + "workflowTask": { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_2", + "status": "SCHEDULED", + "inputData": { + "tp1": "nested", + "tp3": "two" + }, + "referenceTaskName": "t2", + "retryCount": 0, + "seq": 3, + "correlationId": "testCaseStatements: 1534999254108", + "pollCount": 0, + "taskDefName": "junit_task_2", + "scheduledTime": 1534999254355, + "startTime": 0, + "endTime": 0, + "updateTime": 1534999254355, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "0df27bb5-0961-4ac5-8af8-28c3e49c3368", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "nested", + "param2": "two" + }, + "workflowType": "ConditionalTaskWF", + "version": 1, + "correlationId": "testCaseStatements: 1534999254108", + "schemaVersion": 1, + "startTime": 1534999254353 +} diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json new file mode 100644 index 0000000000..b51ce9d085 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json @@ -0,0 +1,171 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534999661844, + "updateTime": 1534999661847, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "DECISION", + "status": "COMPLETED", + "inputData": { + "hasChildren": "true", + "case": "three" + }, + "referenceTaskName": "conditional", + "retryCount": 0, + "seq": 1, + "correlationId": "testCaseStatements: 1534999661592", + "pollCount": 0, + "taskDefName": "DECISION", + "scheduledTime": 1534999661845, + "startTime": 1534999661845, + "endTime": 1534999661847, + "updateTime": 1534999661847, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": 
"ConditionalTaskWF", + "taskId": "3a6fc7c8-07f2-4aba-86c8-3e09f0ea0cc6", + "callbackAfterSeconds": 0, + "outputData": { + "caseOutput": [ + "three" + ] + }, + "workflowTask": { + "name": "conditional", + "taskReferenceName": "conditional", + "inputParameters": { + "case": "workflow.input.param1" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "nested": [ + { + "name": "conditional2", + "taskReferenceName": "conditional2", + "inputParameters": { + "case": "workflow.input.param2" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "one": [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "two": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "startDelay": 0, + "optional": false + } + ], + "three": [ + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1", + "tp3": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_3", + "status": "SCHEDULED", + "referenceTaskName": "t3", + "retryCount": 0, + "seq": 2, + "correlationId": "testCaseStatements: 1534999661592", + "pollCount": 0, + "taskDefName": "junit_task_3", + "scheduledTime": 1534999661845, + "startTime": 0, + "endTime": 0, + "updateTime": 1534999661845, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "ConditionalTaskWF", + "taskId": "0d9eaabb-23a2-4942-a130-65448f40d34d", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_3", + "taskReferenceName": "t3", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "finalCase": "notify", + "param1": "three", + "param2": "two" + }, + "workflowType": "ConditionalTaskWF", + "version": 1, + "correlationId": "testCaseStatements: 1534999661592", + "schemaVersion": 1, + "startTime": 1534999661844 +} diff --git a/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json b/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json new file mode 100644 index 0000000000..3d507dc716 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json @@ -0,0 +1,56 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990883111, + "updateTime": 1534990883246, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null + }, + "referenceTaskName": "dt1", + "retryCount": 0, + "seq": 1, + "correlationId": 
"dynfanouttest1", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534990883143, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990883143, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "DynamicFanInOutTest", + "taskId": "d2c21507-a553-465b-82ce-1eedc86f3d1d", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "dt1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "workflowType": "DynamicFanInOutTest", + "version": 1, + "correlationId": "dynfanouttest1", + "schemaVersion": 1, + "startTime": 1534990883111 +} diff --git a/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json b/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json new file mode 100644 index 0000000000..18ded7557d --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json @@ -0,0 +1,56 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534985656537, + "updateTime": 1534985656670, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null + }, + "referenceTaskName": "dt1", + "retryCount": 0, + "seq": 1, + "correlationId": "dynfanouttest1", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534985656568, + "startTime": 0, + "endTime": 0, + "updateTime": 1534985656568, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "DynamicFanInOutTestLegacy", + "taskId": "e04365be-93c3-4e36-ad05-664e0e6a27c3", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "dt1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "workflowType": "DynamicFanInOutTestLegacy", + "version": 1, + "correlationId": "dynfanouttest1", + "schemaVersion": 1, + "startTime": 1534985656537 +} diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json b/test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json new file mode 100644 index 0000000000..f06396a4dc --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json @@ -0,0 +1,685 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534984145445, + "updateTime": 1534984145695, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fork1", + "retryCount": 0, + "seq": 1, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1534984145532, + "startTime": 1534984145540, + "endTime": 1534984145687, + "updateTime": 1534984145687, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": 
true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "8e4c840b-f7ab-48e6-a882-a0fa5611223b", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fork1", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "fork2", + "taskReferenceName": "fork2", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "junit_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_19", + "taskReferenceName": "t19", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "b": [ + { + "name": "junit_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, 
+ "optional": false + }, + { + "taskReferenceName": "join2", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false + } + ], + [ + { + "taskReferenceName": "sw1", + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "junit_test_wf" + }, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 8 + }, + { + "taskType": "junit_task_11", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t11", + "retryCount": 0, + "seq": 2, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_11", + "scheduledTime": 1534984145537, + "startTime": 0, + "endTime": 0, + "updateTime": 1534984145537, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "dbce7428-23c8-4b02-a7e4-cf2f1629c44f", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fork2", + "retryCount": 0, + "seq": 3, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1534984145538, + "startTime": 1534984145541, + "endTime": 1534984145688, + "updateTime": 1534984145688, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "49aea6d0-0073-404a-b8a5-f49ea5d51b63", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "fork2", + "taskReferenceName": "fork2", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "junit_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_19", + "taskReferenceName": 
"t19", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "b": [ + { + "name": "junit_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 3 + }, + { + "taskType": "junit_task_12", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t12", + "retryCount": 0, + "seq": 4, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_12", + "scheduledTime": 1534984145538, + "startTime": 0, + "endTime": 0, + "updateTime": 1534984145538, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "cdef48f5-39d5-4341-85f2-b72984f1dd46", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_13", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t13", + "retryCount": 0, + "seq": 5, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_13", + "scheduledTime": 1534984145539, + "startTime": 0, + "endTime": 0, + "updateTime": 1534984145539, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "f304e4ec-effd-46e5-a2f3-854afe2b4fc6", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + 
"case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "SUB_WORKFLOW", + "status": "IN_PROGRESS", + "inputData": { + "workflowInput": {}, + "subWorkflowId": "a1fd729d-ad38-4159-a788-4bd07f60e911", + "subWorkflowName": "junit_test_wf", + "subWorkflowVersion": 1 + }, + "referenceTaskName": "sw1", + "retryCount": 0, + "seq": 6, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "SUB_WORKFLOW", + "scheduledTime": 1534984145539, + "startTime": 1534984145541, + "endTime": 1534984145527, + "updateTime": 1534984145669, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "bc62166d-1fb2-4cc9-9d94-52da4d88bd19", + "callbackAfterSeconds": 0, + "outputData": { + "subWorkflowId": "a1fd729d-ad38-4159-a788-4bd07f60e911" + }, + "workflowTask": { + "taskReferenceName": "sw1", + "type": "SUB_WORKFLOW", + "startDelay": 0, + "subWorkflowParam": { + "name": "junit_test_wf" + }, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 2 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t11", + "join2", + "sw1" + ] + }, + "referenceTaskName": "join1", + "retryCount": 0, + "seq": 7, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1534984145686, + "startTime": 1534984145687, + "endTime": 1534984145685, + "updateTime": 1534984145687, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "3dccf1fe-4653-4e04-b7a5-f21a2a723da8", + "callbackAfterSeconds": 0, + "outputData": { + "t11": {} + }, + "workflowTask": { + "taskReferenceName": "join1", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t11", + "join2", + "sw1" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 1 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t14", + "t20" + ] + }, + "referenceTaskName": "join2", + "retryCount": 0, + "seq": 8, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1534984145687, + "startTime": 1534984145687, + "endTime": 1534984145686, + "updateTime": 1534984145687, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "b371077b-b382-4e19-80b8-6d6540752793", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "join2", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 0 + } + ], + "input": { + "case": "a" + }, + "workflowType": "FanInOutNestedTest", + "version": 1, + "correlationId": "fork_join_nested_test", + "schemaVersion": 1, + "startTime": 1534984145445 +} diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutTest.json 
b/test-harness/src/test/resources/integration/scenarios/FanInOutTest.json new file mode 100644 index 0000000000..952f375daf --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/FanInOutTest.json @@ -0,0 +1,207 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534983505157, + "updateTime": 1534983505166, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fanouttask", + "retryCount": 0, + "seq": 1, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1534983505159, + "startTime": 1534983505161, + "endTime": 1534983505164, + "updateTime": 1534983505164, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest", + "taskId": "aa40aee3-4e9d-4cbc-8298-37d39883fec5", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fanouttask", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_3", + "taskReferenceName": "t3", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 2 + }, + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 2, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534983505160, + "startTime": 0, + "endTime": 0, + "updateTime": 1534983505160, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest", + "taskId": "feb6c6c2-0974-4e78-b4df-df4389c46aea", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_2", + "status": "SCHEDULED", + "inputData": { + "tp1": null + }, + "referenceTaskName": "t2", + "retryCount": 0, + "seq": 3, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "junit_task_2", + "scheduledTime": 1534983505160, + "startTime": 0, + "endTime": 0, + "updateTime": 1534983505160, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest", + "taskId": "36ee8037-e378-4649-92ca-3655c4e2ba75", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_2", + "taskReferenceName": 
"t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t3", + "t2" + ] + }, + "referenceTaskName": "fanouttask_join", + "retryCount": 0, + "seq": 4, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1534983505164, + "startTime": 1534983505164, + "endTime": 1534983505163, + "updateTime": 1534983505164, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest", + "taskId": "e681e9ce-b902-4931-9759-3ec5df88ddd0", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fanouttask_join", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t3", + "t2" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 0 + } + ], + "workflowType": "FanInOutTest", + "version": 1, + "correlationId": "fanouttest", + "schemaVersion": 1, + "startTime": 1534983505157 +} diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json b/test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json new file mode 100644 index 0000000000..992fb60820 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json @@ -0,0 +1,207 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990432158, + "updateTime": 1534990432327, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fanouttask", + "retryCount": 0, + "seq": 1, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1534990432193, + "startTime": 1534990432201, + "endTime": 1534990432325, + "updateTime": 1534990432325, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest_2", + "taskId": "1a7aa928-4159-447a-b9a8-2722a1da1031", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fanouttask", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_0_RT_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_0_RT_3", + "taskReferenceName": "t3", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_0_RT_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 8 + }, + { + "taskType": "junit_task_0_RT_1", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 2, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "junit_task_0_RT_1", + "scheduledTime": 
1534990432200, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990432200, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest_2", + "taskId": "7933d420-53df-450f-bf38-950c90835b0e", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_0_RT_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_0_RT_2", + "status": "SCHEDULED", + "inputData": { + "tp1": null + }, + "referenceTaskName": "t2", + "retryCount": 0, + "seq": 3, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "junit_task_0_RT_2", + "scheduledTime": 1534990432200, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990432200, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest_2", + "taskId": "9028683d-6604-49a8-b803-e1d9f924c474", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_0_RT_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "workflow.input.param1" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t3", + "t2" + ] + }, + "referenceTaskName": "fanouttask_join", + "retryCount": 0, + "seq": 4, + "correlationId": "fanouttest", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1534990432324, + "startTime": 1534990432325, + "endTime": 1534990432323, + "updateTime": 1534990432325, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutTest_2", + "taskId": "94140a15-875a-4f39-9573-3e2f6960f557", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fanouttask_join", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t3", + "t2" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 1 + } + ], + "workflowType": "FanInOutTest_2", + "version": 1, + "correlationId": "fanouttest", + "schemaVersion": 1, + "startTime": 1534990432158 +} diff --git a/test-harness/src/test/resources/integration/scenarios/RTOWF.json b/test-harness/src/test/resources/integration/scenarios/RTOWF.json new file mode 100644 index 0000000000..0c7e5e1082 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/RTOWF.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534986389370, + "updateTime": 1534986389594, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "task_rt", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "task_rt_t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_1", + "pollCount": 0, + "taskDefName": "task_rt", + "scheduledTime": 1534986389470, + "startTime": 0, + "endTime": 0, + "updateTime": 1534986389470, + 
"startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 10, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "RTOWF", + "taskId": "225c5461-b21b-4934-8b09-65c24a3daeda", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "task_rt", + "taskReferenceName": "task_rt_t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "RTOWF", + "version": 1, + "correlationId": "unit_test_1", + "schemaVersion": 2, + "startTime": 1534986389370 +} diff --git a/test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json b/test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json new file mode 100644 index 0000000000..09af345b62 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json @@ -0,0 +1,61 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534989930597, + "updateTime": 1534989930779, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_5", + "status": "SCHEDULED", + "inputData": { + "p1": "param 1 value", + "p2": null + }, + "referenceTaskName": "a1", + "retryCount": 0, + "seq": 1, + "correlationId": "test", + "pollCount": 0, + "taskDefName": "junit_task_5", + "scheduledTime": 1534989930669, + "startTime": 0, + "endTime": 0, + "updateTime": 1534989930669, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "WorkflowWithSubWorkflow", + "taskId": "9d704f3e-3814-4e60-8592-a4e2aec81e50", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_5", + "taskReferenceName": "a1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "wfName": "junit_test_wf", + "param3": "param 2 value", + "param1": "param 1 value" + }, + "workflowType": "WorkflowWithSubWorkflow", + "version": 1, + "correlationId": "test", + "schemaVersion": 2, + "startTime": 1534989930597 +} diff --git a/test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json b/test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json new file mode 100644 index 0000000000..bf14efed98 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535058398369, + "updateTime": 1535058398372, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_concurrrent", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1535058398371, + "startTime": 0, + "endTime": 0, + "updateTime": 1535058398371, + "startDelayInSeconds": 0, + "retried": false, + 
"executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf", + "taskId": "39f2ccac-e449-4eab-87ba-cb85ba817a3c", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf", + "version": 1, + "correlationId": "unit_test_concurrrent", + "schemaVersion": 2, + "startTime": 1535058398369 +} diff --git a/test-harness/src/test/resources/integration/scenarios/empty_workflow.json b/test-harness/src/test/resources/integration/scenarios/empty_workflow.json new file mode 100644 index 0000000000..ab11ffa4b1 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/empty_workflow.json @@ -0,0 +1,14 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534983504822, + "updateTime": 1534983504841, + "status": "COMPLETED", + "endTime": 1534983504841, + "workflowId": "WORKFLOW_INSTANCE_ID", + "workflowType": "empty_workflow", + "version": 1, + "correlationId": "testWorkflowWithNoTasks", + "reasonForIncompletion": "No tasks found to be executed", + "schemaVersion": 2, + "startTime": 1534983504822 +} diff --git a/test-harness/src/test/resources/integration/scenarios/forkJoinNested.json b/test-harness/src/test/resources/integration/scenarios/forkJoinNested.json new file mode 100644 index 0000000000..4248674eaf --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/forkJoinNested.json @@ -0,0 +1,628 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535070652209, + "updateTime": 1535070652424, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fork1", + "retryCount": 0, + "seq": 1, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1535070652273, + "startTime": 1535070652282, + "endTime": 1535070652416, + "updateTime": 1535070652416, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "7154b7b6-cf9e-45b4-a81f-f9faa349845c", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "fork1", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "fork2", + "taskReferenceName": "fork2", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + 
"startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "junit_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_19", + "taskReferenceName": "t19", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "b": [ + { + "name": "junit_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + { + "taskReferenceName": "join2", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 9 + }, + { + "taskType": "junit_task_11", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t11", + "retryCount": 0, + "seq": 2, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_11", + "scheduledTime": 1535070652279, + "startTime": 0, + "endTime": 0, + "updateTime": 1535070652279, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "f56e4de5-1331-478b-bee5-055703cd47c8", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_11", + "taskReferenceName": "t11", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": 
"workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "FORK", + "status": "COMPLETED", + "referenceTaskName": "fork2", + "retryCount": 0, + "seq": 3, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "FORK", + "scheduledTime": 1535070652280, + "startTime": 1535070652283, + "endTime": 1535070652417, + "updateTime": 1535070652417, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "dada5cfc-e8ab-4b14-97d6-5115af8fdb4c", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "fork2", + "taskReferenceName": "fork2", + "type": "FORK_JOIN", + "forkTasks": [ + [ + { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_14", + "taskReferenceName": "t14", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + [ + { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "Decision", + "taskReferenceName": "d1", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "DECISION", + "caseValueParam": "case", + "decisionCases": { + "a": [ + { + "name": "junit_task_16", + "taskReferenceName": "t16", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_19", + "taskReferenceName": "t19", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "b": [ + { + "name": "junit_task_17", + "taskReferenceName": "t17", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + "taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ] + }, + "defaultCase": [ + { + "name": "junit_task_18", + "taskReferenceName": "t18", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + { + "name": "junit_task_20", + 
"taskReferenceName": "t20", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + } + ], + "startDelay": 0, + "optional": false + } + ] + ], + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 3 + }, + { + "taskType": "junit_task_12", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t12", + "retryCount": 0, + "seq": 4, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_12", + "scheduledTime": 1535070652280, + "startTime": 0, + "endTime": 0, + "updateTime": 1535070652280, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "72262faf-8ce6-4d2d-9bff-d7c4a549db54", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_12", + "taskReferenceName": "t12", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "junit_task_13", + "status": "SCHEDULED", + "inputData": { + "p1": null, + "p2": null, + "case": "a" + }, + "referenceTaskName": "t13", + "retryCount": 0, + "seq": 5, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "junit_task_13", + "scheduledTime": 1535070652280, + "startTime": 0, + "endTime": 0, + "updateTime": 1535070652281, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "53163269-212d-4670-9586-8cd9dfd096db", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_13", + "taskReferenceName": "t13", + "inputParameters": { + "p1": "workflow.input.param1", + "p2": "workflow.input.param2", + "case": "workflow.input.case" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t11", + "join2" + ] + }, + "referenceTaskName": "join1", + "retryCount": 0, + "seq": 7, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1535070652415, + "startTime": 1535070652416, + "endTime": 1535070652415, + "updateTime": 1535070652416, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "d39443cc-d531-4612-9eb0-f1b4c2c1aa29", + "callbackAfterSeconds": 0, + "outputData": { + "t11": {} + }, + "workflowTask": { + "taskReferenceName": "join1", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t11", + "join2" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 1 + }, + { + "taskType": "JOIN", + "status": "IN_PROGRESS", + "inputData": { + "joinOn": [ + "t14", + "t20" + ] + }, + "referenceTaskName": 
"join2", + "retryCount": 0, + "seq": 8, + "correlationId": "fork_join_nested_test", + "pollCount": 0, + "taskDefName": "JOIN", + "scheduledTime": 1535070652416, + "startTime": 1535070652416, + "endTime": 1535070652415, + "updateTime": 1535070652416, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "FanInOutNestedTest", + "taskId": "7696c0e4-5112-497f-a61f-6a24317a4d8a", + "callbackAfterSeconds": 0, + "workflowTask": { + "taskReferenceName": "join2", + "type": "JOIN", + "startDelay": 0, + "joinOn": [ + "t14", + "t20" + ], + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 0 + } + ], + "input": { + "case": "a" + }, + "workflowType": "FanInOutNestedTest", + "version": 1, + "correlationId": "fork_join_nested_test", + "schemaVersion": 1, + "startTime": 1535070652209 +} diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf.json b/test-harness/src/test/resources/integration/scenarios/junit_test_wf.json new file mode 100644 index 0000000000..87390f515b --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/junit_test_wf.json @@ -0,0 +1,91 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534901322631, + "updateTime": 1534901322800, + "createdBy": null, + "updatedBy": null, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "parentWorkflowId": null, + "parentWorkflowTaskId": null, + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_126654816802294", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534901322699, + "startTime": 0, + "endTime": 0, + "updateTime": 1534901322699, + "startDelayInSeconds": 0, + "retriedTaskId": null, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf", + "taskId": "7456b2cb-a9dc-4a35-8b52-4c9e914d3073", + "reasonForIncompletion": null, + "callbackAfterSeconds": 0, + "workerId": null, + "outputData": {}, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "description": null, + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "dynamicTaskNameParam": null, + "caseValueParam": null, + "caseExpression": null, + "decisionCases": {}, + "dynamicForkJoinTasksParam": null, + "dynamicForkTasksParam": null, + "dynamicForkTasksInputParamName": null, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": null, + "joinOn": [], + "sink": null, + "optional": false, + "rateLimited": null + }, + "domain": null, + "inputMessage": null, + "outputMessage": null, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "output": {}, + "workflowType": "junit_test_wf", + "version": 1, + "correlationId": "unit_test_126654816802294", + "reRunFromWorkflowId": null, + "reasonForIncompletion": null, + "schemaVersion": 2, + "event": null, + "taskToDomain": {}, + "failedReferenceTaskNames": [], + "startTime": 1534901322631 +} \ No newline at end of file diff --git 
a/test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json b/test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json new file mode 100644 index 0000000000..2d1f2cdca0 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990090639, + "updateTime": 1534990090810, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_122addfc0-354a-4e99-b756-e34d72ea1f97", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534990090709, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990090709, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf3", + "taskId": "7f53def1-835a-4aa5-ae9a-7b6052e657b1", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf3", + "version": 1, + "correlationId": "unit_test_122addfc0-354a-4e99-b756-e34d72ea1f97", + "schemaVersion": 2, + "startTime": 1534990090639 +} diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json b/test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json new file mode 100644 index 0000000000..48d70f9c2b --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990253433, + "updateTime": 1534990253667, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_180b692c7-691a-4edb-a0a6-8b65cc76f9a0", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534990253540, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990253540, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf_non_restartable", + "taskId": "37fc37f7-729c-4b22-9b8c-a23d68149ed2", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf_non_restartable", + "version": 1, + "correlationId": "unit_test_180b692c7-691a-4edb-a0a6-8b65cc76f9a0", + 
"schemaVersion": 2, + "startTime": 1534990253433 +} diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json b/test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json new file mode 100644 index 0000000000..cc2627adde --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534986608565, + "updateTime": 1534986608739, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_3", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_sw", + "pollCount": 0, + "taskDefName": "junit_task_3", + "scheduledTime": 1534986608637, + "startTime": 0, + "endTime": 0, + "updateTime": 1534986608637, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf_sw", + "taskId": "6ecc0e58-adb3-408b-b815-0762eb572488", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_3", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf_sw", + "version": 1, + "correlationId": "unit_test_sw", + "schemaVersion": 2, + "startTime": 1534986608565 +} diff --git a/test-harness/src/test/resources/integration/scenarios/longRunningWf.json b/test-harness/src/test/resources/integration/scenarios/longRunningWf.json new file mode 100644 index 0000000000..08a65eb266 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/longRunningWf.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534990739242, + "updateTime": 1534990739413, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_1", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534990739311, + "startTime": 0, + "endTime": 0, + "updateTime": 1534990739311, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "longRunningWf", + "taskId": "0fefb562-dbef-4cec-bec8-a5e72be5cfef", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "longRunningWf", + "version": 1, + "correlationId": "unit_test_1", + "schemaVersion": 2, + "startTime": 1534990739242 +} diff --git a/test-harness/src/test/resources/integration/scenarios/retry.json 
b/test-harness/src/test/resources/integration/scenarios/retry.json new file mode 100644 index 0000000000..34f01eab2f --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/retry.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535063868376, + "updateTime": 1535063868556, + "status": "RUNNING", + "endTime": 0, + "workflowId": "045ec0b9-5f79-446b-9832-82918ec047aa", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_1d2488fa6-a508-49d5-9a88-82335a7d43fa", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1535063868452, + "startTime": 0, + "endTime": 0, + "updateTime": 1535063868452, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "045ec0b9-5f79-446b-9832-82918ec047aa", + "workflowType": "junit_test_wf", + "taskId": "80362582-3e22-4b5d-8635-6cc9c2824adf", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf", + "version": 1, + "correlationId": "unit_test_1d2488fa6-a508-49d5-9a88-82335a7d43fa", + "schemaVersion": 2, + "startTime": 1535063868376 +} diff --git a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json new file mode 100644 index 0000000000..b88a711691 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json @@ -0,0 +1,60 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535068496317, + "updateTime": 1535068496495, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_1", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1535068496389, + "startTime": 0, + "endTime": 0, + "updateTime": 1535068496389, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf", + "taskId": "94c952ad-10c8-4b04-a236-586f57f44a0a", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "queueWaitTime": 0, + "taskStatus": "SCHEDULED" + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf", + "version": 1, + "correlationId": "unit_test_1", + "schemaVersion": 2, + "startTime": 1535068496317 +} diff --git 
a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json new file mode 100644 index 0000000000..d0ccd59237 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json @@ -0,0 +1,65 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535067958883, + "updateTime": 1535067959010, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_3", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_sw", + "pollCount": 0, + "taskDefName": "junit_task_3", + "scheduledTime": 1535067959000, + "startTime": 0, + "endTime": 0, + "updateTime": 1535067959000, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf_sw", + "taskId": "3ebec7c7-ba78-4983-85d4-3081886cd7f7", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_3", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "domain": "domain1", + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf_sw", + "version": 1, + "correlationId": "unit_test_sw", + "schemaVersion": 2, + "taskToDomain": { + "junit_task_3": "domain1", + "junit_task_2": "domain1" + }, + "startTime": 1535067958883 +} diff --git a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json new file mode 100644 index 0000000000..26180163f6 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json @@ -0,0 +1,64 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535067060792, + "updateTime": 1535067060906, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_3", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_sw", + "pollCount": 0, + "taskDefName": "junit_task_3", + "scheduledTime": 1535067060896, + "startTime": 0, + "endTime": 0, + "updateTime": 1535067060896, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf_sw", + "taskId": "06e596e1-dc12-4bd0-937c-dfde77bacbcf", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_3", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "domain": "domain11", + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf_sw", + 
"version": 1, + "correlationId": "unit_test_sw", + "schemaVersion": 2, + "taskToDomain": { + "*": "domain11,, domain12" + }, + "startTime": 1535067060792 +} diff --git a/test-harness/src/test/resources/integration/scenarios/template_workflow.json b/test-harness/src/test/resources/integration/scenarios/template_workflow.json new file mode 100644 index 0000000000..0a06e7bd0f --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/template_workflow.json @@ -0,0 +1,75 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534983505050, + "updateTime": 1534983505131, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "templated_task", + "status": "SCHEDULED", + "inputData": { + "http_request": { + "method": "GET", + "vipStack": "test_stack", + "body": { + "requestDetails": { + "key1": "value1", + "key2": 42 + }, + "outputPath": "s3://bucket/outputPath", + "inputPaths": [ + "file://path1", + "file://path2" + ] + }, + "uri": "/get/something" + } + }, + "referenceTaskName": "t0", + "retryCount": 0, + "seq": 1, + "correlationId": "testTaskDefTemplate", + "pollCount": 0, + "taskDefName": "templated_task", + "scheduledTime": 1534983505121, + "startTime": 0, + "endTime": 0, + "updateTime": 1534983505121, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "template_workflow", + "taskId": "9dea4567-0240-4eab-bde8-99f4535ea3fc", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "templated_task", + "taskReferenceName": "t0", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "path1": "file://path1", + "path2": "file://path2", + "requestDetails": { + "key1": "value1", + "key2": 42 + }, + "outputPath": "s3://bucket/outputPath" + }, + "workflowType": "template_workflow", + "version": 1, + "correlationId": "testTaskDefTemplate", + "schemaVersion": 2, + "startTime": 1534983505050 +} diff --git a/test-harness/src/test/resources/integration/scenarios/test_event.json b/test-harness/src/test/resources/integration/scenarios/test_event.json new file mode 100644 index 0000000000..26b0730e7f --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/test_event.json @@ -0,0 +1,101 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534988786979, + "updateTime": 1534988787169, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "EVENT", + "status": "COMPLETED", + "inputData": { + "sink": "conductor" + }, + "referenceTaskName": "wait0", + "retryCount": 0, + "seq": 1, + "pollCount": 0, + "taskDefName": "eventX", + "scheduledTime": 1534988787043, + "startTime": 1534988787051, + "endTime": 1534988787169, + "updateTime": 1534988787169, + "startDelayInSeconds": 0, + "retried": false, + "executed": true, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "test_event", + "taskId": "4f39b091-03c5-469c-970f-1c2addec1571", + "callbackAfterSeconds": 0, + "outputData": { + "event_produced": "conductor:test_event:wait0", + "sink": "conductor", + "workflowType": "test_event", + "correlationId": null, + "workflowVersion": 1, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID" + }, + "workflowTask": { + "name": "eventX", + "taskReferenceName": "wait0", + 
"inputParameters": { + "sink": "conductor" + }, + "type": "EVENT", + "startDelay": 0, + "sink": "conductor", + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "COMPLETED", + "queueWaitTime": 8 + }, + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 2, + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1534988787163, + "startTime": 0, + "endTime": 0, + "updateTime": 1534988787163, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "test_event", + "taskId": "2d482701-9cae-4576-ab2c-91e096b5ed1c", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "output": { + "event_produced": "conductor:test_event:wait0", + "sink": "conductor", + "workflowType": "test_event", + "correlationId": null, + "workflowVersion": 1, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID" + }, + "workflowType": "test_event", + "version": 1, + "schemaVersion": 2, + "startTime": 1534988786979 +} diff --git a/test-harness/src/test/resources/integration/scenarios/test_wait.json b/test-harness/src/test/resources/integration/scenarios/test_wait.json new file mode 100644 index 0000000000..9a74441969 --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/test_wait.json @@ -0,0 +1,46 @@ +{ + "ownerApp": "junit_app", + "createTime": 1534988907130, + "updateTime": 1534988907297, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "WAIT", + "status": "IN_PROGRESS", + "referenceTaskName": "wait0", + "retryCount": 0, + "seq": 1, + "pollCount": 0, + "taskDefName": "wait", + "scheduledTime": 1534988907192, + "startTime": 1534988907200, + "endTime": 1534988907186, + "updateTime": 1534988907200, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 0, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "test_wait", + "taskId": "22052711-1039-4888-935a-5d0efa8b5bbf", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "wait", + "taskReferenceName": "wait0", + "type": "WAIT", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "IN_PROGRESS", + "queueWaitTime": 8 + } + ], + "workflowType": "test_wait", + "version": 1, + "schemaVersion": 2, + "startTime": 1534988907130 +} diff --git a/test-harness/src/test/resources/integration/scenarios/timeout.json b/test-harness/src/test/resources/integration/scenarios/timeout.json new file mode 100644 index 0000000000..66298f0c9b --- /dev/null +++ b/test-harness/src/test/resources/integration/scenarios/timeout.json @@ -0,0 +1,61 @@ +{ + "ownerApp": "junit_app", + "createTime": 1535068128051, + "updateTime": 1535068128223, + "status": "RUNNING", + "endTime": 0, + "workflowId": "WORKFLOW_INSTANCE_ID", + "tasks": [ + { + "taskType": "junit_task_1", + "status": "SCHEDULED", + "inputData": { + "p1": "p1 value", + "p2": "p2 value" + }, + "referenceTaskName": "t1", + "retryCount": 0, + "seq": 1, + "correlationId": "unit_test_175105d0e-55a2-4bb0-bbf7-06e119de34fe", + "pollCount": 0, + "taskDefName": "junit_task_1", + "scheduledTime": 1535068128120, + "startTime": 0, + 
"endTime": 0, + "updateTime": 1535068128120, + "startDelayInSeconds": 0, + "retried": false, + "executed": false, + "callbackFromWorker": true, + "responseTimeoutSeconds": 3600, + "workflowInstanceId": "WORKFLOW_INSTANCE_ID", + "workflowType": "junit_test_wf", + "taskId": "9b9c93d2-fe29-4a75-8771-d8e71f3cde84", + "callbackAfterSeconds": 0, + "workflowTask": { + "name": "junit_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}" + }, + "type": "SIMPLE", + "startDelay": 0, + "optional": false + }, + "rateLimitPerSecond": 0, + "taskStatus": "SCHEDULED", + "queueWaitTime": 0 + } + ], + "input": { + "failureWfName": "FanInOutTest", + "param1": "p1 value", + "param2": "p2 value" + }, + "workflowType": "junit_test_wf", + "version": 1, + "correlationId": "unit_test_175105d0e-55a2-4bb0-bbf7-06e119de34fe", + "schemaVersion": 2, + "startTime": 1535068128051 +} From a657c98dcd8824a63f132fc32e767b6f416c8567 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 23 Aug 2018 22:40:36 -0700 Subject: [PATCH 136/163] Conductor changes in order to be backwards compatible with already running workflows - Extra checks for metadata population at WorkflowExecutor and ExecutionService - Added battery of tests --- .../conductor/common/run/Workflow.java | 18 +- .../core/execution/WorkflowExecutor.java | 20 +- .../core/metadata/MetadataMapperService.java | 76 +- .../conductor/service/ExecutionService.java | 12 +- .../AbstractWorkflowServiceTest.java | 4322 +++++++++++++++++ .../WorkflowLegacyMigrationTest.java | 114 + .../integration/WorkflowServiceTest.java | 4072 +--------------- 7 files changed, 4542 insertions(+), 4092 deletions(-) create mode 100644 test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java create mode 100644 test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index f58a5049df..381221f751 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -27,6 +27,7 @@ @ProtoMessage public class Workflow extends Auditable { + @ProtoEnum public enum WorkflowStatus { RUNNING(false, false), COMPLETED(true, true), FAILED(true, false), TIMED_OUT(true, false), TERMINATED(true, false), PAUSED(false, true); @@ -317,7 +318,10 @@ public void setParentWorkflowTaskId(String parentWorkflowTaskId) { * @return the schemaVersion Version of the schema for the workflow definition */ public int getSchemaVersion() { - return getWorkflowDefinition().getSchemaVersion(); + return getWorkflowDefinition() != null ? + getWorkflowDefinition().getSchemaVersion() : + schemaVersion + ; } /** @@ -359,19 +363,23 @@ public void setWorkflowDefinition(WorkflowDef workflowDefinition) { } /** - * Conveience method for accessing the workflow definition name. + * Convenience method for accessing the workflow definition name. * @return the workflow definition name. */ public String getWorkflowName() { - return getWorkflowDefinition().getName(); + return getWorkflowDefinition() != null ? + getWorkflowDefinition().getName() : + workflowType; } /** - * Conveience method for accessing the workflow definition version. + * Convenience method for accessing the workflow definition version. 
* @return the workflow definition version. */ public int getWorkflowVersion() { - return getWorkflowDefinition().getVersion(); + return getWorkflowDefinition() != null ? + getWorkflowDefinition().getVersion() : + version; } public Task getTaskByRefName(String refName) { diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index c99239b95c..bbfe8263fe 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -54,7 +54,6 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -224,20 +223,7 @@ public String startWorkflow( String event, Map taskToDomain ) { - - Optional potentialDef = - version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); - - //Check if the workflow definition is valid - WorkflowDef workflowDefinition = potentialDef - .orElseThrow(() -> { - logger.error("There is no workflow defined with name {} and version {}", name, version); - return new ApplicationException( - Code.NOT_FOUND, - String.format("No such workflow defined. name=%s, version=%s", name, version) - ); - } - ); + WorkflowDef workflowDefinition = metadataMapperService.lookupForWorkflowDefinition(name, version); return startWorkflow( workflowDefinition, @@ -620,6 +606,7 @@ public void updateTask(TaskResult taskResult) { String workflowId = taskResult.getWorkflowInstanceId(); Workflow workflowInstance = executionDAO.getWorkflow(workflowId); + metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); Task task = executionDAO.getTask(taskResult.getTaskId()); logger.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance); @@ -758,7 +745,7 @@ public boolean decide(String workflowId) { //If it is a new workflow the tasks will be still empty even though include tasks is true Workflow workflow = executionDAO.getWorkflow(workflowId, true); - // FIXME: The workflow could be null. 
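For context, a minimal sketch of the legacy fallback the Workflow getters above introduce. This is an illustration, not part of the patch; setters for the legacy fields (workflowType, version, schemaVersion) are assumed to exist, since the scenario JSONs earlier in this series deserialize into exactly those properties. A workflow loaded without an embedded WorkflowDef still answers the convenience getters from its own fields, and prefers the definition once one is attached:

import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.run.Workflow;

class LegacyFallbackSketch {
    static void sketch() {
        Workflow legacy = new Workflow();
        legacy.setWorkflowType("junit_test_wf"); // legacy field, as in the scenario fixtures
        legacy.setVersion(1);
        legacy.setSchemaVersion(2);

        // No definition embedded, as for a workflow started before this change:
        assert legacy.getWorkflowDefinition() == null;
        assert "junit_test_wf".equals(legacy.getWorkflowName()); // falls back to workflowType
        assert legacy.getWorkflowVersion() == 1;                 // falls back to version
        assert legacy.getSchemaVersion() == 2;                   // falls back to schemaVersion

        // Once a definition is attached, the getters read from it instead:
        WorkflowDef def = new WorkflowDef();
        def.setName("junit_test_wf");
        def.setVersion(1);
        legacy.setWorkflowDefinition(def);
        assert legacy.getWorkflowVersion() == def.getVersion();
    }
}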
+ metadataMapperService.populateWorkflowWithDefinitions(workflow); try { DeciderOutcome outcome = deciderService.decide(workflow); @@ -862,6 +849,7 @@ public void resumeWorkflow(String workflowId) { public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { Workflow wf = executionDAO.getWorkflow(workflowId, true); + metadataMapperService.populateWorkflowWithDefinitions(wf); // If the wf is not running then cannot skip any task if (!wf.getStatus().equals(WorkflowStatus.RUNNING)) { diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index a0210ccad0..b8eaacb85a 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -1,22 +1,30 @@ package com.netflix.conductor.core.metadata; +import com.google.common.annotations.VisibleForTesting; import com.google.inject.Singleton; +import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.core.WorkflowContext; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.metrics.Monitors; +import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + /** * Populates metadata definitions within workflow objects. * Benefits of loading and populating metadata definitions upfront could be: @@ -35,7 +43,50 @@ public MetadataMapperService(MetadataDAO metadataDAO) { this.metadataDAO = metadataDAO; } - public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { + public WorkflowDef lookupForWorkflowDefinition(String name, Integer version) { + Optional potentialDef = + version == null ? lookupLatestWorkflowDefinition(name) : lookupWorkflowDefinition(name, version); + + //Check if the workflow definition is valid + WorkflowDef workflowDefinition = potentialDef + .orElseThrow(() -> { + logger.error("There is no workflow defined with name {} and version {}", name, version); + return new ApplicationException( + ApplicationException.Code.NOT_FOUND, + String.format("No such workflow defined. name=%s, version=%s", name, version) + ); + } + ); + return workflowDefinition; + } + + @VisibleForTesting + Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { + // TODO: Update to use ServiceUtils once this is merged with dev + // FIXME: Add messages. + checkNotNull(workflowName); + checkArgument(StringUtils.isNotBlank(workflowName)); + checkArgument(workflowVersion > 0); + + return metadataDAO.get(workflowName, workflowVersion); + } + + @VisibleForTesting + Optional lookupLatestWorkflowDefinition(String workflowName) { + // FIXME: Add messages. 
+ checkNotNull(workflowName); + checkArgument(StringUtils.isNotBlank(workflowName)); + + return metadataDAO.getLatest(workflowName); + } + + public Workflow populateWorkflowWithDefinitions(Workflow workflow) { + + WorkflowDef workflowDefinition = workflow.getWorkflowDefinition(); + if (workflowDefinition == null) { + workflowDefinition = lookupForWorkflowDefinition(workflow.getWorkflowName(), workflow.getWorkflowVersion()); + workflow.setWorkflowDefinition(workflowDefinition); + } // Populate definitions on the workflow definition workflowDefinition.collectTasks().stream().forEach( @@ -50,9 +101,27 @@ public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { checkForMissingDefinitions(workflowDefinition); + return workflow; + } + + public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { + // Populate definitions on the workflow definition + workflowDefinition.collectTasks().stream().forEach( + workflowTask -> populateWorkflowTaskWithDefinition(workflowTask) + ); + checkForMissingDefinitions(workflowDefinition); return workflowDefinition; } + private WorkflowTask populateWorkflowTaskWithDefinition(WorkflowTask workflowTask) { + if (shouldPopulateDefinition(workflowTask)) { + workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); + } else if (workflowTask.getType().equals(TaskType.SUB_WORKFLOW.name())) { + populateVersionForSubWorkflow(workflowTask); + } + return workflowTask; + } + private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { SubWorkflowParams subworkflowParams = workflowTask.getSubWorkflowParam(); if (subworkflowParams.getVersion() == null) { @@ -86,6 +155,11 @@ private void checkForMissingDefinitions(WorkflowDef workflowDefinition) { } } + public Task populateTaskWithDefinitions(Task task) { + populateWorkflowTaskWithDefinition(task.getWorkflowTask()); + return task; + } + public static boolean shouldPopulateDefinition(WorkflowTask workflowTask) { return workflowTask.getType().equals(TaskType.SIMPLE.name()) && workflowTask.getTaskDefinition() == null; diff --git a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java index c1ba32c6bb..8dbcee41c8 100644 --- a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java +++ b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java @@ -31,6 +31,7 @@ import com.netflix.conductor.core.events.queue.Message; import com.netflix.conductor.core.execution.SystemTaskType; import com.netflix.conductor.core.execution.WorkflowExecutor; +import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.core.utils.QueueUtils; import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.IndexDAO; @@ -76,13 +77,18 @@ public class ExecutionService { private int taskRequeueTimeout; + private MetadataMapperService metadataMapperService; + @Inject - public ExecutionService(WorkflowExecutor wfProvider, ExecutionDAO executionDAO, QueueDAO queue, MetadataDAO metadata, IndexDAO indexer, Configuration config) { + public ExecutionService(WorkflowExecutor wfProvider, ExecutionDAO executionDAO, QueueDAO queue, + MetadataDAO metadata, MetadataMapperService metadataMapperService, + IndexDAO indexer, Configuration config) { this.executor = wfProvider; this.executionDAO = executionDAO; this.queue = queue; this.metadata = metadata; + this.metadataMapperService = metadataMapperService; this.indexer = indexer; 
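A short sketch of the lazy-population pattern this commit applies on the read and update paths, using only the method names added in the diff above (populateWorkflowWithDefinitions, populateTaskWithDefinitions); the injected ExecutionDAO and MetadataMapperService fields mirror the classes shown here, and the class itself is illustrative, not patch code:

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.core.metadata.MetadataMapperService;
import com.netflix.conductor.dao.ExecutionDAO;

class LazyPopulationSketch {
    private final ExecutionDAO executionDAO;
    private final MetadataMapperService metadataMapperService;

    LazyPopulationSketch(ExecutionDAO executionDAO, MetadataMapperService metadataMapperService) {
        this.executionDAO = executionDAO;
        this.metadataMapperService = metadataMapperService;
    }

    Workflow loadWorkflow(String workflowId) {
        Workflow workflow = executionDAO.getWorkflow(workflowId, true);
        // For a pre-migration instance the embedded definition is null; the
        // mapper looks it up by the legacy name/version fields and attaches it.
        metadataMapperService.populateWorkflowWithDefinitions(workflow);
        return workflow;
    }

    Task loadTask(String taskId) {
        Task task = executionDAO.getTask(taskId);
        // Individually fetched tasks get their task definitions the same way.
        metadataMapperService.populateTaskWithDefinitions(task);
        return task;
    }
}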
this.taskRequeueTimeout = config.getIntProperty("task.requeue.timeout", 60_000); } @@ -167,7 +173,9 @@ public List getTasks(String taskType, String startKey, int count) throws E } public Task getTask(String taskId) throws Exception { - return executionDAO.getTask(taskId); + Task task = executionDAO.getTask(taskId); + metadataMapperService.populateTaskWithDefinitions(task); + return task; } public Task getPendingTaskForWorkflow(String taskReferenceName, String workflowId) { diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java new file mode 100644 index 0000000000..16649ec32d --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java @@ -0,0 +1,4322 @@ +/** + * Copyright 2016 Netflix, Inc. + *
    + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + *
    + * http://www.apache.org/licenses/LICENSE-2.0 + *
    + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/** + * + */ +package com.netflix.conductor.tests.integration; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.util.concurrent.Uninterruptibles; +import com.netflix.conductor.common.metadata.tasks.PollData; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.Task.Status; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskDef.RetryLogic; +import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; +import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.TaskType; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.Workflow.WorkflowStatus; +import com.netflix.conductor.core.WorkflowContext; +import com.netflix.conductor.core.execution.ApplicationException; +import com.netflix.conductor.core.execution.SystemTaskType; +import com.netflix.conductor.core.execution.WorkflowExecutor; +import com.netflix.conductor.core.execution.WorkflowSweeper; +import com.netflix.conductor.core.execution.tasks.SubWorkflow; +import com.netflix.conductor.core.metadata.MetadataMapperService; +import com.netflix.conductor.dao.QueueDAO; +import com.netflix.conductor.service.ExecutionService; +import com.netflix.conductor.service.MetadataService; +import org.apache.commons.lang.StringUtils; +import org.junit.After; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.inject.Inject; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.UUID; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; +import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +public abstract class AbstractWorkflowServiceTest { + + private static final Logger logger = LoggerFactory.getLogger(AbstractWorkflowServiceTest.class); + + private static final String COND_TASK_WF = "ConditionalTaskWF"; + + private static final String FORK_JOIN_NESTED_WF = "FanInOutNestedTest"; + + private 
static final String FORK_JOIN_WF = "FanInOutTest"; + + private static final String DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest"; + + private static final String DYNAMIC_FORK_JOIN_WF_LEGACY = "DynamicFanInOutTestLegacy"; + + private static final int RETRY_COUNT = 1; + private static final String JUNIT_TEST_WF_NON_RESTARTABLE = "junit_test_wf_non_restartable"; + private static final String WF_WITH_SUB_WF = "WorkflowWithSubWorkflow"; + + @Rule + public final ExpectedException expectedException = ExpectedException.none(); + + @Inject + protected ExecutionService workflowExecutionService; + + @Inject + protected SubWorkflow subworkflow; + + @Inject + protected MetadataService metadataService; + + @Inject + protected WorkflowSweeper workflowSweeper; + + @Inject + protected QueueDAO queueDAO; + + @Inject + protected WorkflowExecutor workflowExecutor; + + @Inject + protected MetadataMapperService metadataMapperService; + + private static boolean registered; + + private static List taskDefs; + + protected static final String LINEAR_WORKFLOW_T1_T2 = "junit_test_wf"; + + private static final String LINEAR_WORKFLOW_T1_T2_SW = "junit_test_wf_sw"; + + private static final String LONG_RUNNING = "longRunningWf"; + + private static final String TEST_WORKFLOW_NAME_3 = "junit_test_wf3"; + + @Before + public void init() throws Exception { + System.setProperty("EC2_REGION", "us-east-1"); + System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); + if (registered) { + return; + } + + + WorkflowContext.set(new WorkflowContext("junit_app")); + for (int i = 0; i < 21; i++) { + + String name = "junit_task_" + i; + if (metadataService.getTaskDef(name) != null) { + continue; + } + + TaskDef task = new TaskDef(); + task.setName(name); + task.setTimeoutSeconds(120); + task.setRetryCount(RETRY_COUNT); + metadataService.registerTaskDef(Collections.singletonList(task)); + } + + for (int i = 0; i < 5; i++) { + + String name = "junit_task_0_RT_" + i; + if (metadataService.getTaskDef(name) != null) { + continue; + } + + TaskDef task = new TaskDef(); + task.setName(name); + task.setTimeoutSeconds(120); + task.setRetryCount(0); + metadataService.registerTaskDef(Collections.singletonList(task)); + } + + TaskDef task = new TaskDef(); + task.setName("short_time_out"); + task.setTimeoutSeconds(5); + task.setRetryCount(RETRY_COUNT); + metadataService.registerTaskDef(Collections.singletonList(task)); + + WorkflowDef def = new WorkflowDef(); + def.setName(LINEAR_WORKFLOW_T1_T2); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + Map outputParameters = new HashMap<>(); + outputParameters.put("o1", "${workflow.input.param1}"); + outputParameters.put("o2", "${t2.output.uuid}"); + outputParameters.put("o3", "${t1.output.op}"); + def.setOutputParameters(outputParameters); + def.setFailureWorkflow("$workflow.input.failureWfName"); + def.setSchemaVersion(2); + LinkedList wftasks = new LinkedList<>(); + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_1"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("t1"); + + WorkflowTask wft2 = new WorkflowTask(); + wft2.setName("junit_task_2"); + Map ip2 = new HashMap<>(); + ip2.put("tp1", "${workflow.input.param1}"); + ip2.put("tp2", "${t1.output.op}"); + wft2.setInputParameters(ip2); + wft2.setTaskReferenceName("t2"); + + wftasks.add(wft1); + wftasks.add(wft2); + 
def.setTasks(wftasks); + + WorkflowTask wft3 = new WorkflowTask(); + wft3.setName("junit_task_3"); + Map ip3 = new HashMap<>(); + ip3.put("tp1", "${workflow.input.param1}"); + ip3.put("tp2", "${t1.output.op}"); + wft3.setInputParameters(ip3); + wft3.setTaskReferenceName("t3"); + + WorkflowDef def2 = new WorkflowDef(); + def2.setName(TEST_WORKFLOW_NAME_3); + def2.setDescription(def2.getName()); + def2.setVersion(1); + def2.setInputParameters(Arrays.asList("param1", "param2")); + LinkedList wftasks2 = new LinkedList<>(); + + wftasks2.add(wft1); + wftasks2.add(wft2); + wftasks2.add(wft3); + def2.setSchemaVersion(2); + def2.setTasks(wftasks2); + + try { + + WorkflowDef[] wdsf = new WorkflowDef[]{def, def2}; + for (WorkflowDef wd : wdsf) { + metadataService.updateWorkflowDef(wd); + } + createForkJoinWorkflow(); + def.setName(LONG_RUNNING); + metadataService.updateWorkflowDef(def); + } catch (Exception e) { + } + + taskDefs = metadataService.getTaskDefs(); + + registered = true; + } + + @Test + public void testWorkflowWithNoTasks() throws Exception { + + WorkflowDef empty = new WorkflowDef(); + empty.setName("empty_workflow"); + empty.setSchemaVersion(2); + metadataService.registerWorkflowDef(empty); + + String id = startOrLoadWorkflowExecution(empty.getName(), 1, "testWorkflowWithNoTasks", new HashMap<>()); + assertNotNull(id); + Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals(0, workflow.getTasks().size()); + } + + @Test + public void testTaskDefTemplate() throws Exception { + + System.setProperty("STACK2", "test_stack"); + TaskDef templatedTask = new TaskDef(); + templatedTask.setName("templated_task"); + Map httpRequest = new HashMap<>(); + httpRequest.put("method", "GET"); + httpRequest.put("vipStack", "${STACK2}"); + httpRequest.put("uri", "/get/something"); + Map body = new HashMap<>(); + body.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}")); + body.put("requestDetails", "${workflow.input.requestDetails}"); + body.put("outputPath", "${workflow.input.outputPath}"); + httpRequest.put("body", body); + templatedTask.getInputTemplate().put("http_request", httpRequest); + metadataService.registerTaskDef(Arrays.asList(templatedTask)); + + WorkflowDef templateWf = new WorkflowDef(); + templateWf.setName("template_workflow"); + WorkflowTask wft = new WorkflowTask(); + wft.setName(templatedTask.getName()); + wft.setWorkflowTaskType(TaskType.SIMPLE); + wft.setTaskReferenceName("t0"); + templateWf.getTasks().add(wft); + templateWf.setSchemaVersion(2); + metadataService.registerWorkflowDef(templateWf); + + Map requestDetails = new HashMap<>(); + requestDetails.put("key1", "value1"); + requestDetails.put("key2", 42); + + Map input = new HashMap<>(); + input.put("path1", "file://path1"); + input.put("path2", "file://path2"); + input.put("outputPath", "s3://bucket/outputPath"); + input.put("requestDetails", requestDetails); + + String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input); + assertNotNull(id); + Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); + assertNotNull(workflow); + assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal()); + assertEquals(1, workflow.getTasks().size()); + Task task = workflow.getTasks().get(0); + Map taskInput = task.getInputData(); + assertNotNull(taskInput); + assertTrue(taskInput.containsKey("http_request")); 
+ assertTrue(taskInput.get("http_request") instanceof Map); + + ObjectMapper om = new ObjectMapper(); + + //Use the commented sysout to get the string value + //System.out.println(om.writeValueAsString(om.writeValueAsString(taskInput))); + String expected = "{\"http_request\":{\"method\":\"GET\",\"vipStack\":\"test_stack\",\"body\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]},\"uri\":\"/get/something\"}}"; + assertEquals(expected, om.writeValueAsString(taskInput)); + } + + + @Test + public void testWorkflowSchemaVersion() throws Exception { + WorkflowDef ver2 = new WorkflowDef(); + ver2.setSchemaVersion(2); + ver2.setName("Test_schema_version2"); + ver2.setVersion(1); + + WorkflowDef ver1 = new WorkflowDef(); + ver1.setName("Test_schema_version1"); + ver1.setVersion(1); + + metadataService.updateWorkflowDef(ver1); + metadataService.updateWorkflowDef(ver2); + + WorkflowDef found = metadataService.getWorkflowDef(ver2.getName(), 1).get(); + assertEquals(2, found.getSchemaVersion()); + + WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1).get(); + assertEquals(1, found1.getSchemaVersion()); + + } + + @Test + public void testForkJoin() throws Exception { + try { + createForkJoinWorkflow(); + } catch (Exception e) { + } + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(0); + metadataService.updateTaskDef(taskDef); + + taskName = "junit_task_2"; + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(0); + metadataService.updateTaskDef(taskDef); + + taskName = "junit_task_3"; + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(0); + metadataService.updateTaskDef(taskDef); + + taskName = "junit_task_4"; + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(0); + metadataService.updateTaskDef(taskDef); + + Map input = new HashMap<>(); + String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input); + System.out.println("testForkJoin.wfid=" + workflowId); + printTaskStatuses(workflowId, "initiated"); + + Task task1 = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task1); + assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); + + Task task2 = workflowExecutionService.poll("junit_task_2", "test"); + assertNotNull(task2); + assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); + + Task task3 = workflowExecutionService.poll("junit_task_3", "test"); + assertNull(task3); + + task1.setStatus(COMPLETED); + workflowExecutionService.updateTask(task1); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + printTaskStatuses(workflow, "T1 completed"); + + task3 = workflowExecutionService.poll("junit_task_3", "test"); + assertNotNull(task3); + + task2.setStatus(COMPLETED); + task3.setStatus(COMPLETED); + + ExecutorService executorService = Executors.newFixedThreadPool(2); + Future future1 = executorService.submit(() -> { + try { + workflowExecutionService.updateTask(task2); + } catch (Exception e) { + throw new RuntimeException(e); + } + + }); + future1.get(); + + final Task _t3 = task3; + Future 
future2 = executorService.submit(() -> { + try { + workflowExecutionService.updateTask(_t3); + } catch (Exception e) { + throw new RuntimeException(e); + } + + }); + future2.get(); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + printTaskStatuses(workflow, "T2 T3 completed"); + assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertTrue("Found " + workflow.getTasks().stream().map(t -> t.getReferenceTaskName() + "." + t.getStatus()).collect(Collectors.toList()), workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t4"))); + + Task t4 = workflowExecutionService.poll("junit_task_4", "test"); + assertNotNull(t4); + t4.setStatus(COMPLETED); + workflowExecutionService.updateTask(t4); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); + printTaskStatuses(workflow, "All completed"); + } + + @Test + public void testForkJoinNested() throws Exception { + + createForkJoinNestedWorkflow(); + + Map input = new HashMap<>(); + input.put("case", "a"); //This should execute t16 and t19 + String wfid = startOrLoadWorkflowExecution("forkJoinNested", FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); + System.out.println("testForkJoinNested.wfid=" + wfid); + + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); + + + Task t1 = workflowExecutionService.poll("junit_task_11", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); + + Task t2 = workflowExecutionService.poll("junit_task_12", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + + Task t3 = workflowExecutionService.poll("junit_task_13", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); + + assertNotNull(t1); + assertNotNull(t2); + assertNotNull(t3); + + t1.setStatus(COMPLETED); + t2.setStatus(COMPLETED); + t3.setStatus(COMPLETED); + + workflowExecutionService.updateTask(t1); + workflowExecutionService.updateTask(t2); + workflowExecutionService.updateTask(t3); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + + 
assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); + + String[] tasks = new String[]{"junit_task_14", "junit_task_16"}; + for (String tt : tasks) { + Task polled = workflowExecutionService.poll(tt, "test"); + assertNotNull("poll resulted empty for task: " + tt, polled); + polled.setStatus(COMPLETED); + workflowExecutionService.updateTask(polled); + } + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet + + Task task19 = workflowExecutionService.poll("junit_task_19", "test"); + assertNotNull(task19); + task19.setStatus(COMPLETED); + workflowExecutionService.updateTask(task19); + + Task task20 = workflowExecutionService.poll("junit_task_20", "test"); + assertNotNull(task20); + task20.setStatus(COMPLETED); + workflowExecutionService.updateTask(task20); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); + + pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); + + Task task15 = workflowExecutionService.poll("junit_task_15", "test"); + assertNotNull(task15); + task15.setStatus(COMPLETED); + workflowExecutionService.updateTask(task15); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); + + } + + @Test + public void testForkJoinNestedWithSubWorkflow() throws Exception { + + createForkJoinNestedWorkflowWithSubworkflow(); + + Map input = new HashMap<>(); + input.put("case", "a"); //This should execute t16 and t19 + String wfid = startOrLoadWorkflowExecution(FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input, null, null); + System.out.println("testForkJoinNested.wfid=" + wfid); + + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("sw1"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> 
t.getReferenceTaskName().equals("t16"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); + + + Task t1 = workflowExecutionService.poll("junit_task_11", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); + + Task t2 = workflowExecutionService.poll("junit_task_12", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + + Task t3 = workflowExecutionService.poll("junit_task_13", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); + + assertNotNull(t1); + assertNotNull(t2); + assertNotNull(t3); + + t1.setStatus(COMPLETED); + t2.setStatus(COMPLETED); + t3.setStatus(COMPLETED); + + workflowExecutionService.updateTask(t1); + workflowExecutionService.updateTask(t2); + workflowExecutionService.updateTask(t3); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); + + String[] tasks = new String[]{"junit_task_1", "junit_task_2", "junit_task_14", "junit_task_16"}; + for (String tt : tasks) { + Task polled = workflowExecutionService.poll(tt, "test"); + assertNotNull("poll resulted empty for task: " + tt, polled); + polled.setStatus(COMPLETED); + workflowExecutionService.updateTask(polled); + } + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet + assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet + + Task task19 = workflowExecutionService.poll("junit_task_19", "test"); + assertNotNull(task19); + task19.setStatus(COMPLETED); + workflowExecutionService.updateTask(task19); + + Task task20 = workflowExecutionService.poll("junit_task_20", "test"); + assertNotNull(task20); + task20.setStatus(COMPLETED); + workflowExecutionService.updateTask(task20); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + + Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); + + pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); + assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); + Task task15 = workflowExecutionService.poll("junit_task_15", "test"); + assertNotNull(task15); + task15.setStatus(COMPLETED); + workflowExecutionService.updateTask(task15); + + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); + + } + + @Test + public void testForkJoinFailure() throws Exception { + + try { + 
createForkJoinWorkflow(); + } catch (Exception e) { + } + + String taskName = "junit_task_2"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + int retryCount = taskDef.getRetryCount(); + taskDef.setRetryCount(0); + metadataService.updateTaskDef(taskDef); + + + Map input = new HashMap(); + String wfid = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input); + System.out.println("testForkJoinFailure.wfid=" + wfid); + + Task t1 = workflowExecutionService.poll("junit_task_2", "test"); + assertNotNull(t1); + assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); + + Task t2 = workflowExecutionService.poll("junit_task_1", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + + Task t3 = workflowExecutionService.poll("junit_task_3", "test"); + assertNull(t3); + + assertNotNull(t1); + assertNotNull(t2); + t1.setStatus(FAILED); + t2.setStatus(COMPLETED); + + workflowExecutionService.updateTask(t2); + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals("Found " + wf.getTasks(), WorkflowStatus.RUNNING, wf.getStatus()); + + t3 = workflowExecutionService.poll("junit_task_3", "test"); + assertNotNull(t3); + + + workflowExecutionService.updateTask(t1); + wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + assertEquals("Found " + wf.getTasks(), WorkflowStatus.FAILED, wf.getStatus()); + + + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(retryCount); + metadataService.updateTaskDef(taskDef); + } + + @SuppressWarnings("unchecked") + @Test + public void testDynamicForkJoinLegacy() throws Exception { + + try { + createDynamicForkJoinWorkflowDefsLegacy(); + } catch (Exception e) { + } + + Map input = new HashMap(); + String wfid = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input); + System.out.println("testDynamicForkJoinLegacy.wfid=" + wfid); + + Task t1 = workflowExecutionService.poll("junit_task_1", "test"); + //assertTrue(ess.ackTaskRecieved(t1.getTaskId(), "test")); + + DynamicForkJoinTaskList dynamicForkJoinTasks = new DynamicForkJoinTaskList(); + + input = new HashMap(); + input.put("k1", "v1"); + dynamicForkJoinTasks.add("junit_task_2", null, "xdt1", input); + + HashMap input2 = new HashMap(); + input2.put("k2", "v2"); + dynamicForkJoinTasks.add("junit_task_3", null, "xdt2", input2); + + t1.getOutputData().put("dynamicTasks", dynamicForkJoinTasks); + t1.setStatus(COMPLETED); + + workflowExecutionService.updateTask(t1); + + Task t2 = workflowExecutionService.poll("junit_task_2", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + assertEquals("xdt1", t2.getReferenceTaskName()); + assertTrue(t2.getInputData().containsKey("k1")); + assertEquals("v1", t2.getInputData().get("k1")); + Map output = new HashMap(); + output.put("ok1", "ov1"); + t2.setOutputData(output); + t2.setStatus(COMPLETED); + workflowExecutionService.updateTask(t2); + + Task t3 = workflowExecutionService.poll("junit_task_3", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); + assertEquals("xdt2", t3.getReferenceTaskName()); + assertTrue(t3.getInputData().containsKey("k2")); + assertEquals("v2", t3.getInputData().get("k2")); + + output = new HashMap<>(); + output.put("ok1", "ov1"); + t3.setOutputData(output); + t3.setStatus(COMPLETED); + workflowExecutionService.updateTask(t3); + + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + 
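A compact sketch contrasting the two dynamic-fork output contracts these two tests exercise; the names and the DynamicForkJoinTaskList.add signature are taken from the test code itself, and the helper class is illustrative only:

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

class DynamicForkOutputShapes {
    static void legacyShape(Task forkTask, Map<String, Object> input) {
        // Legacy contract (testDynamicForkJoinLegacy): one DynamicForkJoinTaskList
        // published under "dynamicTasks", carrying task name, ref name, and input.
        DynamicForkJoinTaskList tasks = new DynamicForkJoinTaskList();
        tasks.add("junit_task_2", null, "xdt1", input);
        forkTask.getOutputData().put("dynamicTasks", tasks);
    }

    static void currentShape(Task forkTask, Map<String, Object> input) {
        // Current contract (testDynamicForkJoin): the forked WorkflowTasks under
        // "dynamicTasks", plus a per-reference input map under "dynamicTasksInput".
        WorkflowTask forked = new WorkflowTask();
        forked.setName("junit_task_2");
        forked.setTaskReferenceName("xdt1");
        Map<String, Object> dynamicTasksInput = new HashMap<>();
        dynamicTasksInput.put("xdt1", input);
        forkTask.getOutputData().put("dynamicTasks", Collections.singletonList(forked));
        forkTask.getOutputData().put("dynamicTasksInput", dynamicTasksInput);
    }
}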
assertNotNull(wf); + assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); + + // Check the output + Task joinTask = wf.getTaskByRefName("dynamicfanouttask_join"); + assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size()); + Set joinTaskOutput = joinTask.getOutputData().keySet(); + System.out.println("joinTaskOutput=" + joinTaskOutput); + for (String key : joinTask.getOutputData().keySet()) { + assertTrue(key.equals("xdt1") || key.equals("xdt2")); + assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); + } + } + + @SuppressWarnings("unchecked") + @Test + public void testDynamicForkJoin() throws Exception { + + createDynamicForkJoinWorkflowDefs(); + + String taskName = "junit_task_2"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + int retryCount = taskDef.getRetryCount(); + taskDef.setRetryCount(2); + taskDef.setRetryDelaySeconds(0); + taskDef.setRetryLogic(RetryLogic.FIXED); + metadataService.updateTaskDef(taskDef); + + Map workflowInput = new HashMap<>(); + String workflowId = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF, 1, "dynfanouttest1", workflowInput); + System.out.println("testDynamicForkJoin.wfid=" + workflowId); + Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); + + Task task1 = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task1); + assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); + assertEquals("dt1", task1.getReferenceTaskName()); + + Map inputParams2 = new HashMap<>(); + inputParams2.put("k1", "v1"); + WorkflowTask workflowTask2 = new WorkflowTask(); + workflowTask2.setName("junit_task_2"); + workflowTask2.setTaskReferenceName("xdt1"); + + Map inputParams3 = new HashMap<>(); + inputParams3.put("k2", "v2"); + WorkflowTask workflowTask3 = new WorkflowTask(); + workflowTask3.setName("junit_task_3"); + workflowTask3.setTaskReferenceName("xdt2"); + + HashMap dynamicTasksInput = new HashMap<>(); + dynamicTasksInput.put("xdt1", inputParams2); + dynamicTasksInput.put("xdt2", inputParams3); + task1.getOutputData().put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); + task1.getOutputData().put("dynamicTasksInput", dynamicTasksInput); + task1.setStatus(COMPLETED); + + workflowExecutionService.updateTask(task1); + workflow = workflowExecutor.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 5, workflow.getTasks().size()); + + Task task2 = workflowExecutionService.poll("junit_task_2", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); + assertEquals("xdt1", task2.getReferenceTaskName()); + assertTrue(task2.getInputData().containsKey("k1")); + assertEquals("v1", task2.getInputData().get("k1")); + Map output = new HashMap<>(); + output.put("ok1", "ov1"); + task2.setOutputData(output); + task2.setStatus(FAILED); + workflowExecutionService.updateTask(task2); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).count()); + assertTrue(workflow.getTasks().stream().filter(t -> 
t.getTaskType().equals("junit_task_2")).allMatch(t -> t.getWorkflowTask() != null)); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); + + task2 = workflowExecutionService.poll("junit_task_2", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); + assertEquals("xdt1", task2.getReferenceTaskName()); + assertTrue(task2.getInputData().containsKey("k1")); + assertEquals("v1", task2.getInputData().get("k1")); + task2.setOutputData(output); + task2.setStatus(COMPLETED); + workflowExecutionService.updateTask(task2); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); + + Task task3 = workflowExecutionService.poll("junit_task_3", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(task3.getTaskId())); + assertEquals("xdt2", task3.getReferenceTaskName()); + assertTrue(task3.getInputData().containsKey("k2")); + assertEquals("v2", task3.getInputData().get("k2")); + output = new HashMap<>(); + output.put("ok1", "ov1"); + task3.setOutputData(output); + task3.setStatus(COMPLETED); + workflowExecutionService.updateTask(task3); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); + + Task task4 = workflowExecutionService.poll("junit_task_4", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(task4.getTaskId())); + assertEquals("task4", task4.getReferenceTaskName()); + task4.setStatus(COMPLETED); + workflowExecutionService.updateTask(task4); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); + + // Check the output + Task joinTask = workflow.getTaskByRefName("dynamicfanouttask_join"); + assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size()); + Set joinTaskOutput = joinTask.getOutputData().keySet(); + System.out.println("joinTaskOutput=" + joinTaskOutput); + for (String key : joinTask.getOutputData().keySet()) { + assertTrue(key.equals("xdt1") || key.equals("xdt2")); + assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); + } + + // reset the task def + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(retryCount); + taskDef.setRetryDelaySeconds(1); + metadataService.updateTaskDef(taskDef); + } + + private void createForkJoinWorkflow() throws Exception { + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(FORK_JOIN_WF); + workflowDef.setDescription(workflowDef.getName()); + workflowDef.setVersion(1); + workflowDef.setInputParameters(Arrays.asList("param1", "param2")); + + WorkflowTask fanoutTask = new WorkflowTask(); + fanoutTask.setType(TaskType.FORK_JOIN.name()); + fanoutTask.setTaskReferenceName("fanouttask"); + + WorkflowTask workflowTask1 = new WorkflowTask(); + 
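+        // Definition under construction here (createForkJoinWorkflow), roughly:
+        //   fanouttask --> [ t1 -> t3 ] and [ t2 ], then JOIN(t3, t2) -> t4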
workflowTask1.setName("junit_task_1"); + Map inputParams1 = new HashMap<>(); + inputParams1.put("p1", "workflow.input.param1"); + inputParams1.put("p2", "workflow.input.param2"); + workflowTask1.setInputParameters(inputParams1); + workflowTask1.setTaskReferenceName("t1"); + + WorkflowTask workflowTask3 = new WorkflowTask(); + workflowTask3.setName("junit_task_3"); + workflowTask3.setInputParameters(inputParams1); + workflowTask3.setTaskReferenceName("t3"); + + WorkflowTask workflowTask2 = new WorkflowTask(); + workflowTask2.setName("junit_task_2"); + Map inputParams2 = new HashMap<>(); + inputParams2.put("tp1", "workflow.input.param1"); + workflowTask2.setInputParameters(inputParams2); + workflowTask2.setTaskReferenceName("t2"); + + WorkflowTask workflowTask4 = new WorkflowTask(); + workflowTask4.setName("junit_task_4"); + workflowTask4.setInputParameters(inputParams2); + workflowTask4.setTaskReferenceName("t4"); + + fanoutTask.getForkTasks().add(Arrays.asList(workflowTask1, workflowTask3)); + fanoutTask.getForkTasks().add(Collections.singletonList(workflowTask2)); + + workflowDef.getTasks().add(fanoutTask); + + WorkflowTask joinTask = new WorkflowTask(); + joinTask.setType(TaskType.JOIN.name()); + joinTask.setTaskReferenceName("fanouttask_join"); + joinTask.setJoinOn(Arrays.asList("t3", "t2")); + + workflowDef.getTasks().add(joinTask); + workflowDef.getTasks().add(workflowTask4); + metadataService.updateWorkflowDef(workflowDef); + } + + + private void createForkJoinWorkflowWithZeroRetry() throws Exception { + + WorkflowDef def = new WorkflowDef(); + def.setName(FORK_JOIN_WF + "_2"); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + + WorkflowTask fanout = new WorkflowTask(); + fanout.setType(TaskType.FORK_JOIN.name()); + fanout.setTaskReferenceName("fanouttask"); + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_0_RT_1"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("t1"); + + WorkflowTask wft3 = new WorkflowTask(); + wft3.setName("junit_task_0_RT_3"); + wft3.setInputParameters(ip1); + wft3.setTaskReferenceName("t3"); + + WorkflowTask wft2 = new WorkflowTask(); + wft2.setName("junit_task_0_RT_2"); + Map ip2 = new HashMap<>(); + ip2.put("tp1", "workflow.input.param1"); + wft2.setInputParameters(ip2); + wft2.setTaskReferenceName("t2"); + + WorkflowTask wft4 = new WorkflowTask(); + wft4.setName("junit_task_0_RT_4"); + wft4.setInputParameters(ip2); + wft4.setTaskReferenceName("t4"); + + fanout.getForkTasks().add(Arrays.asList(wft1, wft3)); + fanout.getForkTasks().add(Arrays.asList(wft2)); + + def.getTasks().add(fanout); + + WorkflowTask join = new WorkflowTask(); + join.setType(TaskType.JOIN.name()); + join.setTaskReferenceName("fanouttask_join"); + join.setJoinOn(Arrays.asList("t3", "t2")); + + def.getTasks().add(join); + def.getTasks().add(wft4); + metadataService.updateWorkflowDef(def); + + } + + private void createForkJoinNestedWorkflow() throws Exception { + + WorkflowDef def = new WorkflowDef(); + def.setName(FORK_JOIN_NESTED_WF); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + + Map ip1 = new HashMap<>(); + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + ip1.put("case", "workflow.input.case"); + + WorkflowTask[] tasks = new WorkflowTask[21]; + + for (int i = 10; 
i < 21; i++) {
+            WorkflowTask wft = new WorkflowTask();
+            wft.setName("junit_task_" + i);
+            wft.setInputParameters(ip1);
+            wft.setTaskReferenceName("t" + i);
+            tasks[i] = wft;
+        }
+
+        WorkflowTask d1 = new WorkflowTask();
+        d1.setType(TaskType.DECISION.name());
+        d1.setName("Decision");
+        d1.setTaskReferenceName("d1");
+        d1.setInputParameters(ip1);
+        d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20]));
+        d1.setCaseValueParam("case");
+        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
+        decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20]));
+        decisionCases.put("b", Arrays.asList(tasks[17], tasks[20]));
+        d1.setDecisionCases(decisionCases);
+
+        WorkflowTask fork2 = new WorkflowTask();
+        fork2.setType(TaskType.FORK_JOIN.name());
+        fork2.setName("fork2");
+        fork2.setTaskReferenceName("fork2");
+        fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14]));
+        fork2.getForkTasks().add(Arrays.asList(tasks[13], d1));
+
+        WorkflowTask join2 = new WorkflowTask();
+        join2.setType(TaskType.JOIN.name());
+        join2.setTaskReferenceName("join2");
+        join2.setJoinOn(Arrays.asList("t14", "t20"));
+
+        WorkflowTask fork1 = new WorkflowTask();
+        fork1.setType(TaskType.FORK_JOIN.name());
+        fork1.setTaskReferenceName("fork1");
+        fork1.getForkTasks().add(Arrays.asList(tasks[11]));
+        fork1.getForkTasks().add(Arrays.asList(fork2, join2));
+
+        WorkflowTask join1 = new WorkflowTask();
+        join1.setType(TaskType.JOIN.name());
+        join1.setTaskReferenceName("join1");
+        join1.setJoinOn(Arrays.asList("t11", "join2"));
+
+        def.getTasks().add(fork1);
+        def.getTasks().add(join1);
+        def.getTasks().add(tasks[15]);
+
+        metadataService.updateWorkflowDef(def);
+    }
+
+    private void createForkJoinNestedWorkflowWithSubworkflow() throws Exception {
+
+        WorkflowDef def = new WorkflowDef();
+        def.setName(FORK_JOIN_NESTED_WF);
+        def.setDescription(def.getName());
+        def.setVersion(1);
+        def.setInputParameters(Arrays.asList("param1", "param2"));
+
+        Map<String, Object> ip1 = new HashMap<>();
+        ip1.put("p1", "workflow.input.param1");
+        ip1.put("p2", "workflow.input.param2");
+        ip1.put("case", "workflow.input.case");
+
+        WorkflowTask[] tasks = new WorkflowTask[21];
+
+        for (int i = 10; i < 21; i++) {
+            WorkflowTask wft = new WorkflowTask();
+            wft.setName("junit_task_" + i);
+            wft.setInputParameters(ip1);
+            wft.setTaskReferenceName("t" + i);
+            tasks[i] = wft;
+        }
+
+        WorkflowTask d1 = new WorkflowTask();
+        d1.setType(TaskType.DECISION.name());
+        d1.setName("Decision");
+        d1.setTaskReferenceName("d1");
+        d1.setInputParameters(ip1);
+        d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20]));
+        d1.setCaseValueParam("case");
+        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
+        decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20]));
+        decisionCases.put("b", Arrays.asList(tasks[17], tasks[20]));
+        d1.setDecisionCases(decisionCases);
+
+        WorkflowTask subWorkflow = new WorkflowTask();
+        subWorkflow.setType(TaskType.SUB_WORKFLOW.name());
+        SubWorkflowParams sw = new SubWorkflowParams();
+        sw.setName(LINEAR_WORKFLOW_T1_T2);
+        subWorkflow.setSubWorkflowParam(sw);
+        subWorkflow.setTaskReferenceName("sw1");
+
+        WorkflowTask fork2 = new WorkflowTask();
+        fork2.setType(TaskType.FORK_JOIN.name());
+        fork2.setName("fork2");
+        fork2.setTaskReferenceName("fork2");
+        fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14]));
+        fork2.getForkTasks().add(Arrays.asList(tasks[13], d1));
+
+        WorkflowTask join2 = new WorkflowTask();
+        join2.setType(TaskType.JOIN.name());
+        join2.setTaskReferenceName("join2");
+        join2.setJoinOn(Arrays.asList("t14", "t20"));
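+        // Outer fork layout for the nested definition: fork1 runs [t11], [fork2 -> join2]
+        // and the sw1 sub-workflow in parallel; join1 then waits on t11, join2 and sw1.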
WorkflowTask fork1 = new WorkflowTask(); + fork1.setType(TaskType.FORK_JOIN.name()); + fork1.setTaskReferenceName("fork1"); + fork1.getForkTasks().add(Arrays.asList(tasks[11])); + fork1.getForkTasks().add(Arrays.asList(fork2, join2)); + fork1.getForkTasks().add(Arrays.asList(subWorkflow)); + + + WorkflowTask join1 = new WorkflowTask(); + join1.setType(TaskType.JOIN.name()); + join1.setTaskReferenceName("join1"); + join1.setJoinOn(Arrays.asList("t11", "join2", "sw1")); + + def.getTasks().add(fork1); + def.getTasks().add(join1); + def.getTasks().add(tasks[15]); + + metadataService.updateWorkflowDef(def); + + + } + + private void createDynamicForkJoinWorkflowDefs() throws Exception { + + WorkflowDef def = new WorkflowDef(); + def.setName(DYNAMIC_FORK_JOIN_WF); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + + WorkflowTask workflowTask1 = new WorkflowTask(); + workflowTask1.setName("junit_task_1"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + workflowTask1.setInputParameters(ip1); + workflowTask1.setTaskReferenceName("dt1"); + + WorkflowTask fanout = new WorkflowTask(); + fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); + fanout.setTaskReferenceName("dynamicfanouttask"); + fanout.setDynamicForkTasksParam("dynamicTasks"); + fanout.setDynamicForkTasksInputParamName("dynamicTasksInput"); + fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); + fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); + + WorkflowTask join = new WorkflowTask(); + join.setType(TaskType.JOIN.name()); + join.setTaskReferenceName("dynamicfanouttask_join"); + + WorkflowTask workflowTask4 = new WorkflowTask(); + workflowTask4.setName("junit_task_4"); + workflowTask4.setTaskReferenceName("task4"); + + def.getTasks().add(workflowTask1); + def.getTasks().add(fanout); + def.getTasks().add(join); + def.getTasks().add(workflowTask4); + + metadataMapperService.populateTaskDefinitions(def); + + metadataService.updateWorkflowDef(def); + } + + @SuppressWarnings("deprecation") + private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { + + WorkflowDef def = new WorkflowDef(); + def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_1"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "workflow.input.param1"); + ip1.put("p2", "workflow.input.param2"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("dt1"); + + WorkflowTask fanout = new WorkflowTask(); + fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); + fanout.setTaskReferenceName("dynamicfanouttask"); + fanout.setDynamicForkJoinTasksParam("dynamicTasks"); + fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); + fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); + + WorkflowTask join = new WorkflowTask(); + join.setType(TaskType.JOIN.name()); + join.setTaskReferenceName("dynamicfanouttask_join"); + + def.getTasks().add(wft1); + def.getTasks().add(fanout); + def.getTasks().add(join); + + metadataMapperService.populateTaskDefinitions(def); + + metadataService.updateWorkflowDef(def); + + } + + private void createConditionalWF() throws Exception { + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_1"); + Map ip1 = 
new HashMap<>();
+        ip1.put("p1", "workflow.input.param1");
+        ip1.put("p2", "workflow.input.param2");
+        wft1.setInputParameters(ip1);
+        wft1.setTaskReferenceName("t1");
+
+        WorkflowTask wft2 = new WorkflowTask();
+        wft2.setName("junit_task_2");
+        Map<String, Object> ip2 = new HashMap<>();
+        ip2.put("tp1", "workflow.input.param1");
+        wft2.setInputParameters(ip2);
+        wft2.setTaskReferenceName("t2");
+
+        WorkflowTask wft3 = new WorkflowTask();
+        wft3.setName("junit_task_3");
+        Map<String, Object> ip3 = new HashMap<>();
+        ip3.put("tp3", "workflow.input.param2");
+        wft3.setInputParameters(ip3);
+        wft3.setTaskReferenceName("t3");
+
+        WorkflowDef def2 = new WorkflowDef();
+        def2.setName(COND_TASK_WF);
+        def2.setDescription(COND_TASK_WF);
+        def2.setInputParameters(Arrays.asList("param1", "param2"));
+
+        WorkflowTask c2 = new WorkflowTask();
+        c2.setType(TaskType.DECISION.name());
+        c2.setCaseValueParam("case");
+        c2.setName("conditional2");
+        c2.setTaskReferenceName("conditional2");
+        Map<String, List<WorkflowTask>> dc = new HashMap<>();
+        dc.put("one", Arrays.asList(wft1, wft3));
+        dc.put("two", Arrays.asList(wft2));
+        c2.setDecisionCases(dc);
+        c2.getInputParameters().put("case", "workflow.input.param2");
+
+        WorkflowTask condition = new WorkflowTask();
+        condition.setType(TaskType.DECISION.name());
+        condition.setCaseValueParam("case");
+        condition.setName("conditional");
+        condition.setTaskReferenceName("conditional");
+        Map<String, List<WorkflowTask>> decisionCases = new HashMap<>();
+        decisionCases.put("nested", Arrays.asList(c2));
+        decisionCases.put("three", Arrays.asList(wft3));
+        condition.setDecisionCases(decisionCases);
+        condition.getInputParameters().put("case", "workflow.input.param1");
+        condition.getDefaultCase().add(wft2);
+        def2.getTasks().add(condition);
+
+        WorkflowTask notifyTask = new WorkflowTask();
+        notifyTask.setName("junit_task_4");
+        notifyTask.setTaskReferenceName("junit_task_4");
+
+        WorkflowTask finalTask = new WorkflowTask();
+        finalTask.setName("finalcondition");
+        finalTask.setTaskReferenceName("tf");
+        finalTask.setType(TaskType.DECISION.name());
+        finalTask.setCaseValueParam("finalCase");
+        Map<String, Object> fi = new HashMap<>();
+        fi.put("finalCase", "workflow.input.finalCase");
+        finalTask.setInputParameters(fi);
+        finalTask.getDecisionCases().put("notify", Arrays.asList(notifyTask));
+
+        def2.getTasks().add(finalTask);
+        metadataService.updateWorkflowDef(def2);
+    }
+
+    @Test
+    public void testDefDAO() throws Exception {
+        List<TaskDef> taskDefs = metadataService.getTaskDefs();
+        assertNotNull(taskDefs);
+        assertFalse(taskDefs.isEmpty());
+    }
+
+    @Test
+    public void testSimpleWorkflowFailureWithTerminalError() throws Exception {
+
+        clearWorkflows();
+
+        TaskDef taskDef = metadataService.getTaskDef("junit_task_1");
+        taskDef.setRetryCount(1);
+        metadataService.updateTaskDef(taskDef);
+
+        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
+        assertNotNull(found);
+        Map<String, Object> outputParameters = found.getOutputParameters();
+        outputParameters.put("validationErrors", "${t1.output.ErrorMessage}");
+        metadataService.updateWorkflowDef(found);
+
+        String correlationId = "unit_test_1";
+        Map<String, Object> input = new HashMap<>();
+        String inputParam1 = "p1 value";
+        input.put("param1", inputParam1);
+        input.put("param2", "p2 value");
+        String workflowInstanceId = startOrLoadWorkflowExecution("simpleWorkflowFailureWithTerminalError", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null);
+        logger.info("testSimpleWorkflowFailureWithTerminalError.wfid= {}", workflowInstanceId);
+        assertNotNull(workflowInstanceId);
+
+        Workflow es =
workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + assertNotNull(es); + assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus()); + + es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled. + + boolean failed = false; + try { + workflowExecutor.rewind(workflowInstanceId); + } catch (ApplicationException ae) { + failed = true; + } + assertTrue(failed); + + // Polling for the first task should return the same task as before + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertEquals("junit_task_1", task.getTaskType()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); + + TaskResult taskResult = new TaskResult(task); + taskResult.setReasonForIncompletion("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down"); + taskResult.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR); + taskResult.addOutputData("TERMINAL_ERROR", "Integration endpoint down: FOOBAR"); + taskResult.addOutputData("ErrorMessage", "There was a terminal error"); + + workflowExecutionService.updateTask(taskResult); + workflowExecutor.decide(workflowInstanceId); + + es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + TaskDef junit_task_1 = metadataService.getTaskDef("junit_task_1"); + Task t1 = es.getTaskByRefName("t1"); + assertNotNull(es); + assertEquals(WorkflowStatus.FAILED, es.getStatus()); + assertEquals("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down", es.getReasonForIncompletion()); + assertEquals(1, junit_task_1.getRetryCount()); //Configured retries at the task definition level + assertEquals(0, t1.getRetryCount()); //Actual retries done on the task + assertEquals(true, es.getOutput().containsKey("o1")); + assertEquals("p1 value", es.getOutput().get("o1")); + assertEquals(es.getOutput().get("validationErrors").toString(), "There was a terminal error"); + + outputParameters.remove("validationErrors"); + metadataService.updateWorkflowDef(found); + + } + + + @Test + public void testSimpleWorkflow() throws Exception { + + clearWorkflows(); + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1"; + Map input = new HashMap<>(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId); + assertNotNull(workflowInstanceId); + + Workflow es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + assertNotNull(es); + assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus()); + + + es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled. 
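+        // Restarting (rewinding) a workflow that is still RUNNING is invalid, so the
+        // rewind call below is expected to fail with an ApplicationException.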
+ + + boolean failed = false; + try { + workflowExecutor.rewind(workflowInstanceId); + } catch (ApplicationException ae) { + failed = true; + } + assertTrue(failed); + + // Polling for the first task should return the same task as before + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertEquals("junit_task_1", task.getTaskType()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); + + workflowExecutor.decide(workflowInstanceId); + + String task1Op = "task1.Done"; + List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); + assertNotNull(tasks); + assertEquals(1, tasks.size()); + task = tasks.get(0); + + Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); + System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput()); + assertEquals(workflowInstanceId, task.getWorkflowInstanceId()); + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(workflowInstanceId, false); + assertNotNull(es); + assertNotNull(es.getOutput()); + assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); + assertEquals("task1.Done", es.getOutput().get("o3")); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertEquals("junit_task_2", task.getTaskType()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull("Found=" + task.getInputData(), task2Input); + assertEquals(task1Op, task2Input); + + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + task.setReasonForIncompletion("unit test failure"); + workflowExecutionService.updateTask(task); + + + es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + tasks = es.getTasks(); + assertNotNull(tasks); + assertEquals(2, tasks.size()); + + assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); + assertEquals("task1.Done", es.getOutput().get("o3")); + + } + + @Test + public void testSimpleWorkflowWithResponseTimeout() throws Exception { + + createWFWithResponseTimeout(); + + String correlationId = "unit_test_1"; + Map workflowInput = new HashMap(); + String inputParam1 = "p1 value"; + workflowInput.put("param1", inputParam1); + workflowInput.put("param2", "p2 value"); + String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, correlationId, workflowInput); + System.out.println("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId); + assertNotNull(workflowId); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
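+        // task_rt carries a response timeout (createWFWithResponseTimeout() presumably sets
+        // something like taskDef.setResponseTimeoutSeconds(10)): once polled, the task leaves
+        // the queue, and a decide() after the timeout elapses requeues it for re-polling.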
+ assertEquals(1, queueDAO.getSize("task_rt")); + + // Polling for the first task should return the same task as before + Task task = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout"); + assertNotNull(task); + assertEquals("task_rt", task.getTaskType()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(workflowId, task.getWorkflowInstanceId()); + + // As the task_rt is out of the queue, the next poll should not get it + Task nullTask = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout"); + assertNull(nullTask); + + Thread.sleep(10000); + workflowExecutor.decide(workflowId); + assertEquals(1, queueDAO.getSize("task_rt")); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + + // Polling now should get the same task back because it should have been put back in the queue + Task taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker"); + assertNotNull(taskAgain); + + taskAgain.getOutputData().put("op", "task1.Done"); + taskAgain.setStatus(COMPLETED); + workflowExecutionService.updateTask(taskAgain); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker.testTimeout"); + assertNotNull(task); + assertEquals("junit_task_2", task.getTaskType()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + task.setStatus(COMPLETED); + task.setReasonForIncompletion("unit test failure"); + workflowExecutionService.updateTask(task); + + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + + } + + @Test + public void testWorkflowRerunWithSubWorkflows() throws Exception { + // Execute a workflow + String workflowId = this.runWorkflowWithSubworkflow(); + // Check it completed + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + + // Now lets pickup the first task in the sub workflow and rerun it from there + String subWorkflowId = null; + for (Task task : workflow.getTasks()) { + if (task.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) { + subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); + } + } + assertNotNull(subWorkflowId); + Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + Task subWorkflowTask1 = null; + for (Task task : subWorkflow.getTasks()) { + if (task.getTaskDefName().equalsIgnoreCase("junit_task_1")) { + subWorkflowTask1 = task; + } + } + assertNotNull(subWorkflowTask1); + + RerunWorkflowRequest request = new RerunWorkflowRequest(); + request.setReRunFromTaskId(subWorkflowTask1.getTaskId()); + + Map newInput = new HashMap<>(); + newInput.put("p1", "1"); + newInput.put("p2", "2"); + request.setTaskInput(newInput); + + String correlationId = "unit_test_sw_new"; + Map input = new HashMap<>(); + input.put("param1", "New p1 value"); + input.put("param2", "New p2 value"); + request.setCorrelationId(correlationId); + request.setWorkflowInput(input); + + request.setReRunFromWorkflowId(workflowId); + request.setReRunFromTaskId(subWorkflowTask1.getTaskId()); + // Rerun + workflowExecutor.rerun(request); + + // The main WF and the sub WF should be in RUNNING 
state + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + assertEquals(correlationId, workflow.getCorrelationId()); + assertEquals("New p1 value", workflow.getInput().get("param1")); + assertEquals("New p2 value", workflow.getInput().get("param2")); + + subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); + // Since we are re running from the sub workflow task, there + // should be only 1 task that is SCHEDULED + assertEquals(1, subWorkflow.getTasks().size()); + assertEquals(Status.SCHEDULED, subWorkflow.getTasks().get(0).getStatus()); + + // Now execute the task + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(task.getInputData().get("p1").toString(), "1"); + assertEquals(task.getInputData().get("p2").toString(), "2"); + task.getOutputData().put("op", "junit_task_1.done"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); + assertEquals(2, subWorkflow.getTasks().size()); + + // Poll for second task of the sub workflow and execute it + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + task.getOutputData().put("op", "junit_task_2.done"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + // Now the sub workflow and the main workflow must have finished + subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); + assertEquals(2, subWorkflow.getTasks().size()); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + } + + @Test + public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { + + clearWorkflows(); + createWorkflowDefForDomain(); + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); + + String correlationId = "unit_test_sw"; + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + Map taskToDomain = new HashMap(); + taskToDomain.put("junit_task_3", "domain1"); + taskToDomain.put("junit_task_2", "domain1"); + + // Poll before so that a polling for this task is "active" + Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1"); + assertNull(task); + task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain1"); + assertNull(task); + + String wfid = startOrLoadWorkflowExecution("simpleWorkflowWithTaskSpecificDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain); + System.out.println("testSimpleWorkflow.wfid=" + wfid); + assertNotNull(wfid); + Workflow wf = workflowExecutor.getWorkflow(wfid, false); + assertNotNull(wf); + + Workflow es = 
workflowExecutionService.getExecutionStatus(wfid, true);
+        assertNotNull(es);
+        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
+
+        es = workflowExecutionService.getExecutionStatus(wfid, true);
+        assertNotNull(es);
+        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
+
+        // Check the queue sizes
+        Map<String, Integer> sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain1:junit_task_3", "junit_task_3"));
+        assertEquals(1, sizes.get("domain1:junit_task_3").intValue());
+        assertEquals(0, sizes.get("junit_task_3").intValue());
+
+        // Polling without the domain should return nothing; polling with domain1 should return the task
+        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker");
+        assertNull(task);
+        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1");
+        assertNotNull(task);
+        assertEquals("junit_task_3", task.getTaskType());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(wfid, task.getWorkflowInstanceId());
+
+        String task1Op = "task1.Done";
+        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
+        assertNotNull(tasks);
+        assertEquals(1, tasks.size());
+        task = tasks.get(0);
+
+        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
+        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
+        assertEquals(wfid, task.getWorkflowInstanceId());
+        task.getOutputData().put("op", task1Op);
+        task.setStatus(COMPLETED);
+        workflowExecutionService.updateTask(task);
+
+        es = workflowExecutionService.getExecutionStatus(wfid, false);
+        assertNotNull(es);
+        assertNotNull(es.getOutput());
+        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
+        assertEquals("task1.Done", es.getOutput().get("o3"));
+
+        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_1", task.getTaskType());
+        Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertNotNull(essw.getTaskToDomain());
+        assertEquals(2, essw.getTaskToDomain().size());
+
+        task.setStatus(COMPLETED);
+        task.setReasonForIncompletion("unit test failure");
+        workflowExecutionService.updateTask(task);
+
+        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain1");
+        assertNotNull(task);
+        assertEquals("junit_task_2", task.getTaskType());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+        task.setStatus(COMPLETED);
+        task.setReasonForIncompletion("unit test failure");
+        workflowExecutionService.updateTask(task);
+
+        es = workflowExecutionService.getExecutionStatus(wfid, true);
+        assertNotNull(es);
+        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+        tasks = es.getTasks();
+        assertNotNull(tasks);
+        assertEquals(2, tasks.size());
+
+        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
+        assertEquals("task1.Done", es.getOutput().get("o3"));
+
+        List<PollData> pddata = workflowExecutionService.getPollData("junit_task_3");
+        assertEquals(2, pddata.size());
+        for (PollData pd : pddata) {
+            assertEquals("junit_task_3", pd.getQueueName());
+            assertEquals("task1.junit.worker", pd.getWorkerId());
+            assertTrue(pd.getLastPollTime() != 0);
+            if (pd.getDomain() != null) {
+                assertEquals("domain1", pd.getDomain());
+            }
+        }
+
+        List<PollData> pdList = workflowExecutionService.getAllPollData();
+        int count = 0;
+        for (PollData pd : pdList) {
+            if (pd.getQueueName().equals("junit_task_3")) {
+                count++;
+            }
+        }
+        assertEquals(2, count);
+    }
+
+    @Test
+    public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception {
+
+        clearWorkflows();
+        createWorkflowDefForDomain();
+
+        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get();
+
+        String correlationId = "unit_test_sw";
+        Map<String, Object> input = new HashMap<>();
+        String inputParam1 = "p1 value";
+        input.put("param1", inputParam1);
+        input.put("param2", "p2 value");
+        Map<String, String> taskToDomain = new HashMap<>();
+        taskToDomain.put("*", "domain11,, domain12");
+
+        // Poll first so that polling for these tasks is registered as "active"
+        Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11");
+        assertNull(task);
+        task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain12");
+        assertNull(task);
+
+        String wfid = startOrLoadWorkflowExecution("simpleWorkflowWithTasksInOneDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain);
+        System.out.println("testSimpleWorkflowWithAllTaskInOneDomain.wfid=" + wfid);
+        assertNotNull(wfid);
+        Workflow wf = workflowExecutor.getWorkflow(wfid, false);
+        assertNotNull(wf);
+
+        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+        assertNotNull(es);
+        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
+
+        es = workflowExecutionService.getExecutionStatus(wfid, true);
+        assertNotNull(es);
+        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
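+        // Domain-scoped queues are named "<domain>:<taskType>", so with the "*" mapping above
+        // junit_task_3 should be queued as "domain11:junit_task_3", not as plain "junit_task_3".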
+ + + // Check Size + Map sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain11:junit_task_3", "junit_task_3")); + assertEquals(sizes.get("domain11:junit_task_3").intValue(), 1); + assertEquals(sizes.get("junit_task_3").intValue(), 0); + + // Polling for the first task should return the same task as before + task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker"); + assertNull(task); + task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11"); + assertNotNull(task); + assertEquals("junit_task_3", task.getTaskType()); + assertEquals("domain11", task.getDomain()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(wfid, task.getWorkflowInstanceId()); + + String task1Op = "task1.Done"; + List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); + assertNotNull(tasks); + assertEquals(1, tasks.size()); + task = tasks.get(0); + + Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); + System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput()); + assertEquals(wfid, task.getWorkflowInstanceId()); + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, false); + assertNotNull(es); + assertNotNull(es.getOutput()); + assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); + assertEquals("task1.Done", es.getOutput().get("o3")); + + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertEquals("junit_task_1", task.getTaskType()); + Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertNotNull(essw.getTaskToDomain()); + assertEquals(essw.getTaskToDomain().size(), 1); + + task.setStatus(COMPLETED); + task.setReasonForIncompletion("unit test failure"); + workflowExecutionService.updateTask(task); + + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain11"); + assertNull(task); + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain12"); + assertNotNull(task); + assertEquals("junit_task_2", task.getTaskType()); + assertEquals("domain12", task.getDomain()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + task.setStatus(COMPLETED); + task.setReasonForIncompletion("unit test failure"); + workflowExecutionService.updateTask(task); + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + tasks = es.getTasks(); + assertNotNull(tasks); + assertEquals(2, tasks.size()); + + assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); + assertEquals("task1.Done", es.getOutput().get("o3")); + + } + + @After + public void clearWorkflows() throws Exception { + List workflows = metadataService.getWorkflowDefs().stream() + .map(WorkflowDef::getName) + .collect(Collectors.toList()); + for (String wfName : workflows) { + List running = workflowExecutionService.getRunningWorkflows(wfName); + for (String wfid : running) { + workflowExecutor.terminateWorkflow(wfid, "cleanup"); + } + } + queueDAO.queuesDetail().keySet().forEach(queueName -> { + queueDAO.flush(queueName); + }); + } + + @Test + 
public void testLongRunning() throws Exception { + + clearWorkflows(); + + metadataService.getWorkflowDef(LONG_RUNNING, 1).get(); + + String correlationId = "unit_test_1"; + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input); + System.out.println("testLongRunning.wfid=" + wfid); + assertNotNull(wfid); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + // Check the queue + assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); + /// + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + String param1 = (String) task.getInputData().get("p1"); + String param2 = (String) task.getInputData().get("p2"); + + assertNotNull(param1); + assertNotNull(param2); + assertEquals("p1 value", param1); + assertEquals("p2 value", param2); + + + String task1Op = "task1.In.Progress"; + task.getOutputData().put("op", task1Op); + task.setStatus(Status.IN_PROGRESS); + task.setCallbackAfterSeconds(5); + workflowExecutionService.updateTask(task); + String taskId = task.getTaskId(); + + // Check the queue + assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); + /// + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + // Polling for next task should not return anything + Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNull(task2); + + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNull(task); + + Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); + // Polling for the first task should return the same task as before + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(task.getTaskId(), taskId); + + task1Op = "task1.Done"; + List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); + assertNotNull(tasks); + assertEquals(1, tasks.size()); + assertEquals(wfid, task.getWorkflowInstanceId()); + task = tasks.get(0); + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + task.setReasonForIncompletion("unit test failure"); + workflowExecutionService.updateTask(task); + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + 
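+        // Both junit_task_1 (after its IN_PROGRESS callback window) and junit_task_2 are now
+        // complete, so the long-running workflow should have reached COMPLETED.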
assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + tasks = es.getTasks(); + assertNotNull(tasks); + assertEquals(2, tasks.size()); + + + } + + @Test + public void testResetWorkflowInProgressTasks() throws Exception { + + clearWorkflows(); + + metadataService.getWorkflowDef(LONG_RUNNING, 1).get(); + + String correlationId = "unit_test_1"; + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input); + System.out.println("testLongRunning.wfid=" + wfid); + assertNotNull(wfid); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + // Check the queue + assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); + /// + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + String param1 = (String) task.getInputData().get("p1"); + String param2 = (String) task.getInputData().get("p2"); + + assertNotNull(param1); + assertNotNull(param2); + assertEquals("p1 value", param1); + assertEquals("p2 value", param2); + + + String task1Op = "task1.In.Progress"; + task.getOutputData().put("op", task1Op); + task.setStatus(Status.IN_PROGRESS); + task.setCallbackAfterSeconds(3600); + workflowExecutionService.updateTask(task); + String taskId = task.getTaskId(); + + // Check the queue + assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); + /// + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + // Polling for next task should not return anything + Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNull(task2); + + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNull(task); + + //Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); + // Reset + workflowExecutor.resetCallbacksForInProgressTasks(wfid); + + + // Now Polling for the first task should return the same task as before + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(task.getTaskId(), taskId); + assertEquals(task.getCallbackAfterSeconds(), 0); + + task1Op = "task1.Done"; + List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); + assertNotNull(tasks); + assertEquals(1, tasks.size()); + assertEquals(wfid, task.getWorkflowInstanceId()); + task = tasks.get(0); + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + 
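+        // tp1 is wired from the workflow input (param1), so it should still carry the original
+        // value even though the task's in-progress callback was reset earlier in the test.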
assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + tasks = es.getTasks(); + assertNotNull(tasks); + assertEquals(2, tasks.size()); + + + } + + + @Test + public void testConcurrentWorkflowExecutions() throws Exception { + + int count = 3; + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_concurrrent"; + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String[] wfids = new String[count]; + + for (int i = 0; i < count; i++) { + String wfid = startOrLoadWorkflowExecution("concurrentWorkflowExecutions", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); + System.out.println("testConcurrentWorkflowExecutions.wfid=" + wfid); + assertNotNull(wfid); + + List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); + assertNotNull(ids); + assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 + boolean foundId = false; + for (String id : ids) { + if (id.equals(wfid)) { + foundId = true; + } + } + assertTrue(foundId); + wfids[i] = wfid; + } + + + String task1Op = ""; + for (int i = 0; i < count; i++) { + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String param1 = (String) task.getInputData().get("p1"); + String param2 = (String) task.getInputData().get("p2"); + + assertNotNull(param1); + assertNotNull(param2); + assertEquals("p1 value", param1); + assertEquals("p2 value", param2); + + task1Op = "task1.output->" + param1 + "." 
+ param2; + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + } + + for (int i = 0; i < count; i++) { + Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + } + + List wfs = workflowExecutionService.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false); + wfs.forEach(wf -> { + assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); + }); + + + } + + @Test + public void testCaseStatements() throws Exception { + createConditionalWF(); + + String correlationId = "testCaseStatements: " + System.currentTimeMillis(); + Map input = new HashMap(); + String wfid; + String[] sequence; + + + //default case + input.put("param1", "xxx"); + input.put("param2", "two"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF, 1, correlationId, input); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + Task task = workflowExecutionService.poll("junit_task_2", "junit"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + assertEquals(3, es.getTasks().size()); + + /// + + + //nested - one + input.put("param1", "nested"); + input.put("param2", "one"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 2, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + sequence = new String[]{"junit_task_1", "junit_task_3"}; + + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_1", "junit_task_3", SystemTaskType.DECISION.name()}, 5); + // + + //nested - two + input.put("param1", "nested"); + input.put("param2", "two"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 3, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + sequence = new String[]{"junit_task_2"}; + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_2", SystemTaskType.DECISION.name()}, 4); + // + + //three + input.put("param1", "three"); + input.put("param2", "two"); + input.put("finalCase", "notify"); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF + 4, COND_TASK_WF, 1, correlationId, input, null, null); + System.out.println("testCaseStatements.wfid=" + wfid); + assertNotNull(wfid); + sequence = new String[]{"junit_task_3", "junit_task_4"}; + validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), "junit_task_3", SystemTaskType.DECISION.name(), "junit_task_4"}, 3); + // + + } + + private void validate(String wfid, 
String[] sequence, String[] executedTasks, int expectedTotalTasks) throws Exception { + for (int i = 0; i < sequence.length; i++) { + String t = sequence[i]; + Task task = getTask(t); + if (task == null) { + System.out.println("Missing task for " + t + ", below are the workflow tasks completed..."); + Workflow workflow = workflowExecutionService.getExecutionStatus(wfid, true); + for (Task x : workflow.getTasks()) { + System.out.println(x.getTaskType() + "/" + x.getReferenceTaskName()); + } + } + assertNotNull("No task for " + t, task); + assertEquals(wfid, task.getWorkflowInstanceId()); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + Workflow workflow = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(workflow); + assertTrue(!workflow.getTasks().isEmpty()); + if (i < sequence.length - 1) { + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + } else { + workflow = workflowExecutionService.getExecutionStatus(wfid, true); + List workflowTasks = workflow.getTasks(); + assertEquals(workflowTasks.toString(), executedTasks.length, workflowTasks.size()); + for (int k = 0; k < executedTasks.length; k++) { + assertEquals("Tasks: " + workflowTasks.toString() + "\n", executedTasks[k], workflowTasks.get(k).getTaskType()); + } + + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + } + } + } + + + private Task getTask(String taskType) throws Exception { + Task task; + int count = 2; + do { + task = workflowExecutionService.poll(taskType, "junit"); + if (task == null) { + count--; + } + if (count < 0) { + break; + } + + } while (task == null); + if (task != null) { + workflowExecutionService.ackTaskReceived(task.getTaskId()); + } + return task; + } + + @Test + public void testRetries() throws Exception { + + String taskName = "junit_task_2"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(2); + taskDef.setRetryDelaySeconds(1); + metadataService.updateTaskDef(taskDef); + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1"; + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + System.out.println("testRetries.wfid=" + wfid); + assertNotNull(wfid); + + List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); + assertNotNull(ids); + assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 + boolean foundId = false; + for (String id : ids) { + if (id.equals(wfid)) { + foundId = true; + } + } + assertTrue(foundId); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + String param1 = (String) task.getInputData().get("p1"); + String param2 = (String) task.getInputData().get("p2"); + + assertNotNull(param1); + assertNotNull(param2); + assertEquals("p1 value", param1); + assertEquals("p2 value", param2); + + String task1Op = "task1.output->" + param1 + "." 
+ param2;
+ task.getOutputData().put("op", task1Op);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ //fail the task twice and then succeed
+ verify(inputParam1, wfid, task1Op, true);
+ Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS);
+ verify(inputParam1, wfid, task1Op, false);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+ assertEquals(3, es.getTasks().size()); // task 1, plus 2 attempts of task 2
+
+ assertEquals("junit_task_1", es.getTasks().get(0).getTaskType());
+ assertEquals("junit_task_2", es.getTasks().get(1).getTaskType());
+ assertEquals("junit_task_2", es.getTasks().get(2).getTaskType());
+ assertEquals(COMPLETED, es.getTasks().get(0).getStatus());
+ assertEquals(FAILED, es.getTasks().get(1).getStatus());
+ assertEquals(COMPLETED, es.getTasks().get(2).getStatus());
+ assertEquals(es.getTasks().get(1).getTaskId(), es.getTasks().get(2).getRetriedTaskId());
+
+
+ }
+
+ @Test
+ public void testSuccess() throws Exception {
+
+ metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
+
+ String correlationId = "unit_test_1" + UUID.randomUUID().toString();
+ Map<String, Object> input = new HashMap<>();
+ String inputParam1 = "p1 value";
+ input.put("param1", inputParam1);
+ input.put("param2", "p2 value");
+ String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
+ assertNotNull(wfid);
+
+ List<String> ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2);
+ assertNotNull(ids);
+ assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1
+ boolean foundId = false;
+ for (String id : ids) {
+ if (id.equals(wfid)) {
+ foundId = true;
+ }
+ }
+ assertTrue(foundId);
+
+ /*
+ * @correlationId
+ List<Workflow> byCorrelationId = ess.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false);
+ assertNotNull(byCorrelationId);
+ assertTrue(!byCorrelationId.isEmpty());
+ assertEquals(1, byCorrelationId.size());
+ */
+
+ Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // The first task would be marked as scheduled
+ assertEquals(1, es.getTasks().size());
+ assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus());
+
+ // decide should be idempotent if re-run on the same state!
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ assertEquals(1, es.getTasks().size());
+ Task t = es.getTasks().get(0);
+ assertEquals(Status.SCHEDULED, t.getStatus());
+
+ Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ assertEquals(t.getTaskId(), task.getTaskId());
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ t = es.getTasks().get(0);
+ assertEquals(Status.IN_PROGRESS, t.getStatus());
+ String taskId = t.getTaskId();
+
+ String param1 = (String) task.getInputData().get("p1");
+ String param2 = (String) task.getInputData().get("p2");
+
+ assertNotNull(param1);
+ assertNotNull(param2);
+ assertEquals("p1 value", param1);
+ assertEquals("p2 value", param2);
+
+ String task1Op = "task1.output->" + param1 + "."
+ param2; + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + // If we get the full workflow here then, last task should be completed and the next task should be scheduled + es = workflowExecutionService.getExecutionStatus(wfid, true); + es.getTasks().forEach(wfTask -> { + if (wfTask.getTaskId().equals(taskId)) { + assertEquals(COMPLETED, wfTask.getStatus()); + } else { + assertEquals(Status.SCHEDULED, wfTask.getStatus()); + } + }); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertNotNull(task); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + // Check the tasks, at this time there should be 2 task + assertEquals(es.getTasks().size(), 2); + es.getTasks().forEach(wfTask -> { + assertEquals(wfTask.getStatus(), COMPLETED); + }); + + System.out.println("Total tasks=" + es.getTasks().size()); + assertTrue(es.getTasks().size() < 10); + + + } + + @Test + public void testDeciderUpdate() throws Exception { + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + assertNotNull(wfid); + + Workflow workflow = workflowExecutor.getWorkflow(wfid, false); + long updated1 = workflow.getUpdateTime(); + Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); + workflowExecutor.decide(wfid); + workflow = workflowExecutor.getWorkflow(wfid, false); + long updated2 = workflow.getUpdateTime(); + assertEquals(updated1, updated2); + + Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS); + workflowExecutor.terminateWorkflow(wfid, "done"); + workflow = workflowExecutor.getWorkflow(wfid, false); + updated2 = workflow.getUpdateTime(); + assertTrue("updated1[" + updated1 + "] >? 
updated2[" + updated2 + "]", updated2 > updated1); + + } + + @Test + @Ignore + //Ignore for now, will improve this in the future + public void testFailurePoints() throws Exception { + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + assertNotNull(wfid); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + // The first task would be marked as scheduled + assertEquals(1, es.getTasks().size()); + assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String taskId = task.getTaskId(); + + String task1Op = "task1.output"; + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + try { + workflowExecutionService.updateTask(task); + } catch (Exception e) { + workflowExecutionService.updateTask(task); + } + + // If we get the full workflow here then, last task should be completed and the next task should be scheduled + es = workflowExecutionService.getExecutionStatus(wfid, true); + es.getTasks().forEach(wfTask -> { + if (wfTask.getTaskId().equals(taskId)) { + assertEquals(COMPLETED, wfTask.getStatus()); + } else { + assertEquals(Status.SCHEDULED, wfTask.getStatus()); + } + }); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertNotNull(task); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + // Check the tasks, at this time there should be 2 task + assertEquals(es.getTasks().size(), 2); + es.getTasks().forEach(wfTask -> { + assertEquals(wfTask.getStatus(), COMPLETED); + }); + + System.out.println("Total tasks=" + es.getTasks().size()); + assertTrue(es.getTasks().size() < 10); + + + } + + @Test + public void testDeciderMix() throws Exception { + + ExecutorService executors = Executors.newFixedThreadPool(3); + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + assertNotNull(wfid); + + List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); + assertNotNull(ids); + assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 + boolean foundId = false; + for (String id : ids) { + if (id.equals(wfid)) { + foundId = true; + } + } + assertTrue(foundId); + + Workflow es = 
workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // The first task would be marked as scheduled
+ assertEquals(1, es.getTasks().size());
+ assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus());
+
+ List<Future<?>> futures = new LinkedList<>();
+ for (int i = 0; i < 10; i++) {
+ futures.add(executors.submit(() -> {
+ workflowExecutor.decide(wfid);
+ return null;
+ }));
+ }
+ for (Future<?> future : futures) {
+ future.get();
+ }
+ futures.clear();
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // The first task would be marked as scheduled
+ assertEquals(1, es.getTasks().size());
+ assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus());
+
+
+ // decide should be idempotent if re-run on the same state!
+ workflowExecutor.decide(wfid);
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ assertEquals(1, es.getTasks().size());
+ Task t = es.getTasks().get(0);
+ assertEquals(Status.SCHEDULED, t.getStatus());
+
+ Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ assertEquals(t.getTaskId(), task.getTaskId());
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ t = es.getTasks().get(0);
+ assertEquals(Status.IN_PROGRESS, t.getStatus());
+ String taskId = t.getTaskId();
+
+ String param1 = (String) task.getInputData().get("p1");
+ String param2 = (String) task.getInputData().get("p2");
+
+ assertNotNull(param1);
+ assertNotNull(param2);
+ assertEquals("p1 value", param1);
+ assertEquals("p2 value", param2);
+
+ String task1Op = "task1.output->" + param1 + "." + param2;
+ task.getOutputData().put("op", task1Op);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ // If we fetch the full workflow now, the last task should be completed and the next task scheduled
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ es.getTasks().forEach(wfTask -> {
+ if (wfTask.getTaskId().equals(taskId)) {
+ assertEquals(COMPLETED, wfTask.getStatus());
+ } else {
+ assertEquals(Status.SCHEDULED, wfTask.getStatus());
+ }
+ });
+
+ //Run sweep 10 times!
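+ // decide() is expected to be a no-op when the workflow state has not changed,
+ // so hammering it from several threads here must not schedule duplicate tasks;
+ // the task count is re-asserted below to verify exactly that.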
+ for (int i = 0; i < 10; i++) { + futures.add(executors.submit(() -> { + long s = System.currentTimeMillis(); + workflowExecutor.decide(wfid); + System.out.println("Took " + (System.currentTimeMillis() - s) + " ms to run decider"); + return null; + })); + } + for (Future future : futures) { + future.get(); + } + futures.clear(); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(2, es.getTasks().size()); + + System.out.println("Workflow tasks=" + es.getTasks()); + + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertNotNull(task); + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + // Check the tasks, at this time there should be 2 task + assertEquals(es.getTasks().size(), 2); + es.getTasks().forEach(wfTask -> { + assertEquals(wfTask.getStatus(), COMPLETED); + }); + + System.out.println("Total tasks=" + es.getTasks().size()); + assertTrue(es.getTasks().size() < 10); + } + + @Test + public void testFailures() throws Exception { + metadataService.getWorkflowDef(FORK_JOIN_WF, 1).get(); + + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + metadataService.updateTaskDef(taskDef); + + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + assertNotNull(found.getFailureWorkflow()); + assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + input.put("failureWfName", "FanInOutTest"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + assertNotNull(wfid); + + Task task = getTask("junit_task_1"); + assertNotNull(task); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + // If we get the full workflow here then, last task should be completed and the next task should be scheduled + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.FAILED, es.getStatus()); + + taskDef.setRetryCount(RETRY_COUNT); + metadataService.updateTaskDef(taskDef); + + } + + @Test + public void testRetryWithForkJoin() throws Exception { + String workflowId = this.runAFailedForkJoinWF(); + workflowExecutor.retry(workflowId); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getStatus(), WorkflowStatus.RUNNING); + + printTaskStatuses(workflow, "After retry called"); + + Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); + assertNotNull(t2); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + + Task t3 = workflowExecutionService.poll("junit_task_0_RT_3", "test"); + assertNotNull(t3); + 
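+ // t2 and t3 are the two forked branches re-scheduled by retry(); they are
+ // acked and then completed concurrently below to exercise the join behavior.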
assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); + + t2.setStatus(COMPLETED); + t3.setStatus(COMPLETED); + + ExecutorService es = Executors.newFixedThreadPool(2); + Future future1 = es.submit(() -> { + try { + workflowExecutionService.updateTask(t2); + } catch (Exception e) { + throw new RuntimeException(e); + } + + }); + final Task _t3 = t3; + Future future2 = es.submit(() -> { + try { + workflowExecutionService.updateTask(_t3); + } catch (Exception e) { + throw new RuntimeException(e); + } + + }); + future1.get(); + future2.get(); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + + printTaskStatuses(workflow, "T2, T3 complete"); + workflowExecutor.decide(workflowId); + + Task t4 = workflowExecutionService.poll("junit_task_0_RT_4", "test"); + assertNotNull(t4); + t4.setStatus(COMPLETED); + workflowExecutionService.updateTask(t4); + + printTaskStatuses(workflowId, "After complete"); + } + + @Test + public void testRetry() throws Exception { + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + int retryCount = taskDef.getRetryCount(); + taskDef.setRetryCount(1); + int retryDelay = taskDef.getRetryDelaySeconds(); + taskDef.setRetryDelaySeconds(0); + metadataService.updateTaskDef(taskDef); + + WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + assertNotNull(workflowDef.getFailureWorkflow()); + assertFalse(StringUtils.isBlank(workflowDef.getFailureWorkflow())); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap<>(); + input.put("param1", "p1 value"); + input.put("param2", "p2 value"); + String workflowId = startOrLoadWorkflowExecution("retry", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); + assertNotNull(workflowId); + printTaskStatuses(workflowId, "initial"); + + Task task = getTask("junit_task_1"); + assertNotNull(task); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + task = getTask("junit_task_1"); + assertNotNull(task); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); + + printTaskStatuses(workflowId, "before retry"); + + workflowExecutor.retry(workflowId); + + printTaskStatuses(workflowId, "after retry"); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + task = getTask("junit_task_1"); + assertNotNull(task); + assertEquals(workflowId, task.getWorkflowInstanceId()); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + task = getTask("junit_task_2"); + assertNotNull(task); + assertEquals(workflowId, task.getWorkflowInstanceId()); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + + 
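+ // Expect 3 executions of junit_task_1: the initial attempt, the automatic
+ // retry (retryCount was set to 1 above), and the attempt created by retry().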
assertEquals(3, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_1")).count());
+
+ taskDef.setRetryCount(retryCount);
+ taskDef.setRetryDelaySeconds(retryDelay);
+ metadataService.updateTaskDef(taskDef);
+
+ printTaskStatuses(workflowId, "final");
+
+ }
+
+ @Test
+ public void testNonRestartableWorkflows() throws Exception {
+ String taskName = "junit_task_1";
+ TaskDef taskDef = metadataService.getTaskDef(taskName);
+ taskDef.setRetryCount(0);
+ metadataService.updateTaskDef(taskDef);
+
+ WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
+ found.setName(JUNIT_TEST_WF_NON_RESTARTABLE);
+ found.setRestartable(false);
+ metadataService.updateWorkflowDef(found);
+
+ assertNotNull(found);
+ assertNotNull(found.getFailureWorkflow());
+ assertFalse(StringUtils.isBlank(found.getFailureWorkflow()));
+
+ String correlationId = "unit_test_1" + UUID.randomUUID().toString();
+ Map<String, Object> input = new HashMap<>();
+ String inputParam1 = "p1 value";
+ input.put("param1", inputParam1);
+ input.put("param2", "p2 value");
+ String wfid = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input);
+ assertNotNull(wfid);
+
+ Task task = getTask("junit_task_1");
+ task.setStatus(FAILED);
+ workflowExecutionService.updateTask(task);
+
+ // The task has no retries left, so the workflow should now be FAILED
+ Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.FAILED, es.getStatus());
+
+ workflowExecutor.rewind(es.getWorkflowId());
+
+ workflowExecutor.decide(wfid);
+
+ // Polling for the first task should return the same task as before
+ task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertEquals("junit_task_1", task.getTaskType());
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+ assertEquals(wfid, task.getWorkflowInstanceId());
+
+ workflowExecutor.decide(wfid);
+
+ String task1Op = "task1.Done";
+ List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
+ assertNotNull(tasks);
+ assertEquals(1, tasks.size());
+ task = tasks.get(0);
+
+ Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
+ System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
+ assertEquals(wfid, task.getWorkflowInstanceId());
+ task.getOutputData().put("op", task1Op);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, false);
+ assertNotNull(es);
+ assertNotNull(es.getOutput());
+ assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
+ assertEquals("task1.Done", es.getOutput().get("o3"));
+
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNotNull(task);
+ assertEquals("junit_task_2", task.getTaskType());
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+ String task2Input = (String) task.getInputData().get("tp2");
+ assertNotNull("Found=" + task.getInputData(), task2Input);
+ assertEquals(task1Op, task2Input);
+
+
+ task2Input = (String) task.getInputData().get("tp1");
+ assertNotNull(task2Input);
+ assertEquals(inputParam1, task2Input);
+
+ task.setStatus(COMPLETED);
+ task.setReasonForIncompletion("unit test failure");
+ workflowExecutionService.updateTask(task);
+
+
+ es =
workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + tasks = es.getTasks(); + assertNotNull(tasks); + assertEquals(2, tasks.size()); + + assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); + assertEquals("task1.Done", es.getOutput().get("o3")); + + + expectedException.expect(ApplicationException.class); + expectedException.expectMessage(String.format("is an instance of WorkflowDef: %s and version: %d and is non restartable", JUNIT_TEST_WF_NON_RESTARTABLE, 1)); + workflowExecutor.rewind(es.getWorkflowId()); + } + + + @Test + public void testRestart() throws Exception { + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(0); + metadataService.updateTaskDef(taskDef); + + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + assertNotNull(found.getFailureWorkflow()); + assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + assertNotNull(wfid); + + Task task = getTask("junit_task_1"); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + // If we get the full workflow here then, last task should be completed and the next task should be scheduled + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.FAILED, es.getStatus()); + + workflowExecutor.rewind(es.getWorkflowId()); + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + task = getTask("junit_task_1"); + assertNotNull(task); + assertEquals(wfid, task.getWorkflowInstanceId()); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + task = getTask("junit_task_2"); + assertNotNull(task); + assertEquals(wfid, task.getWorkflowInstanceId()); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + + + } + + + @Test + public void testTimeout() throws Exception { + + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(1); + taskDef.setTimeoutSeconds(1); + taskDef.setRetryDelaySeconds(0); + taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); + metadataService.updateTaskDef(taskDef); + + WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + assertNotNull(found.getFailureWorkflow()); + assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); + + String correlationId = "unit_test_1" + UUID.randomUUID().toString(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + input.put("failureWfName", "FanInOutTest"); + String wfid = startOrLoadWorkflowExecution("timeout", LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); + assertNotNull(wfid); + + //Ensure 
that we have a workflow queued up for evaluation here...
+ long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
+ assertEquals(1, size);
+
+ // The workflow should be running with the first task scheduled
+ Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ assertEquals("found: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, es.getTasks().size());
+
+ Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertEquals(wfid, task.getWorkflowInstanceId());
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+
+ //Ensure that we have a workflow queued up for evaluation here...
+ size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
+ assertEquals(1, size);
+
+
+ Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
+ workflowSweeper.sweep(Arrays.asList(wfid), workflowExecutor);
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals("found: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, es.getTasks().size());
+
+ Task task1 = es.getTasks().get(0);
+ assertEquals(Status.TIMED_OUT, task1.getStatus());
+ Task task2 = es.getTasks().get(1);
+ assertEquals(Status.SCHEDULED, task2.getStatus());
+
+ task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker");
+ assertNotNull(task);
+ assertEquals(wfid, task.getWorkflowInstanceId());
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
+ workflowExecutor.decide(wfid);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(2, es.getTasks().size());
+
+ assertEquals(Status.TIMED_OUT, es.getTasks().get(0).getStatus());
+ assertEquals(Status.TIMED_OUT, es.getTasks().get(1).getStatus());
+ assertEquals(WorkflowStatus.TIMED_OUT, es.getStatus());
+
+ assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE));
+
+ taskDef.setTimeoutSeconds(0);
+ taskDef.setRetryCount(RETRY_COUNT);
+ metadataService.updateTaskDef(taskDef);
+
+ }
+
+ @Test
+ public void testReruns() throws Exception {
+
+ metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
+
+ String correlationId = "unit_test_1" + UUID.randomUUID().toString();
+ Map<String, Object> input = new HashMap<>();
+ String inputParam1 = "p1 value";
+ input.put("param1", inputParam1);
+ input.put("param2", "p2 value");
+ String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
+ assertNotNull(wfid);
+
+ Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // Check the tasks, at this time there should be 1 task
+ assertEquals(es.getTasks().size(), 1);
+ Task t = es.getTasks().get(0);
+ assertEquals(Status.SCHEDULED, t.getStatus());
+
+ Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+ assertEquals(t.getTaskId(), task.getTaskId());
+
+ String param1 = (String) task.getInputData().get("p1");
+ String param2 = (String) task.getInputData().get("p2");
+
+ assertNotNull(param1);
+ assertNotNull(param2);
+ assertEquals("p1 value", param1);
+ assertEquals("p2 value", param2);
+
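+ // The value stored under "op" below is mapped by the workflow definition into
+ // junit_task_2's "tp2" input; the assertions after the next poll rely on that wiring.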
+ String task1Op = "task1.output->" + param1 + "." + param2;
+ task.getOutputData().put("op", task1Op);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ // If we fetch the full workflow now, the last task should be completed and the next task scheduled
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ es.getTasks().forEach(wfTask -> {
+ if (wfTask.getTaskId().equals(t.getTaskId())) {
+ assertEquals(wfTask.getStatus(), COMPLETED);
+ } else {
+ assertEquals(wfTask.getStatus(), Status.SCHEDULED);
+ }
+ });
+
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+ String task2Input = (String) task.getInputData().get("tp2");
+ assertNotNull(task2Input);
+ assertEquals(task1Op, task2Input);
+
+ task2Input = (String) task.getInputData().get("tp1");
+ assertNotNull(task2Input);
+ assertEquals(inputParam1, task2Input);
+
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+
+ // Now let's rerun the workflow from the second task
+ RerunWorkflowRequest request = new RerunWorkflowRequest();
+ request.setReRunFromWorkflowId(wfid);
+ request.setReRunFromTaskId(es.getTasks().get(1).getTaskId());
+
+ String reRunwfid = workflowExecutor.rerun(request);
+
+ Workflow esRR = workflowExecutionService.getExecutionStatus(reRunwfid, true);
+ assertNotNull(esRR);
+ assertEquals(esRR.getReasonForIncompletion(), WorkflowStatus.RUNNING, esRR.getStatus());
+ // Check the tasks, at this time there should be 2 tasks
+ // first one is skipped and the second one is scheduled
+ assertEquals(esRR.getTasks().toString(), 2, esRR.getTasks().size());
+ assertEquals(COMPLETED, esRR.getTasks().get(0).getStatus());
+ Task tRR = esRR.getTasks().get(1);
+ assertEquals(esRR.getTasks().toString(), Status.SCHEDULED, tRR.getStatus());
+ assertEquals(tRR.getTaskType(), "junit_task_2");
+
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+ task2Input = (String) task.getInputData().get("tp2");
+ assertNotNull(task2Input);
+ assertEquals(task1Op, task2Input);
+
+ task2Input = (String) task.getInputData().get("tp1");
+ assertNotNull(task2Input);
+ assertEquals(inputParam1, task2Input);
+
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ es = workflowExecutionService.getExecutionStatus(reRunwfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+
+ //////////////////////
+ // Now rerun the entire workflow
+ RerunWorkflowRequest request1 = new RerunWorkflowRequest();
+ request1.setReRunFromWorkflowId(wfid);
+
+ String reRunwfid1 = workflowExecutor.rerun(request1);
+
+ es = workflowExecutionService.getExecutionStatus(reRunwfid1, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // Check the tasks, at this time there should be 1 task
+ assertEquals(es.getTasks().size(), 1);
+ assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus());
+
+ task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ task.getOutputData().put("op", task1Op);
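+ // Complete the rerun's first task the same way as in the original run; the
+ // remainder of the rerun should then mirror the first execution.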
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+
+
+ }
+
+
+ @Test
+ public void testTaskSkipping() throws Exception {
+
+ String taskName = "junit_task_1";
+ TaskDef taskDef = metadataService.getTaskDef(taskName);
+ taskDef.setRetryCount(0);
+ taskDef.setTimeoutSeconds(0);
+ metadataService.updateTaskDef(taskDef);
+
+
+ metadataService.getWorkflowDef(TEST_WORKFLOW_NAME_3, 1).get();
+
+ String correlationId = "unit_test_1" + UUID.randomUUID().toString();
+ Map<String, Object> input = new HashMap<>();
+ String inputParam1 = "p1 value";
+ input.put("param1", inputParam1);
+ input.put("param2", "p2 value");
+ String wfid = startOrLoadWorkflowExecution(TEST_WORKFLOW_NAME_3, 1, correlationId, input);
+ assertNotNull(wfid);
+
+ // Now skip the second task
+ workflowExecutor.skipTaskFromWorkflow(wfid, "t2", null);
+
+ Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+ // Check the tasks; at this time there should be 2 tasks: the first scheduled, the second skipped
+ assertEquals(2, es.getTasks().size());
+ assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus());
+ assertEquals(Status.SKIPPED, es.getTasks().get(1).getStatus());
+
+ Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+ assertEquals("t1", task.getReferenceTaskName());
+
+ String param1 = (String) task.getInputData().get("p1");
+ String param2 = (String) task.getInputData().get("p2");
+
+ assertNotNull(param1);
+ assertNotNull(param2);
+ assertEquals("p1 value", param1);
+ assertEquals("p2 value", param2);
+
+ String task1Op = "task1.output->" + param1 + "."
+ param2; + task.getOutputData().put("op", task1Op); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + // If we get the full workflow here then, last task should be completed and the next task should be scheduled + es = workflowExecutionService.getExecutionStatus(wfid, true); + es.getTasks().forEach(wfTask -> { + if (wfTask.getReferenceTaskName().equals("t1")) { + assertEquals(COMPLETED, wfTask.getStatus()); + } else if (wfTask.getReferenceTaskName().equals("t2")) { + assertEquals(Status.SKIPPED, wfTask.getStatus()); + } else { + assertEquals(Status.SCHEDULED, wfTask.getStatus()); + } + }); + + task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); + assertNotNull(task); + assertEquals(Status.IN_PROGRESS, task.getStatus()); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + + + } + + @Test + public void testPauseResume() throws Exception { + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + + String correlationId = "unit_test_1" + System.nanoTime(); + Map input = new HashMap(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + + assertNotNull(wfid); + + List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); + assertNotNull(ids); + assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 + boolean foundId = false; + for (String id : ids) { + if (id.equals(wfid)) { + foundId = true; + } + } + assertTrue(foundId); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + es = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(es); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + Task t = es.getTasks().get(0); + assertEquals(Status.SCHEDULED, t.getStatus()); + + // PAUSE + workflowExecutor.pauseWorkflow(wfid); + + // The workflow is paused but the scheduled task should be pollable + + Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + assertEquals(t.getTaskId(), task.getTaskId()); + + String param1 = (String) task.getInputData().get("p1"); + String param2 = (String) task.getInputData().get("p2"); + + assertNotNull(param1); + assertNotNull(param2); + assertEquals("p1 value", param1); + assertEquals("p2 value", param2); + + String task1Op = "task1.output->" + param1 + "." 
+ param2;
+ task.getOutputData().put("op", task1Op);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ // This decide should not schedule the next task
+ //ds.decideNow(wfid, task);
+
+ // If we fetch the full workflow now, the last task should be completed and the rest (including the PAUSE task) should be scheduled
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ es.getTasks().forEach(wfTask -> {
+ if (wfTask.getTaskId().equals(t.getTaskId())) {
+ assertEquals(wfTask.getStatus(), COMPLETED);
+ }
+ });
+
+ // This should return null as workflow is paused
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNull("Found: " + task, task);
+
+ // Even if decide is run again, the next task will not be scheduled as the workflow is still paused
+ workflowExecutor.decide(wfid);
+
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNull(task);
+
+ // RESUME
+ workflowExecutor.resumeWorkflow(wfid);
+
+ // Now polling should get the second task
+ task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+ assertNotNull(task);
+ assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+
+
+ String task2Input = (String) task.getInputData().get("tp2");
+ assertNotNull(task2Input);
+ assertEquals(task1Op, task2Input);
+
+ Task byRefName = workflowExecutionService.getPendingTaskForWorkflow("t2", wfid);
+ assertNotNull(byRefName);
+ assertEquals(task.getTaskId(), byRefName.getTaskId());
+
+ task2Input = (String) task.getInputData().get("tp1");
+ assertNotNull(task2Input);
+ assertEquals(inputParam1, task2Input);
+
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ es = workflowExecutionService.getExecutionStatus(wfid, true);
+ assertNotNull(es);
+ assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
+
+ }
+
+ @Test
+ public void testSubWorkflow() throws Exception {
+
+ createSubWorkflow();
+ metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get();
+ Map<String, Object> input = new HashMap<>();
+ input.put("param1", "param 1 value");
+ input.put("param3", "param 2 value");
+ input.put("wfName", LINEAR_WORKFLOW_T1_T2);
+ String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input);
+ assertNotNull(wfId);
+
+ Workflow es = workflowExecutionService.getExecutionStatus(wfId, true);
+ assertNotNull(es);
+
+ Task task = workflowExecutionService.poll("junit_task_5", "test");
+ assertNotNull(task);
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+ Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
+
+ es = workflowExecutionService.getExecutionStatus(wfId, true);
+ assertNotNull(es);
+ assertNotNull(es.getTasks());
+
+ task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().get();
+ assertNotNull(task);
+ assertNotNull(task.getOutputData());
+ assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId"));
+ String subWorkflowId = task.getOutputData().get("subWorkflowId").toString();
+
+ es = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
+ assertNotNull(es);
+ assertNotNull(es.getTasks());
+ assertEquals(wfId, es.getParentWorkflowId());
+ assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+
+ task = workflowExecutionService.poll("junit_task_1", "test");
+ task.setStatus(COMPLETED);
+ workflowExecutionService.updateTask(task);
+
+ task =
workflowExecutionService.poll("junit_task_2", "test"); + assertEquals(subWorkflowId, task.getWorkflowInstanceId()); + String uuid = UUID.randomUUID().toString(); + task.getOutputData().put("uuid", uuid); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + assertNotNull(es.getOutput()); + assertTrue(es.getOutput().containsKey("o1")); + assertTrue(es.getOutput().containsKey("o2")); + assertEquals("sub workflow input param1", es.getOutput().get("o1")); + assertEquals(uuid, es.getOutput().get("o2")); + + task = workflowExecutionService.poll("junit_task_6", "test"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(wfId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + } + + @Test + public void testSubWorkflowFailure() throws Exception { + + TaskDef taskDef = metadataService.getTaskDef("junit_task_1"); + assertNotNull(taskDef); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(2); + metadataService.updateTaskDef(taskDef); + + + createSubWorkflow(); + metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); + + Map input = new HashMap<>(); + input.put("param1", "param 1 value"); + input.put("param3", "param 2 value"); + input.put("wfName", LINEAR_WORKFLOW_T1_T2); + String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input); + assertNotNull(wfId); + + Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); + assertNotNull(es); + + Task task = workflowExecutionService.poll("junit_task_5", "test"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + es = workflowExecutionService.getExecutionStatus(wfId, true); + assertNotNull(es); + assertNotNull(es.getTasks()); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); + assertNotNull(task); + assertNotNull(task.getOutputData()); + assertNotNull(task.getOutputData().get("subWorkflowId")); + String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); + + es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(es); + assertNotNull(es.getTasks()); + + assertEquals(wfId, es.getParentWorkflowId()); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + task = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(es); + assertEquals(WorkflowStatus.FAILED, es.getStatus()); + workflowExecutor.executeSystemTask(subworkflow, es.getParentWorkflowTaskId(), 1); + es = workflowExecutionService.getExecutionStatus(wfId, true); + assertEquals(WorkflowStatus.FAILED, es.getStatus()); + + taskDef.setTimeoutSeconds(0); + taskDef.setRetryCount(RETRY_COUNT); + metadataService.updateTaskDef(taskDef); + + } + + @Test + public void testSubWorkflowFailureInverse() throws Exception { + + TaskDef taskDef = metadataService.getTaskDef("junit_task_1"); + assertNotNull(taskDef); + taskDef.setRetryCount(0); + taskDef.setTimeoutSeconds(2); + metadataService.updateTaskDef(taskDef); + + + createSubWorkflow(); + + WorkflowDef 
found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); + assertNotNull(found); + Map input = new HashMap<>(); + input.put("param1", "param 1 value"); + input.put("param3", "param 2 value"); + input.put("wfName", LINEAR_WORKFLOW_T1_T2); + String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input); + assertNotNull(wfId); + + Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); + assertNotNull(es); + + Task task = workflowExecutionService.poll("junit_task_5", "test"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + + es = workflowExecutionService.getExecutionStatus(wfId, true); + assertNotNull(es); + assertNotNull(es.getTasks()); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); + assertNotNull(task); + assertNotNull(task.getOutputData()); + assertNotNull(task.getOutputData().get("subWorkflowId")); + String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); + + es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(es); + assertNotNull(es.getTasks()); + assertEquals(wfId, es.getParentWorkflowId()); + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + + workflowExecutor.terminateWorkflow(wfId, "fail"); + es = workflowExecutionService.getExecutionStatus(wfId, true); + assertEquals(WorkflowStatus.TERMINATED, es.getStatus()); + + es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertEquals(WorkflowStatus.TERMINATED, es.getStatus()); + + } + + @Test + public void testSubWorkflowRetry() throws Exception { + String taskName = "junit_task_1"; + TaskDef taskDef = metadataService.getTaskDef(taskName); + int retryCount = metadataService.getTaskDef(taskName).getRetryCount(); + taskDef.setRetryCount(0); + metadataService.updateTaskDef(taskDef); + + // create a workflow with sub-workflow + createSubWorkflow(); + Optional found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); + assertTrue(found.isPresent()); + + // start the workflow + Map workflowInputParams = new HashMap<>(); + workflowInputParams.put("param1", "param 1"); + workflowInputParams.put("param3", "param 2"); + workflowInputParams.put("wfName", LINEAR_WORKFLOW_T1_T2); + String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", workflowInputParams); + assertNotNull(workflowId); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + + // poll and complete first task + Task task = workflowExecutionService.poll("junit_task_5", "test"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertNotNull(workflow.getTasks()); + assertEquals(2, workflow.getTasks().size()); + + task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().orElse(null); + assertNotNull(task); + assertNotNull(task.getOutputData()); + assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId")); + String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); + + workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(workflow); + 
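+ // The spawned sub-workflow should link back to its parent and still be
+ // running before its first task is deliberately failed below.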
assertNotNull(workflow.getTasks()); + assertEquals(workflowId, workflow.getParentWorkflowId()); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + // poll and fail the first task in sub-workflow + task = workflowExecutionService.poll("junit_task_1", "test"); + task.setStatus(FAILED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); + + // Retry the failed sub workflow + workflowExecutor.retry(subWorkflowId); + task = workflowExecutionService.poll("junit_task_1", "test"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + task = workflowExecutionService.poll("junit_task_2", "test"); + assertEquals(subWorkflowId, task.getWorkflowInstanceId()); + String uuid = UUID.randomUUID().toString(); + task.getOutputData().put("uuid", uuid); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + assertNotNull(workflow.getOutput()); + assertTrue(workflow.getOutput().containsKey("o1")); + assertTrue(workflow.getOutput().containsKey("o2")); + assertEquals("sub workflow input param1", workflow.getOutput().get("o1")); + assertEquals(uuid, workflow.getOutput().get("o2")); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + task = workflowExecutionService.poll("junit_task_6", "test"); + assertNotNull(task); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + + // reset retry count + taskDef = metadataService.getTaskDef(taskName); + taskDef.setRetryCount(retryCount); + metadataService.updateTaskDef(taskDef); + } + + + @Test + public void testWait() throws Exception { + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName("test_wait"); + workflowDef.setSchemaVersion(2); + + WorkflowTask waitWorkflowTask = new WorkflowTask(); + waitWorkflowTask.setWorkflowTaskType(TaskType.WAIT); + waitWorkflowTask.setName("wait"); + waitWorkflowTask.setTaskReferenceName("wait0"); + + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName("junit_task_1"); + workflowTask.setTaskReferenceName("t1"); + + workflowDef.getTasks().add(waitWorkflowTask); + workflowDef.getTasks().add(workflowTask); + metadataService.registerWorkflowDef(workflowDef); + + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); + assertNotNull(workflow); + assertEquals(1, workflow.getTasks().size()); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + + Task waitTask = workflow.getTasks().get(0); + assertEquals(TaskType.WAIT.name(), 
waitTask.getTaskType()); + waitTask.setStatus(COMPLETED); + workflowExecutor.updateTask(new TaskResult(waitTask)); + + Task task = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task); + task.setStatus(Status.COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); + } + + @Test + public void testEventWorkflow() throws Exception { + + TaskDef taskDef = new TaskDef(); + taskDef.setName("eventX"); + taskDef.setTimeoutSeconds(1); + + metadataService.registerTaskDef(Collections.singletonList(taskDef)); + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName("test_event"); + workflowDef.setSchemaVersion(2); + + WorkflowTask eventWorkflowTask = new WorkflowTask(); + eventWorkflowTask.setWorkflowTaskType(TaskType.EVENT); + eventWorkflowTask.setName("eventX"); + eventWorkflowTask.setTaskReferenceName("wait0"); + eventWorkflowTask.setSink("conductor"); + + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName("junit_task_1"); + workflowTask.setTaskReferenceName("t1"); + + workflowDef.getTasks().add(eventWorkflowTask); + workflowDef.getTasks().add(workflowTask); + metadataService.registerWorkflowDef(workflowDef); + + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); + Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); + assertNotNull(workflow); + + Task eventTask = workflow.getTasks().get(0); + assertEquals(TaskType.EVENT.name(), eventTask.getTaskType()); + assertEquals(COMPLETED, eventTask.getStatus()); + assertTrue(!eventTask.getOutputData().isEmpty()); + assertNotNull(eventTask.getOutputData().get("event_produced")); + + Task task = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task); + task.setStatus(Status.COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); + } + + @Test + public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception { + WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); + assertNotNull(workflowDef); + + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); + assertNotNull(workflow); + + Task task = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + String taskId = task.getTaskId(); + task.setStatus(Status.IN_PROGRESS); + task.setCallbackAfterSeconds(5L); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(1, workflow.getTasks().size()); + + // task should not be available + task = workflowExecutionService.poll("junit_task_1", "test"); + assertNull(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(1, workflow.getTasks().size()); + + Uninterruptibles.sleepUninterruptibly(5, 
TimeUnit.SECONDS); + + task = workflowExecutionService.poll("junit_task_1", "test"); + assertNotNull(task); + assertEquals(taskId, task.getTaskId()); + + task.setStatus(Status.COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(2, workflow.getTasks().size()); + + task = workflowExecutionService.poll("junit_task_2", "test"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + taskId = task.getTaskId(); + task.setStatus(Status.IN_PROGRESS); + task.setCallbackAfterSeconds(5L); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(2, workflow.getTasks().size()); + + // task should not be available + task = workflowExecutionService.poll("junit_task_1", "test"); + assertNull(task); + + Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); + + task = workflowExecutionService.poll("junit_task_2", "test"); + assertNotNull(task); + assertEquals(taskId, task.getTaskId()); + + task.setStatus(Status.COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(2, workflow.getTasks().size()); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + } + + //@Test + public void testRateLimiting() throws Exception { + + TaskDef td = new TaskDef(); + td.setName("eventX1"); + td.setTimeoutSeconds(1); + td.setConcurrentExecLimit(1); + + metadataService.registerTaskDef(Arrays.asList(td)); + + WorkflowDef def = new WorkflowDef(); + def.setName("test_rate_limit"); + def.setSchemaVersion(2); + + WorkflowTask event = new WorkflowTask(); + event.setType("USER_TASK"); + event.setName("eventX1"); + event.setTaskReferenceName("event0"); + event.setSink("conductor"); + + def.getTasks().add(event); + metadataService.registerWorkflowDef(def); + + Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(() -> { + queueDAO.processUnacks("USER_TASK"); + }, 2, 2, TimeUnit.SECONDS); + + String[] ids = new String[100]; + ExecutorService es = Executors.newFixedThreadPool(10); + for (int i = 0; i < 10; i++) { + final int index = i; + es.submit(() -> { + try { + String id = startOrLoadWorkflowExecution(def.getName(), def.getVersion(), "", new HashMap<>()); + ids[index] = id; + } catch (Exception e) { + e.printStackTrace(); + } + + }); + } + Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS); + for (int i = 0; i < 10; i++) { + String id = ids[i]; + Workflow workflow = workflowExecutor.getWorkflow(id, true); + assertNotNull(workflow); + assertEquals(1, workflow.getTasks().size()); + + Task eventTask = workflow.getTasks().get(0); + assertEquals(COMPLETED, eventTask.getStatus()); + assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); + assertTrue(!eventTask.getOutputData().isEmpty()); + assertNotNull(eventTask.getOutputData().get("event_produced")); + } + } + + private void createSubWorkflow() throws Exception { + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_5"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("a1"); + + WorkflowTask wft2 = new WorkflowTask(); + wft2.setName("subWorkflowTask"); + 
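+ // A SUB_WORKFLOW task delegates execution to another workflow definition;
+ // the SubWorkflowParams set below names LINEAR_WORKFLOW_T1_T2 as the target.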
wft2.setType(TaskType.SUB_WORKFLOW.name()); + SubWorkflowParams swp = new SubWorkflowParams(); + swp.setName(LINEAR_WORKFLOW_T1_T2); + wft2.setSubWorkflowParam(swp); + Map ip2 = new HashMap<>(); + ip2.put("test", "test value"); + ip2.put("param1", "sub workflow input param1"); + wft2.setInputParameters(ip2); + wft2.setTaskReferenceName("a2"); + + WorkflowTask wft3 = new WorkflowTask(); + wft3.setName("junit_task_6"); + Map ip3 = new HashMap<>(); + ip3.put("p1", "${workflow.input.param1}"); + ip3.put("p2", "${workflow.input.param2}"); + wft3.setInputParameters(ip3); + wft3.setTaskReferenceName("a3"); + + WorkflowDef main = new WorkflowDef(); + main.setSchemaVersion(2); + main.setInputParameters(Arrays.asList("param1", "param2")); + main.setName(WF_WITH_SUB_WF); + main.getTasks().addAll(Arrays.asList(wft1, wft2, wft3)); + + metadataService.updateWorkflowDef(Collections.singletonList(main)); + + } + + private void verify(String inputParam1, String wfid, String task1Op, boolean fail) throws Exception { + Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + + String task2Input = (String) task.getInputData().get("tp2"); + assertNotNull(task2Input); + assertEquals(task1Op, task2Input); + task2Input = (String) task.getInputData().get("tp1"); + assertNotNull(task2Input); + assertEquals(inputParam1, task2Input); + if (fail) { + task.setStatus(FAILED); + task.setReasonForIncompletion("failure...0"); + } else { + task.setStatus(COMPLETED); + } + + workflowExecutionService.updateTask(task); + + Workflow es = workflowExecutionService.getExecutionStatus(wfid, false); + assertNotNull(es); + if (fail) { + assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + } else { + assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); + } + } + + @Before + public void flushAllTaskQueues() { + queueDAO.queuesDetail().keySet().forEach(queueName -> { + queueDAO.flush(queueName); + }); + + if (taskDefs == null) { + return; + } + for (TaskDef td : taskDefs) { + queueDAO.flush(td.getName()); + } + } + + private void createWorkflowDefForDomain() { + WorkflowDef defSW = new WorkflowDef(); + defSW.setName(LINEAR_WORKFLOW_T1_T2_SW); + defSW.setDescription(defSW.getName()); + defSW.setVersion(1); + defSW.setInputParameters(Arrays.asList("param1", "param2")); + Map outputParameters = new HashMap<>(); + outputParameters.put("o1", "${workflow.input.param1}"); + outputParameters.put("o2", "${t2.output.uuid}"); + outputParameters.put("o3", "${t1.output.op}"); + defSW.setOutputParameters(outputParameters); + defSW.setFailureWorkflow("$workflow.input.failureWfName"); + defSW.setSchemaVersion(2); + LinkedList wftasks = new LinkedList<>(); + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("junit_task_3"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("t1"); + + WorkflowTask subWorkflow = new WorkflowTask(); + subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); + SubWorkflowParams sw = new SubWorkflowParams(); + sw.setName(LINEAR_WORKFLOW_T1_T2); + subWorkflow.setSubWorkflowParam(sw); + subWorkflow.setTaskReferenceName("sw1"); + + wftasks.add(wft1); + wftasks.add(subWorkflow); + defSW.setTasks(wftasks); + + try { + metadataService.updateWorkflowDef(defSW); + } catch (Exception e) { + } + } + + private void createWFWithResponseTimeout() throws Exception { + TaskDef task = 
new TaskDef(); + task.setName("task_rt"); + task.setTimeoutSeconds(120); + task.setRetryCount(RETRY_COUNT); + task.setRetryDelaySeconds(0); + task.setResponseTimeoutSeconds(10); + metadataService.registerTaskDef(Collections.singletonList(task)); + + WorkflowDef def = new WorkflowDef(); + def.setName("RTOWF"); + def.setDescription(def.getName()); + def.setVersion(1); + def.setInputParameters(Arrays.asList("param1", "param2")); + Map outputParameters = new HashMap<>(); + outputParameters.put("o1", "${workflow.input.param1}"); + outputParameters.put("o2", "${t2.output.uuid}"); + outputParameters.put("o3", "${t1.output.op}"); + def.setOutputParameters(outputParameters); + def.setFailureWorkflow("$workflow.input.failureWfName"); + def.setSchemaVersion(2); + LinkedList wftasks = new LinkedList<>(); + + WorkflowTask wft1 = new WorkflowTask(); + wft1.setName("task_rt"); + Map ip1 = new HashMap<>(); + ip1.put("p1", "${workflow.input.param1}"); + ip1.put("p2", "${workflow.input.param2}"); + wft1.setInputParameters(ip1); + wft1.setTaskReferenceName("task_rt_t1"); + + WorkflowTask wft2 = new WorkflowTask(); + wft2.setName("junit_task_2"); + Map ip2 = new HashMap<>(); + ip2.put("tp1", "${workflow.input.param1}"); + ip2.put("tp2", "${t1.output.op}"); + wft2.setInputParameters(ip2); + wft2.setTaskReferenceName("t2"); + + wftasks.add(wft1); + wftasks.add(wft2); + def.setTasks(wftasks); + + metadataService.updateWorkflowDef(def); + } + + private String runWorkflowWithSubworkflow() throws Exception { + clearWorkflows(); + createWorkflowDefForDomain(); + + metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); + + String correlationId = "unit_test_sw"; + Map input = new HashMap<>(); + String inputParam1 = "p1 value"; + input.put("param1", inputParam1); + input.put("param2", "p2 value"); + + String workflowId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null); + System.out.println("testSimpleWorkflow.wfid=" + workflowId); + assertNotNull(workflowId); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
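+ // The remainder of this helper drives the workflow to completion: completing
+ // junit_task_3 schedules the SUB_WORKFLOW task, which spawns a LINEAR_WORKFLOW_T1_T2
+ // execution; its junit_task_1 and junit_task_2 are then polled and completed
+ // until both the sub workflow and the parent are asserted COMPLETED.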
+ + // Poll for first task and execute it + Task task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + task.getOutputData().put("op", "junit_task_3.done"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + + // Get the sub workflow id + String subWorkflowId = null; + for (Task t : workflow.getTasks()) { + if (t.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) { + subWorkflowId = t.getOutputData().get("subWorkflowId").toString(); + } + } + assertNotNull(subWorkflowId); + + Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); + assertEquals(1, subWorkflow.getTasks().size()); + + // Now the Sub workflow is triggered + // Poll for first task of the sub workflow and execute it + task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + task.getOutputData().put("op", "junit_task_1.done"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); + assertEquals(2, subWorkflow.getTasks().size()); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + + // Poll for second task of the sub workflow and execute it + task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); + assertNotNull(task); + assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + task.getOutputData().put("op", "junit_task_2.done"); + task.setStatus(COMPLETED); + workflowExecutionService.updateTask(task); + + // Now the sub workflow and the main workflow must have finished + subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); + assertNotNull(subWorkflow); + assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); + assertEquals(2, subWorkflow.getTasks().size()); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); + + return workflowId; + } + + private String runAFailedForkJoinWF() throws Exception { + try { + this.createForkJoinWorkflowWithZeroRetry(); + } catch (Exception e) { + } + + Map input = new HashMap<>(); + String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF + "_2", 1, "fanouttest", input); + System.out.println("testForkJoin.wfid=" + workflowId); + Task t1 = workflowExecutionService.poll("junit_task_0_RT_1", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); + + Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); + assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); + assertNotNull(t1); + assertNotNull(t2); + + t1.setStatus(COMPLETED); + 
workflowExecutionService.updateTask(t1); + + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + printTaskStatuses(workflow, "Initial"); + + t2.setStatus(FAILED); + + ExecutorService executorService = Executors.newFixedThreadPool(2); + Future future1 = executorService.submit(() -> { + try { + workflowExecutionService.updateTask(t2); + } catch (Exception e) { + throw new RuntimeException(e); + } + + }); + future1.get(); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); + + return workflowId; + } + + private void printTaskStatuses(String wfid, String message) throws Exception { + Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); + assertNotNull(wf); + printTaskStatuses(wf, message); + } + + private String startOrLoadWorkflowExecution(String linearWorkflowT1T2, int i, String correlationId, Map input, Map taskToDomain) { + return startOrLoadWorkflowExecution(linearWorkflowT1T2, i, correlationId, input, null, taskToDomain); + } + + private String startOrLoadWorkflowExecution(String linearWorkflowT1T2, int i, String correlationId, Map input) { + return startOrLoadWorkflowExecution(linearWorkflowT1T2, i, correlationId, input, null); + } + + private String startOrLoadWorkflowExecution(String linearWorkflowT1T2Sw, int i, String correlationId, Map input, String event, Map taskToDomain) { + return startOrLoadWorkflowExecution(linearWorkflowT1T2Sw, linearWorkflowT1T2Sw, i, correlationId, input, event, taskToDomain); + } + + abstract String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain); + + private boolean printWFTaskDetails = false; + + private void printTaskStatuses(Workflow wf, String message) throws Exception { + if (printWFTaskDetails) { + System.out.println(message + " >>> Workflow status " + wf.getStatus().name()); + wf.getTasks().forEach(t -> { + System.out.println("Task " + String.format("%-15s", t.getTaskType()) + "\t" + String.format("%-15s", t.getReferenceTaskName()) + "\t" + String.format("%-15s", t.getWorkflowTask().getType()) + "\t" + t.getSeq() + "\t" + t.getStatus() + "\t" + t.getTaskId()); + }); + System.out.println(); + } + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java new file mode 100644 index 0000000000..7a17321a7f --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java @@ -0,0 +1,114 @@ +/** + * Copyright 2016 Netflix, Inc. + *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ *
+ */
+package com.netflix.conductor.tests.integration;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.io.Resources;
+import com.netflix.conductor.common.run.Workflow;
+import com.netflix.conductor.core.config.Configuration;
+import com.netflix.conductor.core.execution.WorkflowExecutor;
+import com.netflix.conductor.core.utils.IDGenerator;
+import com.netflix.conductor.dao.ExecutionDAO;
+import com.netflix.conductor.tests.utils.TestRunner;
+import org.apache.commons.io.Charsets;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import javax.inject.Inject;
+import java.util.Map;
+
+import static org.junit.Assert.fail;
+
+@RunWith(TestRunner.class)
+public class WorkflowLegacyMigrationTest extends AbstractWorkflowServiceTest {
+
+    private static final String WORKFLOW_SCENARIOS_PATH_PREFIX = "/integration/scenarios/";
+    private static final String WORKFLOW_SCENARIO_EXTENSION = ".json";
+
+    @Inject
+    private ExecutionDAO executionDAO;
+
+    @Inject
+    private ObjectMapper objectMapper;
+
+    @Inject
+    private Configuration configuration;
+
+    @Before
+    public void init() throws Exception {
+        super.init();
+    }
+
+    // Loads a serialized workflow snapshot from the test resources, substituting
+    // a freshly generated id for the WORKFLOW_INSTANCE_ID placeholder.
+    private Workflow loadWorkflow(String resourcePath) throws Exception {
+
+        String content = Resources.toString(WorkflowLegacyMigrationTest.class.getResource(resourcePath), Charsets.UTF_8);
+        String workflowId = IDGenerator.generate();
+        content = content.replace("WORKFLOW_INSTANCE_ID", workflowId);
+
+        Workflow workflow = objectMapper.readValue(content, Workflow.class);
+        workflow.setWorkflowId(workflowId);
+
+        return workflow;
+    }
+
+    // Instead of starting a fresh execution, seed the stores with a pre-built
+    // legacy snapshot and push its tasks onto the queues for the decider.
+    @Override
+    String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain) {
+        Workflow workflow = null;
+        try {
+            workflow = loadWorkflow(getWorkflowResourcePath(snapshotResourceName));
+        } catch (Exception e) {
+            fail("Error loading workflow scenario " + snapshotResourceName);
+        }
+
+        final String workflowId = workflow.getWorkflowId();
+
+        workflow.setCorrelationId(correlationId);
+        workflow.setInput(input);
+        workflow.setEvent(event);
+        workflow.setTaskToDomain(taskToDomain);
+        workflow.setVersion(version);
+
+        workflow.getTasks().stream().forEach(task -> {
+            task.setTaskId(IDGenerator.generate());
+            task.setWorkflowInstanceId(workflowId);
+            task.setCorrelationId(correlationId);
+        });
+
+        executionDAO.createTasks(workflow.getTasks());
+        executionDAO.createWorkflow(workflow);
+        workflow.getTasks().stream().forEach(task -> {
+            workflowExecutor.addTaskToQueue(task);
+            queueDAO.push(WorkflowExecutor.DECIDER_QUEUE, workflowId, configuration.getSweepFrequency());
+        });
+
+        return workflow.getWorkflowId();
+    }
+
+    private String getWorkflowResourcePath(String workflowName) {
+        return WORKFLOW_SCENARIOS_PATH_PREFIX + workflowName + WORKFLOW_SCENARIO_EXTENSION;
+    }
+
+    @Ignore
+    @Test
+    @Override
+    public void testForkJoinNestedWithSubWorkflow() {
+    }
+}
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java
b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index 57847539b6..d660c291c7 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -84,4076 +84,12 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -/** - * @author Viren - */ @RunWith(TestRunner.class) -public class WorkflowServiceTest { - - - private static final Logger logger = LoggerFactory.getLogger(WorkflowServiceTest.class); - - private static final String COND_TASK_WF = "ConditionalTaskWF"; - - private static final String FORK_JOIN_NESTED_WF = "FanInOutNestedTest"; - - private static final String FORK_JOIN_WF = "FanInOutTest"; - - private static final String DYNAMIC_FORK_JOIN_WF = "DynamicFanInOutTest"; - - private static final String DYNAMIC_FORK_JOIN_WF_LEGACY = "DynamicFanInOutTestLegacy"; - - private static final int RETRY_COUNT = 1; - private static final String JUNIT_TEST_WF_NON_RESTARTABLE = "junit_test_wf_non_restartable"; - private static final String WF_WITH_SUB_WF = "WorkflowWithSubWorkflow"; - - @Rule - public final ExpectedException expectedException = ExpectedException.none(); - - @Inject - private ExecutionService workflowExecutionService; - - @Inject - private SubWorkflow subworkflow; - - @Inject - private MetadataService metadataService; - - @Inject - private WorkflowSweeper workflowSweeper; - - @Inject - private QueueDAO queueDAO; - - @Inject - private WorkflowExecutor workflowExecutor; - - @Inject - private MetadataMapperService metadataMapperService; - - private static boolean registered; - - private static List taskDefs; - - private static final String LINEAR_WORKFLOW_T1_T2 = "junit_test_wf"; - - private static final String LINEAR_WORKFLOW_T1_T2_SW = "junit_test_wf_sw"; - - private static final String LONG_RUNNING = "longRunningWf"; - - private static final String TEST_WORKFLOW_NAME_3 = "junit_test_wf3"; - - @Before - public void init() throws Exception { - System.setProperty("EC2_REGION", "us-east-1"); - System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); - if (registered) { - return; - } - - - WorkflowContext.set(new WorkflowContext("junit_app")); - for (int i = 0; i < 21; i++) { - - String name = "junit_task_" + i; - if (metadataService.getTaskDef(name) != null) { - continue; - } - - TaskDef task = new TaskDef(); - task.setName(name); - task.setTimeoutSeconds(120); - task.setRetryCount(RETRY_COUNT); - metadataService.registerTaskDef(Collections.singletonList(task)); - } - - for (int i = 0; i < 5; i++) { - - String name = "junit_task_0_RT_" + i; - if (metadataService.getTaskDef(name) != null) { - continue; - } - - TaskDef task = new TaskDef(); - task.setName(name); - task.setTimeoutSeconds(120); - task.setRetryCount(0); - metadataService.registerTaskDef(Collections.singletonList(task)); - } - - TaskDef task = new TaskDef(); - task.setName("short_time_out"); - task.setTimeoutSeconds(5); - task.setRetryCount(RETRY_COUNT); - metadataService.registerTaskDef(Collections.singletonList(task)); - - WorkflowDef def = new WorkflowDef(); - def.setName(LINEAR_WORKFLOW_T1_T2); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", 
"${t1.output.op}"); - def.setOutputParameters(outputParameters); - def.setFailureWorkflow("$workflow.input.failureWfName"); - def.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(wft2); - def.setTasks(wftasks); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip3.put("tp1", "${workflow.input.param1}"); - ip3.put("tp2", "${t1.output.op}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("t3"); - - WorkflowDef def2 = new WorkflowDef(); - def2.setName(TEST_WORKFLOW_NAME_3); - def2.setDescription(def2.getName()); - def2.setVersion(1); - def2.setInputParameters(Arrays.asList("param1", "param2")); - LinkedList wftasks2 = new LinkedList<>(); - - wftasks2.add(wft1); - wftasks2.add(wft2); - wftasks2.add(wft3); - def2.setSchemaVersion(2); - def2.setTasks(wftasks2); - - try { - - WorkflowDef[] wdsf = new WorkflowDef[]{def, def2}; - for (WorkflowDef wd : wdsf) { - metadataService.updateWorkflowDef(wd); - } - createForkJoinWorkflow(); - def.setName(LONG_RUNNING); - metadataService.updateWorkflowDef(def); - } catch (Exception e) { - } - - taskDefs = metadataService.getTaskDefs(); - - registered = true; - } - - - @Test - public void testWorkflowWithNoTasks() throws Exception { - - WorkflowDef empty = new WorkflowDef(); - empty.setName("empty_workflow"); - empty.setSchemaVersion(2); - metadataService.registerWorkflowDef(empty); - - String id = workflowExecutor.startWorkflow(empty.getName(), 1, "testWorkflowWithNoTasks", new HashMap<>()); - assertNotNull(id); - Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(0, workflow.getTasks().size()); - } - - @Test - public void testTaskDefTemplate() throws Exception { - - System.setProperty("STACK2", "test_stack"); - TaskDef templatedTask = new TaskDef(); - templatedTask.setName("templated_task"); - Map httpRequest = new HashMap<>(); - httpRequest.put("method", "GET"); - httpRequest.put("vipStack", "${STACK2}"); - httpRequest.put("uri", "/get/something"); - Map body = new HashMap<>(); - body.put("inputPaths", Arrays.asList("${workflow.input.path1}", "${workflow.input.path2}")); - body.put("requestDetails", "${workflow.input.requestDetails}"); - body.put("outputPath", "${workflow.input.outputPath}"); - httpRequest.put("body", body); - templatedTask.getInputTemplate().put("http_request", httpRequest); - metadataService.registerTaskDef(Arrays.asList(templatedTask)); - - WorkflowDef templateWf = new WorkflowDef(); - templateWf.setName("template_workflow"); - WorkflowTask wft = new WorkflowTask(); - wft.setName(templatedTask.getName()); - wft.setWorkflowTaskType(TaskType.SIMPLE); - wft.setTaskReferenceName("t0"); - templateWf.getTasks().add(wft); - templateWf.setSchemaVersion(2); - metadataService.registerWorkflowDef(templateWf); - - Map requestDetails = new HashMap<>(); - requestDetails.put("key1", "value1"); - 
requestDetails.put("key2", 42); - - Map input = new HashMap<>(); - input.put("path1", "file://path1"); - input.put("path2", "file://path2"); - input.put("outputPath", "s3://bucket/outputPath"); - input.put("requestDetails", requestDetails); - - String id = workflowExecutor.startWorkflow(templateWf.getName(), 1, "testTaskDefTemplate", input); - assertNotNull(id); - Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); - assertNotNull(workflow); - assertTrue(workflow.getReasonForIncompletion(), !workflow.getStatus().isTerminal()); - assertEquals(1, workflow.getTasks().size()); - Task task = workflow.getTasks().get(0); - Map taskInput = task.getInputData(); - assertNotNull(taskInput); - assertTrue(taskInput.containsKey("http_request")); - assertTrue(taskInput.get("http_request") instanceof Map); - - ObjectMapper om = new ObjectMapper(); - - //Use the commented sysout to get the string value - //System.out.println(om.writeValueAsString(om.writeValueAsString(taskInput))); - String expected = "{\"http_request\":{\"method\":\"GET\",\"vipStack\":\"test_stack\",\"body\":{\"requestDetails\":{\"key1\":\"value1\",\"key2\":42},\"outputPath\":\"s3://bucket/outputPath\",\"inputPaths\":[\"file://path1\",\"file://path2\"]},\"uri\":\"/get/something\"}}"; - assertEquals(expected, om.writeValueAsString(taskInput)); - } - - - @Test - public void testWorkflowSchemaVersion() throws Exception { - WorkflowDef ver2 = new WorkflowDef(); - ver2.setSchemaVersion(2); - ver2.setName("Test_schema_version2"); - ver2.setVersion(1); - - WorkflowDef ver1 = new WorkflowDef(); - ver1.setName("Test_schema_version1"); - ver1.setVersion(1); - - metadataService.updateWorkflowDef(ver1); - metadataService.updateWorkflowDef(ver2); - - WorkflowDef found = metadataService.getWorkflowDef(ver2.getName(), 1).get(); - assertEquals(2, found.getSchemaVersion()); - - WorkflowDef found1 = metadataService.getWorkflowDef(ver1.getName(), 1).get(); - assertEquals(1, found1.getSchemaVersion()); - - } - - @Test - public void testForkJoin() throws Exception { - try { - createForkJoinWorkflow(); - } catch (Exception e) { - } - String taskName = "junit_task_1"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_2"; - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_3"; - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - taskName = "junit_task_4"; - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(0); - metadataService.updateTaskDef(taskDef); - - Map input = new HashMap<>(); - String workflowId = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, "fanouttest", input); - System.out.println("testForkJoin.wfid=" + workflowId); - printTaskStatuses(workflowId, "initiated"); - - Task task1 = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task1); - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - - Task task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task2); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - - Task task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNull(task3); - - 
task1.setStatus(COMPLETED); - workflowExecutionService.updateTask(task1); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); - printTaskStatuses(workflow, "T1 completed"); - - task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNotNull(task3); - - task2.setStatus(COMPLETED); - task3.setStatus(COMPLETED); - - ExecutorService executorService = Executors.newFixedThreadPool(2); - Future future1 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(task2); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future1.get(); - - final Task _t3 = task3; - Future future2 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(_t3); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future2.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - printTaskStatuses(workflow, "T2 T3 completed"); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertTrue("Found " + workflow.getTasks().stream().map(t -> t.getReferenceTaskName() + "." + t.getStatus()).collect(Collectors.toList()), workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t4"))); - - Task t4 = workflowExecutionService.poll("junit_task_4", "test"); - assertNotNull(t4); - t4.setStatus(COMPLETED); - workflowExecutionService.updateTask(t4); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - printTaskStatuses(workflow, "All completed"); - } - - @Test - public void testForkJoinNested() throws Exception { - - createForkJoinNestedWorkflow(); - - Map input = new HashMap<>(); - input.put("case", "a"); //This should execute t16 and t19 - String wfid = workflowExecutor.startWorkflow(FORK_JOIN_NESTED_WF, 1, "fork_join_nested_test", input); - System.out.println("testForkJoinNested.wfid=" + wfid); - - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t13"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("sw1"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork1"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("fork2"))); - - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t1"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t2"))); - - - Task t1 = 
workflowExecutionService.poll("junit_task_11", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_12", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_13", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - - assertNotNull(t1); - assertNotNull(t2); - assertNotNull(t3); - - t1.setStatus(COMPLETED); - t2.setStatus(COMPLETED); - t3.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - workflowExecutionService.updateTask(t2); - workflowExecutionService.updateTask(t3); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t16"))); - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t14"))); - - String[] tasks = new String[]{"junit_task_1", "junit_task_2", "junit_task_14", "junit_task_16"}; - for (String tt : tasks) { - Task polled = workflowExecutionService.poll(tt, "test"); - assertNotNull("poll resulted empty for task: " + tt, polled); - polled.setStatus(COMPLETED); - workflowExecutionService.updateTask(polled); - } - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - - assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet - assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t20"))); //Not there yet - - Task task19 = workflowExecutionService.poll("junit_task_19", "test"); - assertNotNull(task19); - task19.setStatus(COMPLETED); - workflowExecutionService.updateTask(task19); - - Task task20 = workflowExecutionService.poll("junit_task_20", "test"); - assertNotNull(task20); - task20.setStatus(COMPLETED); - workflowExecutionService.updateTask(task20); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); - - Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); - - pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); - assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); - Task task15 = workflowExecutionService.poll("junit_task_15", "test"); - assertNotNull(task15); - task15.setStatus(COMPLETED); - workflowExecutionService.updateTask(task15); - - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - - } - - @Test - public void testForkJoinFailure() throws Exception { - - try { - createForkJoinWorkflow(); - } catch (Exception e) { - } - - String taskName = "junit_task_2"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - int retryCount = taskDef.getRetryCount(); - taskDef.setRetryCount(0); - 
metadataService.updateTaskDef(taskDef); - - - Map input = new HashMap(); - String wfid = workflowExecutor.startWorkflow(FORK_JOIN_WF, 1, "fanouttest", input); - System.out.println("testForkJoinFailure.wfid=" + wfid); - - Task t1 = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(t1); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_1", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - - Task t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNull(t3); - - assertNotNull(t1); - assertNotNull(t2); - t1.setStatus(FAILED); - t2.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t2); - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals("Found " + wf.getTasks(), WorkflowStatus.RUNNING, wf.getStatus()); - - t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertNotNull(t3); - - - workflowExecutionService.updateTask(t1); - wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals("Found " + wf.getTasks(), WorkflowStatus.FAILED, wf.getStatus()); - - - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(retryCount); - metadataService.updateTaskDef(taskDef); - } - - @SuppressWarnings("unchecked") - @Test - public void testDynamicForkJoinLegacy() throws Exception { - - try { - createDynamicForkJoinWorkflowDefsLegacy(); - } catch (Exception e) { - } - - Map input = new HashMap(); - String wfid = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input); - System.out.println("testDynamicForkJoinLegacy.wfid=" + wfid); - - Task t1 = workflowExecutionService.poll("junit_task_1", "test"); - //assertTrue(ess.ackTaskRecieved(t1.getTaskId(), "test")); - - DynamicForkJoinTaskList dynamicForkJoinTasks = new DynamicForkJoinTaskList(); - - input = new HashMap(); - input.put("k1", "v1"); - dynamicForkJoinTasks.add("junit_task_2", null, "xdt1", input); - - HashMap input2 = new HashMap(); - input2.put("k2", "v2"); - dynamicForkJoinTasks.add("junit_task_3", null, "xdt2", input2); - - t1.getOutputData().put("dynamicTasks", dynamicForkJoinTasks); - t1.setStatus(COMPLETED); - - workflowExecutionService.updateTask(t1); - - Task t2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - assertEquals("xdt1", t2.getReferenceTaskName()); - assertTrue(t2.getInputData().containsKey("k1")); - assertEquals("v1", t2.getInputData().get("k1")); - Map output = new HashMap(); - output.put("ok1", "ov1"); - t2.setOutputData(output); - t2.setStatus(COMPLETED); - workflowExecutionService.updateTask(t2); - - Task t3 = workflowExecutionService.poll("junit_task_3", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId())); - assertEquals("xdt2", t3.getReferenceTaskName()); - assertTrue(t3.getInputData().containsKey("k2")); - assertEquals("v2", t3.getInputData().get("k2")); - - output = new HashMap<>(); - output.put("ok1", "ov1"); - t3.setOutputData(output); - t3.setStatus(COMPLETED); - workflowExecutionService.updateTask(t3); - - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - assertEquals(WorkflowStatus.COMPLETED, wf.getStatus()); - - // Check the output - Task joinTask = wf.getTaskByRefName("dynamicfanouttask_join"); - assertEquals("Found:" + joinTask.getOutputData(), 2, 
joinTask.getOutputData().keySet().size()); - Set joinTaskOutput = joinTask.getOutputData().keySet(); - System.out.println("joinTaskOutput=" + joinTaskOutput); - for (String key : joinTask.getOutputData().keySet()) { - assertTrue(key.equals("xdt1") || key.equals("xdt2")); - assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); - } - } - - @SuppressWarnings("unchecked") - @Test - public void testDynamicForkJoin() throws Exception { - - createDynamicForkJoinWorkflowDefs(); - - String taskName = "junit_task_2"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - int retryCount = taskDef.getRetryCount(); - taskDef.setRetryCount(2); - taskDef.setRetryDelaySeconds(0); - taskDef.setRetryLogic(RetryLogic.FIXED); - metadataService.updateTaskDef(taskDef); - - Map workflowInput = new HashMap<>(); - String workflowId = workflowExecutor.startWorkflow(DYNAMIC_FORK_JOIN_WF, 1, "dynfanouttest1", workflowInput); - System.out.println("testDynamicForkJoin.wfid=" + workflowId); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); - - Task task1 = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task1); - assertTrue(workflowExecutionService.ackTaskReceived(task1.getTaskId())); - assertEquals("dt1", task1.getReferenceTaskName()); - - Map inputParams2 = new HashMap<>(); - inputParams2.put("k1", "v1"); - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - workflowTask2.setTaskReferenceName("xdt1"); - - Map inputParams3 = new HashMap<>(); - inputParams3.put("k2", "v2"); - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - workflowTask3.setTaskReferenceName("xdt2"); - - HashMap dynamicTasksInput = new HashMap<>(); - dynamicTasksInput.put("xdt1", inputParams2); - dynamicTasksInput.put("xdt2", inputParams3); - task1.getOutputData().put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); - task1.getOutputData().put("dynamicTasksInput", dynamicTasksInput); - task1.setStatus(COMPLETED); - - workflowExecutionService.updateTask(task1); - workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 5, workflow.getTasks().size()); - - Task task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals("xdt1", task2.getReferenceTaskName()); - assertTrue(task2.getInputData().containsKey("k1")); - assertEquals("v1", task2.getInputData().get("k1")); - Map output = new HashMap<>(); - output.put("ok1", "ov1"); - task2.setOutputData(output); - task2.setStatus(FAILED); - workflowExecutionService.updateTask(task2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).count()); - assertTrue(workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).allMatch(t -> t.getWorkflowTask() != null)); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, 
workflow.getTasks().size()); - - task2 = workflowExecutionService.poll("junit_task_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task2.getTaskId())); - assertEquals("xdt1", task2.getReferenceTaskName()); - assertTrue(task2.getInputData().containsKey("k1")); - assertEquals("v1", task2.getInputData().get("k1")); - task2.setOutputData(output); - task2.setStatus(COMPLETED); - workflowExecutionService.updateTask(task2); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); - - Task task3 = workflowExecutionService.poll("junit_task_3", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task3.getTaskId())); - assertEquals("xdt2", task3.getReferenceTaskName()); - assertTrue(task3.getInputData().containsKey("k2")); - assertEquals("v2", task3.getInputData().get("k2")); - output = new HashMap<>(); - output.put("ok1", "ov1"); - task3.setOutputData(output); - task3.setStatus(COMPLETED); - workflowExecutionService.updateTask(task3); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); - - Task task4 = workflowExecutionService.poll("junit_task_4", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(task4.getTaskId())); - assertEquals("task4", task4.getReferenceTaskName()); - task4.setStatus(COMPLETED); - workflowExecutionService.updateTask(task4); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); - - // Check the output - Task joinTask = workflow.getTaskByRefName("dynamicfanouttask_join"); - assertEquals("Found:" + joinTask.getOutputData(), 2, joinTask.getOutputData().keySet().size()); - Set joinTaskOutput = joinTask.getOutputData().keySet(); - System.out.println("joinTaskOutput=" + joinTaskOutput); - for (String key : joinTask.getOutputData().keySet()) { - assertTrue(key.equals("xdt1") || key.equals("xdt2")); - assertEquals("ov1", ((Map) joinTask.getOutputData().get(key)).get("ok1")); - } - - // reset the task def - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(retryCount); - taskDef.setRetryDelaySeconds(1); - metadataService.updateTaskDef(taskDef); - } - - private void createForkJoinWorkflow() throws Exception { - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName(FORK_JOIN_WF); - workflowDef.setDescription(workflowDef.getName()); - workflowDef.setVersion(1); - workflowDef.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask fanoutTask = new WorkflowTask(); - fanoutTask.setType(TaskType.FORK_JOIN.name()); - fanoutTask.setTaskReferenceName("fanouttask"); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - Map inputParams1 = new HashMap<>(); - inputParams1.put("p1", "workflow.input.param1"); - inputParams1.put("p2", "workflow.input.param2"); - workflowTask1.setInputParameters(inputParams1); - 
workflowTask1.setTaskReferenceName("t1"); - - WorkflowTask workflowTask3 = new WorkflowTask(); - workflowTask3.setName("junit_task_3"); - workflowTask3.setInputParameters(inputParams1); - workflowTask3.setTaskReferenceName("t3"); - - WorkflowTask workflowTask2 = new WorkflowTask(); - workflowTask2.setName("junit_task_2"); - Map inputParams2 = new HashMap<>(); - inputParams2.put("tp1", "workflow.input.param1"); - workflowTask2.setInputParameters(inputParams2); - workflowTask2.setTaskReferenceName("t2"); - - WorkflowTask workflowTask4 = new WorkflowTask(); - workflowTask4.setName("junit_task_4"); - workflowTask4.setInputParameters(inputParams2); - workflowTask4.setTaskReferenceName("t4"); - - fanoutTask.getForkTasks().add(Arrays.asList(workflowTask1, workflowTask3)); - fanoutTask.getForkTasks().add(Collections.singletonList(workflowTask2)); - - workflowDef.getTasks().add(fanoutTask); - - WorkflowTask joinTask = new WorkflowTask(); - joinTask.setType(TaskType.JOIN.name()); - joinTask.setTaskReferenceName("fanouttask_join"); - joinTask.setJoinOn(Arrays.asList("t3", "t2")); - - workflowDef.getTasks().add(joinTask); - workflowDef.getTasks().add(workflowTask4); - metadataService.updateWorkflowDef(workflowDef); - } - - - private void createForkJoinWorkflowWithZeroRetry() throws Exception { - - WorkflowDef def = new WorkflowDef(); - def.setName(FORK_JOIN_WF + "_2"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN.name()); - fanout.setTaskReferenceName("fanouttask"); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_0_RT_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_0_RT_3"); - wft3.setInputParameters(ip1); - wft3.setTaskReferenceName("t3"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_0_RT_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - WorkflowTask wft4 = new WorkflowTask(); - wft4.setName("junit_task_0_RT_4"); - wft4.setInputParameters(ip2); - wft4.setTaskReferenceName("t4"); - - fanout.getForkTasks().add(Arrays.asList(wft1, wft3)); - fanout.getForkTasks().add(Arrays.asList(wft2)); - - def.getTasks().add(fanout); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("fanouttask_join"); - join.setJoinOn(Arrays.asList("t3", "t2")); - - def.getTasks().add(join); - def.getTasks().add(wft4); - metadataService.updateWorkflowDef(def); - - } - - private void createForkJoinNestedWorkflow() throws Exception { - - WorkflowDef def = new WorkflowDef(); - def.setName(FORK_JOIN_NESTED_WF); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - ip1.put("case", "workflow.input.case"); - - WorkflowTask[] tasks = new WorkflowTask[21]; - - for (int i = 10; i < 21; i++) { - WorkflowTask wft = new WorkflowTask(); - wft.setName("junit_task_" + i); - wft.setInputParameters(ip1); - wft.setTaskReferenceName("t" + i); - tasks[i] = wft; - } - - WorkflowTask d1 = new WorkflowTask(); - 
d1.setType(TaskType.DECISION.name()); - d1.setName("Decision"); - d1.setTaskReferenceName("d1"); - d1.setInputParameters(ip1); - d1.setDefaultCase(Arrays.asList(tasks[18], tasks[20])); - d1.setCaseValueParam("case"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("a", Arrays.asList(tasks[16], tasks[19], tasks[20])); - decisionCases.put("b", Arrays.asList(tasks[17], tasks[20])); - d1.setDecisionCases(decisionCases); - - WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); - SubWorkflowParams sw = new SubWorkflowParams(); - sw.setName(LINEAR_WORKFLOW_T1_T2); - subWorkflow.setSubWorkflowParam(sw); - subWorkflow.setTaskReferenceName("sw1"); - - WorkflowTask fork2 = new WorkflowTask(); - fork2.setType(TaskType.FORK_JOIN.name()); - fork2.setName("fork2"); - fork2.setTaskReferenceName("fork2"); - fork2.getForkTasks().add(Arrays.asList(tasks[12], tasks[14])); - fork2.getForkTasks().add(Arrays.asList(tasks[13], d1)); - - WorkflowTask join2 = new WorkflowTask(); - join2.setType(TaskType.JOIN.name()); - join2.setTaskReferenceName("join2"); - join2.setJoinOn(Arrays.asList("t14", "t20")); - - WorkflowTask fork1 = new WorkflowTask(); - fork1.setType(TaskType.FORK_JOIN.name()); - fork1.setTaskReferenceName("fork1"); - fork1.getForkTasks().add(Arrays.asList(tasks[11])); - fork1.getForkTasks().add(Arrays.asList(fork2, join2)); - fork1.getForkTasks().add(Arrays.asList(subWorkflow)); - - - WorkflowTask join1 = new WorkflowTask(); - join1.setType(TaskType.JOIN.name()); - join1.setTaskReferenceName("join1"); - join1.setJoinOn(Arrays.asList("t11", "join2", "sw1")); - - def.getTasks().add(fork1); - def.getTasks().add(join1); - def.getTasks().add(tasks[15]); - - metadataService.updateWorkflowDef(def); - - - } - - private void createDynamicForkJoinWorkflowDefs() throws Exception { - - WorkflowDef def = new WorkflowDef(); - def.setName(DYNAMIC_FORK_JOIN_WF); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask workflowTask1 = new WorkflowTask(); - workflowTask1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - workflowTask1.setInputParameters(ip1); - workflowTask1.setTaskReferenceName("dt1"); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - fanout.setTaskReferenceName("dynamicfanouttask"); - fanout.setDynamicForkTasksParam("dynamicTasks"); - fanout.setDynamicForkTasksInputParamName("dynamicTasksInput"); - fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); - fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamicfanouttask_join"); - - WorkflowTask workflowTask4 = new WorkflowTask(); - workflowTask4.setName("junit_task_4"); - workflowTask4.setTaskReferenceName("task4"); - - def.getTasks().add(workflowTask1); - def.getTasks().add(fanout); - def.getTasks().add(join); - def.getTasks().add(workflowTask4); - - metadataMapperService.populateTaskDefinitions(def); - - metadataService.updateWorkflowDef(def); - } - - @SuppressWarnings("deprecation") - private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { - - WorkflowDef def = new WorkflowDef(); - def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); - def.setDescription(def.getName()); - def.setVersion(1); - 
def.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("dt1"); - - WorkflowTask fanout = new WorkflowTask(); - fanout.setType(TaskType.FORK_JOIN_DYNAMIC.name()); - fanout.setTaskReferenceName("dynamicfanouttask"); - fanout.setDynamicForkJoinTasksParam("dynamicTasks"); - fanout.getInputParameters().put("dynamicTasks", "dt1.output.dynamicTasks"); - fanout.getInputParameters().put("dynamicTasksInput", "dt1.output.dynamicTasksInput"); - - WorkflowTask join = new WorkflowTask(); - join.setType(TaskType.JOIN.name()); - join.setTaskReferenceName("dynamicfanouttask_join"); - - def.getTasks().add(wft1); - def.getTasks().add(fanout); - def.getTasks().add(join); - - metadataMapperService.populateTaskDefinitions(def); - - metadataService.updateWorkflowDef(def); - - } - - private void createConditionalWF() throws Exception { - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_1"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "workflow.input.param1"); - ip1.put("p2", "workflow.input.param2"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - wft1.setTaskDefinition(new TaskDef("junit_task_1")); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "workflow.input.param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - wft2.setTaskDefinition(new TaskDef("junit_task_2")); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_3"); - Map ip3 = new HashMap<>(); - ip2.put("tp3", "workflow.input.param2"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("t3"); - wft3.setTaskDefinition(new TaskDef("junit_task_3")); - - WorkflowDef def2 = new WorkflowDef(); - def2.setName(COND_TASK_WF); - def2.setDescription(COND_TASK_WF); - def2.setInputParameters(Arrays.asList("param1", "param2")); - - WorkflowTask c2 = new WorkflowTask(); - c2.setType(TaskType.DECISION.name()); - c2.setCaseValueParam("case"); - c2.setName("conditional2"); - c2.setTaskReferenceName("conditional2"); - Map> dc = new HashMap<>(); - dc.put("one", Arrays.asList(wft1, wft3)); - dc.put("two", Arrays.asList(wft2)); - c2.setDecisionCases(dc); - c2.getInputParameters().put("case", "workflow.input.param2"); - - - WorkflowTask condition = new WorkflowTask(); - condition.setType(TaskType.DECISION.name()); - condition.setCaseValueParam("case"); - condition.setName("conditional"); - condition.setTaskReferenceName("conditional"); - Map> decisionCases = new HashMap<>(); - decisionCases.put("nested", Arrays.asList(c2)); - decisionCases.put("three", Arrays.asList(wft3)); - condition.setDecisionCases(decisionCases); - condition.getInputParameters().put("case", "workflow.input.param1"); - condition.getDefaultCase().add(wft2); - def2.getTasks().add(condition); - - WorkflowTask notifyTask = new WorkflowTask(); - notifyTask.setName("junit_task_4"); - notifyTask.setTaskReferenceName("junit_task_4"); - notifyTask.setTaskDefinition(new TaskDef("junit_task_4")); - - WorkflowTask finalTask = new WorkflowTask(); - finalTask.setName("finalcondition"); - finalTask.setTaskReferenceName("tf"); - finalTask.setType(TaskType.DECISION.name()); - finalTask.setCaseValueParam("finalCase"); - Map fi = new HashMap<>(); - fi.put("finalCase", "workflow.input.finalCase"); - 
-        finalTask.setInputParameters(fi);
-        finalTask.getDecisionCases().put("notify", Arrays.asList(notifyTask));
-
-        def2.getTasks().add(finalTask);
-        metadataService.updateWorkflowDef(def2);
-    }
-
-    @Test
-    public void testDefDAO() throws Exception {
-        List<TaskDef> taskDefs = metadataService.getTaskDefs();
-        assertNotNull(taskDefs);
-        assertTrue(!taskDefs.isEmpty());
-    }
-
-    @Test
-    public void testSimpleWorkflowFailureWithTerminalError() throws Exception {
-
-        clearWorkflows();
-
-        TaskDef taskDef = metadataService.getTaskDef("junit_task_1");
-        taskDef.setRetryCount(1);
-        metadataService.updateTaskDef(taskDef);
-
-        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-        assertNotNull(found);
-        Map<String, Object> outputParameters = found.getOutputParameters();
-        outputParameters.put("validationErrors", "${t1.output.ErrorMessage}");
-        metadataService.updateWorkflowDef(found);
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId);
-        assertNotNull(workflowInstanceId);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
-
-        boolean failed = false;
-        try {
-            workflowExecutor.rewind(workflowInstanceId);
-        } catch (ApplicationException ae) {
-            failed = true;
-        }
-        assertTrue(failed);
-
-        // Polling for the first task should return the same task as before
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_1", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(workflowInstanceId, task.getWorkflowInstanceId());
-
-        TaskResult taskResult = new TaskResult(task);
-        taskResult.setReasonForIncompletion("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down");
-        taskResult.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR);
-        taskResult.addOutputData("TERMINAL_ERROR", "Integration endpoint down: FOOBAR");
-        taskResult.addOutputData("ErrorMessage", "There was a terminal error");
-
-        workflowExecutionService.updateTask(taskResult);
-        workflowExecutor.decide(workflowInstanceId);
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        TaskDef junit_task_1 = metadataService.getTaskDef("junit_task_1");
-        Task t1 = es.getTaskByRefName("t1");
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.FAILED, es.getStatus());
-        assertEquals("NON TRANSIENT ERROR OCCURRED: An integration point required to complete the task is down", es.getReasonForIncompletion());
-        assertEquals(1, junit_task_1.getRetryCount()); //Configured retries at the task definition level
-        assertEquals(0, t1.getRetryCount()); //Actual retries done on the task
-        assertEquals(true, es.getOutput().containsKey("o1"));
-        assertEquals("p1 value", es.getOutput().get("o1"));
-        assertEquals(es.getOutput().get("validationErrors").toString(), "There was a terminal error");
-
-        outputParameters.remove("validationErrors");
-        metadataService.updateWorkflowDef(found);
+public class WorkflowServiceTest extends AbstractWorkflowServiceTest {
+    @Override
+    String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map<String, Object> input, String event, Map<String, String> taskToDomain) {
+        return workflowExecutor.startWorkflow(workflowName, version, correlationId, input, event, taskToDomain);
     }
-
-    @Test
-    public void testSimpleWorkflow() throws Exception {
-
-        clearWorkflows();
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String workflowInstanceId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId);
-        assertNotNull(workflowInstanceId);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
-
-        boolean failed = false;
-        try {
-            workflowExecutor.rewind(workflowInstanceId);
-        } catch (ApplicationException ae) {
-            failed = true;
-        }
-        assertTrue(failed);
-
-        // Polling for the first task should return the same task as before
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_1", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(workflowInstanceId, task.getWorkflowInstanceId());
-
-        workflowExecutor.decide(workflowInstanceId);
-
-        String task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        task = tasks.get(0);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
-        assertEquals(workflowInstanceId, task.getWorkflowInstanceId());
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, false);
-        assertNotNull(es);
-        assertNotNull(es.getOutput());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_2", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull("Found=" + task.getInputData(), task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-    }
-    @Test
-    public void testSimpleWorkflowWithResponseTimeout() throws Exception {
-
-        createWFWithResponseTimeout();
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> workflowInput = new HashMap<>();
-        String inputParam1 = "p1 value";
-        workflowInput.put("param1", inputParam1);
-        workflowInput.put("param2", "p2 value");
-        String workflowId = workflowExecutor.startWorkflow("RTOWF", 1, correlationId, workflowInput);
-        System.out.println("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId);
-        assertNotNull(workflowId);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-        assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled.
-        assertEquals(1, queueDAO.getSize("task_rt"));
-
-        // Polling for the first task should return the same task as before
-        Task task = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout");
-        assertNotNull(task);
-        assertEquals("task_rt", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(workflowId, task.getWorkflowInstanceId());
-
-        // As the task_rt is out of the queue, the next poll should not get it
-        Task nullTask = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout");
-        assertNull(nullTask);
-
-        Thread.sleep(10000);
-        workflowExecutor.decide(workflowId);
-        assertEquals(1, queueDAO.getSize("task_rt"));
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-        assertEquals(2, workflow.getTasks().size());
-
-        // Polling now should get the same task back because it should have been put back in the queue
-        Task taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker");
-        assertNotNull(taskAgain);
-
-        taskAgain.getOutputData().put("op", "task1.Done");
-        taskAgain.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(taskAgain);
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker.testTimeout");
-        assertNotNull(task);
-        assertEquals("junit_task_2", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-    }
-
-    @Test
-    public void testWorkflowRerunWithSubWorkflows() throws Exception {
-        // Execute a workflow
-        String workflowId = this.runWorkflowWithSubworkflow();
-        // Check it completed
-        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-        assertEquals(2, workflow.getTasks().size());
-
-        // Now lets pickup the first task in the sub workflow and rerun it from there
-        String subWorkflowId = null;
-        for (Task task : workflow.getTasks()) {
-            if (task.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) {
-                subWorkflowId = task.getOutputData().get("subWorkflowId").toString();
-            }
-        }
-        assertNotNull(subWorkflowId);
-        Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
-        Task subWorkflowTask1 = null;
-        for (Task task : subWorkflow.getTasks()) {
-            if (task.getTaskDefName().equalsIgnoreCase("junit_task_1")) {
-                subWorkflowTask1 = task;
-            }
-        }
-        assertNotNull(subWorkflowTask1);
-
-        RerunWorkflowRequest request = new RerunWorkflowRequest();
-        request.setReRunFromTaskId(subWorkflowTask1.getTaskId());
-
-        Map<String, Object> newInput = new HashMap<>();
-        newInput.put("p1", "1");
-        newInput.put("p2", "2");
-        request.setTaskInput(newInput);
-
-        String correlationId = "unit_test_sw_new";
-        Map<String, Object> input = new HashMap<>();
-        input.put("param1", "New p1 value");
-        input.put("param2", "New p2 value");
-        request.setCorrelationId(correlationId);
-        request.setWorkflowInput(input);
-
-        request.setReRunFromWorkflowId(workflowId);
-        request.setReRunFromTaskId(subWorkflowTask1.getTaskId());
-        // Rerun
-        workflowExecutor.rerun(request);
-
-        // The main WF and the sub WF should be in RUNNING state
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-        assertEquals(2, workflow.getTasks().size());
-        assertEquals(correlationId, workflow.getCorrelationId());
-        assertEquals("New p1 value", workflow.getInput().get("param1"));
-        assertEquals("New p2 value", workflow.getInput().get("param2"));
-
-        subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
-        assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
-        // Since we are re running from the sub workflow task, there
-        // should be only 1 task that is SCHEDULED
-        assertEquals(1, subWorkflow.getTasks().size());
-        assertEquals(Status.SCHEDULED, subWorkflow.getTasks().get(0).getStatus());
-
-        // Now execute the task
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(task.getInputData().get("p1").toString(), "1");
-        assertEquals(task.getInputData().get("p2").toString(), "2");
-        task.getOutputData().put("op", "junit_task_1.done");
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
-        assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
-        assertEquals(2, subWorkflow.getTasks().size());
-
-        // Poll for second task of the sub workflow and execute it
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        task.getOutputData().put("op", "junit_task_2.done");
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        // Now the sub workflow and the main workflow must have finished
-        subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
-        assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus());
-        assertEquals(2, subWorkflow.getTasks().size());
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-        assertEquals(2, workflow.getTasks().size());
-    }
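The rerun test above is the canonical use of RerunWorkflowRequest. A condensed sketch of just the request (all setters appear in the test; workflowId, taskId, newTaskInput, and newWorkflowInput are placeholders assumed to be in scope):

    import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest;

    // Re-run an existing execution from a specific task, with fresh inputs.
    RerunWorkflowRequest request = new RerunWorkflowRequest();
    request.setReRunFromWorkflowId(workflowId);   // execution to resume
    request.setReRunFromTaskId(taskId);           // first task to re-execute
    request.setCorrelationId("rerun_example");    // optional: new correlation id
    request.setTaskInput(newTaskInput);           // optional: replaces that task's input
    request.setWorkflowInput(newWorkflowInput);   // optional: replaces the workflow input
    workflowExecutor.rerun(request);              // everything before the task is left intact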
-    @Test
-    public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception {
-
-        clearWorkflows();
-        createWorkflowDefForDomain();
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get();
-
-        String correlationId = "unit_test_sw";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        Map<String, String> taskToDomain = new HashMap<>();
-        taskToDomain.put("junit_task_3", "domain1");
-        taskToDomain.put("junit_task_2", "domain1");
-
-        // Poll before so that a polling for this task is "active"
-        Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1");
-        assertNull(task);
-        task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain1");
-        assertNull(task);
-
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain);
-        System.out.println("testSimpleWorkflow.wfid=" + wfid);
-        assertNotNull(wfid);
-        Workflow wf = workflowExecutor.getWorkflow(wfid, false);
-        assertNotNull(wf);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
-
-        // Check Size
-        Map<String, Integer> sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain1:junit_task_3", "junit_task_3"));
-        assertEquals(sizes.get("domain1:junit_task_3").intValue(), 1);
-        assertEquals(sizes.get("junit_task_3").intValue(), 0);
-
-        // Polling for the first task should return the same task as before
-        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker");
-        assertNull(task);
-        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1");
-        assertNotNull(task);
-        assertEquals("junit_task_3", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(wfid, task.getWorkflowInstanceId());
-
-        String task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        task = tasks.get(0);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
-        assertEquals(wfid, task.getWorkflowInstanceId());
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, false);
-        assertNotNull(es);
-        assertNotNull(es.getOutput());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_1", task.getTaskType());
-        Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertNotNull(essw.getTaskToDomain());
-        assertEquals(essw.getTaskToDomain().size(), 2);
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain1");
-        assertNotNull(task);
-        assertEquals("junit_task_2", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        List<PollData> pddata = workflowExecutionService.getPollData("junit_task_3");
-        assertTrue(pddata.size() == 2);
-        for (PollData pd : pddata) {
-            assertEquals(pd.getQueueName(), "junit_task_3");
-            assertEquals(pd.getWorkerId(), "task1.junit.worker");
-            assertTrue(pd.getLastPollTime() != 0);
-            if (pd.getDomain() != null) {
-                assertEquals(pd.getDomain(), "domain1");
-            }
-        }
-
-        List<PollData> pdList = workflowExecutionService.getAllPollData();
-        int count = 0;
-        for (PollData pd : pdList) {
-            if (pd.getQueueName().equals("junit_task_3")) {
-                count++;
-            }
-        }
-        assertTrue(count == 2);
-    }
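Domain routing, as exercised above: the task-to-domain map travels with the execution and prefixes the queue name ("domain1:junit_task_3"), so only a domain-qualified poll sees the task. A fragment under the same assumptions (executor/service fields and constants as in this test class):

    Map<String, String> taskToDomain = new HashMap<>();
    taskToDomain.put("junit_task_3", "domain1");   // route one task type to a domain-scoped queue
    String wfid = workflowExecutor.startWorkflow(
            LINEAR_WORKFLOW_T1_T2_SW, 1, "corr-id", input, null, taskToDomain);

    // The plain queue stays empty; the domain-qualified poll returns the task.
    Task none = workflowExecutionService.poll("junit_task_3", "worker");             // null
    Task task = workflowExecutionService.poll("junit_task_3", "worker", "domain1");  // scheduled task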
-    @Test
-    public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception {
-
-        clearWorkflows();
-        createWorkflowDefForDomain();
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get();
-
-        String correlationId = "unit_test_sw";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        Map<String, String> taskToDomain = new HashMap<>();
-        taskToDomain.put("*", "domain11,, domain12");
-
-        // Poll before so that a polling for this task is "active"
-        Task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11");
-        assertNull(task);
-        task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain12");
-        assertNull(task);
-
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain);
-        System.out.println("testSimpleWorkflow.wfid=" + wfid);
-        assertNotNull(wfid);
-        Workflow wf = workflowExecutor.getWorkflow(wfid, false);
-        assertNotNull(wf);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled.
-
-        // Check Size
-        Map<String, Integer> sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain11:junit_task_3", "junit_task_3"));
-        assertEquals(sizes.get("domain11:junit_task_3").intValue(), 1);
-        assertEquals(sizes.get("junit_task_3").intValue(), 0);
-
-        // Polling for the first task should return the same task as before
-        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker");
-        assertNull(task);
-        task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11");
-        assertNotNull(task);
-        assertEquals("junit_task_3", task.getTaskType());
-        assertEquals("domain11", task.getDomain());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(wfid, task.getWorkflowInstanceId());
-
-        String task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        task = tasks.get(0);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
-        assertEquals(wfid, task.getWorkflowInstanceId());
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, false);
-        assertNotNull(es);
-        assertNotNull(es.getOutput());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_1", task.getTaskType());
-        Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertNotNull(essw.getTaskToDomain());
-        assertEquals(essw.getTaskToDomain().size(), 1);
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain11");
-        assertNull(task);
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain12");
-        assertNotNull(task);
-        assertEquals("junit_task_2", task.getTaskType());
-        assertEquals("domain12", task.getDomain());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-    }
-
-    @After
-    public void clearWorkflows() throws Exception {
-        List<String> workflows = metadataService.getWorkflowDefs().stream()
-                .map(WorkflowDef::getName)
-                .collect(Collectors.toList());
-        for (String wfName : workflows) {
-            List<String> running = workflowExecutionService.getRunningWorkflows(wfName);
-            for (String wfid : running) {
-                workflowExecutor.terminateWorkflow(wfid, "cleanup");
-            }
-        }
-        queueDAO.queuesDetail().keySet().forEach(queueName -> {
-            queueDAO.flush(queueName);
-        });
-    }
-    @Test
-    public void testLongRunning() throws Exception {
-
-        clearWorkflows();
-
-        metadataService.getWorkflowDef(LONG_RUNNING, 1).get();
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LONG_RUNNING, 1, correlationId, input);
-        System.out.println("testLongRunning.wfid=" + wfid);
-        assertNotNull(wfid);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        // Check the queue
-        assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1"));
-        ///
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        String param1 = (String) task.getInputData().get("p1");
-        String param2 = (String) task.getInputData().get("p2");
-
-        assertNotNull(param1);
-        assertNotNull(param2);
-        assertEquals("p1 value", param1);
-        assertEquals("p2 value", param2);
-
-        String task1Op = "task1.In.Progress";
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(Status.IN_PROGRESS);
-        task.setCallbackAfterSeconds(5);
-        workflowExecutionService.updateTask(task);
-        String taskId = task.getTaskId();
-
-        // Check the queue
-        assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1"));
-        ///
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        // Polling for next task should not return anything
-        Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNull(task2);
-
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNull(task);
-
-        Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
-        // Polling for the first task should return the same task as before
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(task.getTaskId(), taskId);
-
-        task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        assertEquals(wfid, task.getWorkflowInstanceId());
-        task = tasks.get(0);
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull(task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-    }
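The long-running pattern above in isolation: a worker that is not finished reports IN_PROGRESS with a callback, and the same task id comes back once the callback elapses. A minimal sketch under the same assumptions as the earlier one:

    import com.netflix.conductor.common.metadata.tasks.Task;
    import com.netflix.conductor.service.ExecutionService;

    // Keep a long-running task alive: report progress and ask to be re-queued later.
    static void checkpoint(ExecutionService executionService, Task task, long retryAfterSeconds) throws Exception {
        task.setStatus(Task.Status.IN_PROGRESS);          // not done yet
        task.setCallbackAfterSeconds(retryAfterSeconds);  // invisible to polls until this elapses
        executionService.updateTask(task);                // the same task id is redelivered afterwards
    }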
-    @Test
-    public void testResetWorkflowInProgressTasks() throws Exception {
-
-        clearWorkflows();
-
-        metadataService.getWorkflowDef(LONG_RUNNING, 1).get();
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LONG_RUNNING, 1, correlationId, input);
-        System.out.println("testLongRunning.wfid=" + wfid);
-        assertNotNull(wfid);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        // Check the queue
-        assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1"));
-        ///
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        String param1 = (String) task.getInputData().get("p1");
-        String param2 = (String) task.getInputData().get("p2");
-
-        assertNotNull(param1);
-        assertNotNull(param2);
-        assertEquals("p1 value", param1);
-        assertEquals("p2 value", param2);
-
-        String task1Op = "task1.In.Progress";
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(Status.IN_PROGRESS);
-        task.setCallbackAfterSeconds(3600);
-        workflowExecutionService.updateTask(task);
-        String taskId = task.getTaskId();
-
-        // Check the queue
-        assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1"));
-        ///
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        // Polling for next task should not return anything
-        Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNull(task2);
-
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNull(task);
-
-        //Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS);
-        // Reset
-        workflowExecutor.resetCallbacksForInProgressTasks(wfid);
-
-        // Now Polling for the first task should return the same task as before
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(task.getTaskId(), taskId);
-        assertEquals(task.getCallbackAfterSeconds(), 0);
-
-        task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        assertEquals(wfid, task.getWorkflowInstanceId());
-        task = tasks.get(0);
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull(task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-    }
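The escape hatch the test above verifies, in two lines (a fragment, assuming the same fields as the test):

    // Cut a long callback short: zero out callbackAfterSeconds on all IN_PROGRESS tasks
    // so they become pollable again immediately.
    workflowExecutor.resetCallbacksForInProgressTasks(wfid);
    Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
    // same task id as before the reset, and task.getCallbackAfterSeconds() == 0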
-    @Test
-    public void testConcurrentWorkflowExecutions() throws Exception {
-
-        int count = 3;
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_concurrrent";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String[] wfids = new String[count];
-
-        for (int i = 0; i < count; i++) {
-            String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-            System.out.println("testConcurrentWorkflowExecutions.wfid=" + wfid);
-            assertNotNull(wfid);
-
-            List<String> ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2);
-            assertNotNull(ids);
-            assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1
-            boolean foundId = false;
-            for (String id : ids) {
-                if (id.equals(wfid)) {
-                    foundId = true;
-                }
-            }
-            assertTrue(foundId);
-            wfids[i] = wfid;
-        }
-
-        String task1Op = "";
-        for (int i = 0; i < count; i++) {
-
-            Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-            assertNotNull(task);
-            assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-            String param1 = (String) task.getInputData().get("p1");
-            String param2 = (String) task.getInputData().get("p2");
-
-            assertNotNull(param1);
-            assertNotNull(param2);
-            assertEquals("p1 value", param1);
-            assertEquals("p2 value", param2);
-
-            task1Op = "task1.output->" + param1 + "." + param2;
-            task.getOutputData().put("op", task1Op);
-            task.setStatus(COMPLETED);
-            workflowExecutionService.updateTask(task);
-        }
-
-        for (int i = 0; i < count; i++) {
-            Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-            assertNotNull(task);
-            assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-            String task2Input = (String) task.getInputData().get("tp2");
-            assertNotNull(task2Input);
-            assertEquals(task1Op, task2Input);
-
-            task2Input = (String) task.getInputData().get("tp1");
-            assertNotNull(task2Input);
-            assertEquals(inputParam1, task2Input);
-
-            task.setStatus(COMPLETED);
-            workflowExecutionService.updateTask(task);
-        }
-
-        List<Workflow> wfs = workflowExecutionService.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false);
-        wfs.forEach(wf -> {
-            assertEquals(WorkflowStatus.COMPLETED, wf.getStatus());
-        });
-    }
-    @Test
-    public void testCaseStatements() throws Exception {
-        createConditionalWF();
-
-        String correlationId = "testCaseStatements: " + System.currentTimeMillis();
-        Map<String, Object> input = new HashMap<>();
-        String wfid;
-        String[] sequence;
-
-        //default case
-        input.put("param1", "xxx");
-        input.put("param2", "two");
-        wfid = workflowExecutor.startWorkflow(COND_TASK_WF, 1, correlationId, input);
-        System.out.println("testCaseStatements.wfid=" + wfid);
-        assertNotNull(wfid);
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        Task task = workflowExecutionService.poll("junit_task_2", "junit");
-        assertNotNull(task);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        assertEquals(3, es.getTasks().size());
-        ///
-
-        //nested - one
-        input.put("param1", "nested");
-        input.put("param2", "one");
-        wfid = workflowExecutor.startWorkflow(COND_TASK_WF, 1, correlationId, input);
-        System.out.println("testCaseStatements.wfid=" + wfid);
-        assertNotNull(wfid);
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        sequence = new String[]{"junit_task_1", "junit_task_3"};
-
-        validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_1", "junit_task_3", SystemTaskType.DECISION.name()}, 5);
-        //
-
-        //nested - two
-        input.put("param1", "nested");
-        input.put("param2", "two");
-        wfid = workflowExecutor.startWorkflow(COND_TASK_WF, 1, correlationId, input);
-        System.out.println("testCaseStatements.wfid=" + wfid);
-        assertNotNull(wfid);
-        sequence = new String[]{"junit_task_2"};
-        validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_2", SystemTaskType.DECISION.name()}, 4);
-        //
-
-        //three
-        input.put("param1", "three");
-        input.put("param2", "two");
-        input.put("finalCase", "notify");
-        wfid = workflowExecutor.startWorkflow(COND_TASK_WF, 1, correlationId, input);
-        System.out.println("testCaseStatements.wfid=" + wfid);
-        assertNotNull(wfid);
-        sequence = new String[]{"junit_task_3", "junit_task_4"};
-        validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), "junit_task_3", SystemTaskType.DECISION.name(), "junit_task_4"}, 3);
-        //
-    }
-    private void validate(String wfid, String[] sequence, String[] executedTasks, int expectedTotalTasks) throws Exception {
-        for (int i = 0; i < sequence.length; i++) {
-            String t = sequence[i];
-            Task task = getTask(t);
-            if (task == null) {
-                System.out.println("Missing task for " + t + ", below are the workflow tasks completed...");
-                Workflow workflow = workflowExecutionService.getExecutionStatus(wfid, true);
-                for (Task x : workflow.getTasks()) {
-                    System.out.println(x.getTaskType() + "/" + x.getReferenceTaskName());
-                }
-            }
-            assertNotNull("No task for " + t, task);
-            assertEquals(wfid, task.getWorkflowInstanceId());
-            task.setStatus(COMPLETED);
-            workflowExecutionService.updateTask(task);
-
-            Workflow workflow = workflowExecutionService.getExecutionStatus(wfid, true);
-            assertNotNull(workflow);
-            assertTrue(!workflow.getTasks().isEmpty());
-            if (i < sequence.length - 1) {
-                assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-            } else {
-                workflow = workflowExecutionService.getExecutionStatus(wfid, true);
-                List<Task> workflowTasks = workflow.getTasks();
-                assertEquals(workflowTasks.toString(), executedTasks.length, workflowTasks.size());
-                for (int k = 0; k < executedTasks.length; k++) {
-                    assertEquals("Tasks: " + workflowTasks.toString() + "\n", executedTasks[k], workflowTasks.get(k).getTaskType());
-                }
-
-                assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-            }
-        }
-    }
-
-    private Task getTask(String taskType) throws Exception {
-        Task task;
-        int count = 2;
-        do {
-            task = workflowExecutionService.poll(taskType, "junit");
-            if (task == null) {
-                count--;
-            }
-            if (count < 0) {
-                break;
-            }
-
-        } while (task == null);
-        if (task != null) {
-            workflowExecutionService.ackTaskReceived(task.getTaskId());
-        }
-        return task;
-    }
-
-    @Test
-    public void testRetries() throws Exception {
-
-        String taskName = "junit_task_2";
-        TaskDef taskDef = metadataService.getTaskDef(taskName);
-        taskDef.setRetryCount(2);
-        taskDef.setRetryDelaySeconds(1);
-        metadataService.updateTaskDef(taskDef);
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1";
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        System.out.println("testRetries.wfid=" + wfid);
-        assertNotNull(wfid);
-
-        List<String> ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2);
-        assertNotNull(ids);
-        assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1
-        boolean foundId = false;
-        for (String id : ids) {
-            if (id.equals(wfid)) {
-                foundId = true;
-            }
-        }
-        assertTrue(foundId);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        String param1 = (String) task.getInputData().get("p1");
-        String param2 = (String) task.getInputData().get("p2");
-
-        assertNotNull(param1);
-        assertNotNull(param2);
-        assertEquals("p1 value", param1);
-        assertEquals("p2 value", param2);
-
-        String task1Op = "task1.output->" + param1 + "." + param2;
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        //fail the task twice and then succeed
-        verify(inputParam1, wfid, task1Op, true);
-        Uninterruptibles.sleepUninterruptibly(2, TimeUnit.SECONDS);
-        verify(inputParam1, wfid, task1Op, false);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        assertEquals(3, es.getTasks().size()); //task 1, and 2 of the task 2
-
-        assertEquals("junit_task_1", es.getTasks().get(0).getTaskType());
-        assertEquals("junit_task_2", es.getTasks().get(1).getTaskType());
-        assertEquals("junit_task_2", es.getTasks().get(2).getTaskType());
-        assertEquals(COMPLETED, es.getTasks().get(0).getStatus());
-        assertEquals(FAILED, es.getTasks().get(1).getStatus());
-        assertEquals(COMPLETED, es.getTasks().get(2).getStatus());
-        assertEquals(es.getTasks().get(1).getTaskId(), es.getTasks().get(2).getRetriedTaskId());
-    }
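Retry behavior, as the test above shows, is driven entirely by the task definition. A fragment using only the setters that appear in the test:

    import com.netflix.conductor.common.metadata.tasks.TaskDef;

    TaskDef taskDef = metadataService.getTaskDef("junit_task_2");
    taskDef.setRetryCount(2);          // two automatic retries after the first failure
    taskDef.setRetryDelaySeconds(1);   // wait a second before each retry is queued
    metadataService.updateTaskDef(taskDef);
    // Each retry is a new Task whose getRetriedTaskId() points at the failed attempt.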
-    @Test
-    public void testSuccess() throws Exception {
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        List<String> ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2);
-        assertNotNull(ids);
-        assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1
-        boolean foundId = false;
-        for (String id : ids) {
-            if (id.equals(wfid)) {
-                foundId = true;
-            }
-        }
-        assertTrue(foundId);
-
-        /*
-         * @correlationId
-        List<Workflow> byCorrelationId = ess.getWorkflowInstances(LINEAR_WORKFLOW_T1_T2, correlationId, false, false);
-        assertNotNull(byCorrelationId);
-        assertTrue(!byCorrelationId.isEmpty());
-        assertEquals(1, byCorrelationId.size());
-        */
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        // The first task would be marked as scheduled
-        assertEquals(1, es.getTasks().size());
-        assertEquals(Task.Status.SCHEDULED, es.getTasks().get(0).getStatus());
-
-        // decideNow should be idempotent if re-run on the same state!
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size());
-        Task t = es.getTasks().get(0);
-        assertEquals(Status.SCHEDULED, t.getStatus());
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        assertNotNull(task);
-        assertEquals(t.getTaskId(), task.getTaskId());
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        t = es.getTasks().get(0);
-        assertEquals(Status.IN_PROGRESS, t.getStatus());
-        String taskId = t.getTaskId();
-
-        String param1 = (String) task.getInputData().get("p1");
-        String param2 = (String) task.getInputData().get("p2");
-
-        assertNotNull(param1);
-        assertNotNull(param2);
-        assertEquals("p1 value", param1);
-        assertEquals("p2 value", param2);
-
-        String task1Op = "task1.output->" + param1 + "." + param2;
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        // If we get the full workflow here then, last task should be completed and the next task should be scheduled
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        es.getTasks().forEach(wfTask -> {
-            if (wfTask.getTaskId().equals(taskId)) {
-                assertEquals(COMPLETED, wfTask.getStatus());
-            } else {
-                assertEquals(Status.SCHEDULED, wfTask.getStatus());
-            }
-        });
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertNotNull(task);
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull(task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        // Check the tasks, at this time there should be 2 task
-        assertEquals(es.getTasks().size(), 2);
-        es.getTasks().forEach(wfTask -> {
-            assertEquals(wfTask.getStatus(), COMPLETED);
-        });
-
-        System.out.println("Total tasks=" + es.getTasks().size());
-        assertTrue(es.getTasks().size() < 10);
-    }
-
-    @Test
-    public void testDeciderUpdate() throws Exception {
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        Workflow workflow = workflowExecutor.getWorkflow(wfid, false);
-        long updated1 = workflow.getUpdateTime();
-        Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
-        workflowExecutor.decide(wfid);
-        workflow = workflowExecutor.getWorkflow(wfid, false);
-        long updated2 = workflow.getUpdateTime();
-        assertEquals(updated1, updated2);
-
-        Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
-        workflowExecutor.terminateWorkflow(wfid, "done");
-        workflow = workflowExecutor.getWorkflow(wfid, false);
-        updated2 = workflow.getUpdateTime();
-        assertTrue("updated1[" + updated1 + "] >? updated2[" + updated2 + "]", updated2 > updated1);
-    }
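What testDeciderUpdate pins down is that decide() is idempotent: re-evaluating an unchanged workflow writes nothing. The core of the check, as a fragment under the same assumptions:

    long before = workflowExecutor.getWorkflow(wfid, false).getUpdateTime();
    workflowExecutor.decide(wfid);   // no pending state change to apply
    long after = workflowExecutor.getWorkflow(wfid, false).getUpdateTime();
    // before == after: an unchanged workflow is not rewritten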
-    @Test
-    @Ignore
-    //Ignore for now, will improve this in the future
-    public void testFailurePoints() throws Exception {
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        // The first task would be marked as scheduled
-        assertEquals(1, es.getTasks().size());
-        assertEquals(Task.Status.SCHEDULED, es.getTasks().get(0).getStatus());
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        String taskId = task.getTaskId();
-
-        String task1Op = "task1.output";
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        try {
-            workflowExecutionService.updateTask(task);
-        } catch (Exception e) {
-            workflowExecutionService.updateTask(task);
-        }
-
-        // If we get the full workflow here then, last task should be completed and the next task should be scheduled
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        es.getTasks().forEach(wfTask -> {
-            if (wfTask.getTaskId().equals(taskId)) {
-                assertEquals(COMPLETED, wfTask.getStatus());
-            } else {
-                assertEquals(Status.SCHEDULED, wfTask.getStatus());
-            }
-        });
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertNotNull(task);
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull(task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        // Check the tasks, at this time there should be 2 task
-        assertEquals(es.getTasks().size(), 2);
-        es.getTasks().forEach(wfTask -> {
-            assertEquals(wfTask.getStatus(), COMPLETED);
-        });
-
-        System.out.println("Total tasks=" + es.getTasks().size());
-        assertTrue(es.getTasks().size() < 10);
-    }
-
-    @Test
-    public void testDeciderMix() throws Exception {
-
-        ExecutorService executors = Executors.newFixedThreadPool(3);
-
-        metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        List<String> ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2);
-        assertNotNull(ids);
-        assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1
-        boolean foundId = false;
-        for (String id : ids) {
-            if (id.equals(wfid)) {
-                foundId = true;
-            }
-        }
-        assertTrue(foundId);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        // The first task would be marked as scheduled
-        assertEquals(1, es.getTasks().size());
-        assertEquals(Task.Status.SCHEDULED, es.getTasks().get(0).getStatus());
-
-        List<Future<Void>> futures = new LinkedList<>();
-        for (int i = 0; i < 10; i++) {
-            futures.add(executors.submit(() -> {
-                workflowExecutor.decide(wfid);
-                return null;
-            }));
-        }
-        for (Future<Void> future : futures) {
-            future.get();
-        }
-        futures.clear();
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        // The first task would be marked as scheduled
-        assertEquals(1, es.getTasks().size());
-        assertEquals(Task.Status.SCHEDULED, es.getTasks().get(0).getStatus());
-
-        // decideNow should be idempotent if re-run on the same state!
-        workflowExecutor.decide(wfid);
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size());
-        Task t = es.getTasks().get(0);
-        assertEquals(Status.SCHEDULED, t.getStatus());
-
-        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-        assertNotNull(task);
-        assertEquals(t.getTaskId(), task.getTaskId());
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        t = es.getTasks().get(0);
-        assertEquals(Status.IN_PROGRESS, t.getStatus());
-        String taskId = t.getTaskId();
-
-        String param1 = (String) task.getInputData().get("p1");
-        String param2 = (String) task.getInputData().get("p2");
-
-        assertNotNull(param1);
-        assertNotNull(param2);
-        assertEquals("p1 value", param1);
-        assertEquals("p2 value", param2);
-
-        String task1Op = "task1.output->" + param1 + "." + param2;
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        // If we get the full workflow here then, last task should be completed and the next task should be scheduled
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        es.getTasks().forEach(wfTask -> {
-            if (wfTask.getTaskId().equals(taskId)) {
-                assertEquals(COMPLETED, wfTask.getStatus());
-            } else {
-                assertEquals(Status.SCHEDULED, wfTask.getStatus());
-            }
-        });
-
-        //Run sweep 10 times!
-        for (int i = 0; i < 10; i++) {
-            futures.add(executors.submit(() -> {
-                long s = System.currentTimeMillis();
-                workflowExecutor.decide(wfid);
-                System.out.println("Took " + (System.currentTimeMillis() - s) + " ms to run decider");
-                return null;
-            }));
-        }
-        for (Future<Void> future : futures) {
-            future.get();
-        }
-        futures.clear();
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(2, es.getTasks().size());
-
-        System.out.println("Workflow tasks=" + es.getTasks());
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertNotNull(task);
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull(task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        // Check the tasks, at this time there should be 2 task
-        assertEquals(es.getTasks().size(), 2);
-        es.getTasks().forEach(wfTask -> {
-            assertEquals(wfTask.getStatus(), COMPLETED);
-        });
-
-        System.out.println("Total tasks=" + es.getTasks().size());
-        assertTrue(es.getTasks().size() < 10);
-    }
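The concurrency pattern testDeciderMix relies on, extracted into a helper (a sketch; a correct decider must schedule each task exactly once no matter how many threads race through it):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import com.netflix.conductor.core.execution.WorkflowExecutor;

    static void decideConcurrently(WorkflowExecutor executor, String wfid) throws Exception {
        ExecutorService pool = Executors.newFixedThreadPool(3);
        List<Future<?>> futures = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            futures.add(pool.submit(() -> executor.decide(wfid)));  // racing evaluations
        }
        for (Future<?> f : futures) {
            f.get();   // surface any exception thrown by a racing decide
        }
        pool.shutdown();
    }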
-    @Test
-    public void testFailures() throws Exception {
-        metadataService.getWorkflowDef(FORK_JOIN_WF, 1).get();
-
-        String taskName = "junit_task_1";
-        TaskDef taskDef = metadataService.getTaskDef(taskName);
-        taskDef.setRetryCount(0);
-        metadataService.updateTaskDef(taskDef);
-
-        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-        assertNotNull(found.getFailureWorkflow());
-        assertFalse(StringUtils.isBlank(found.getFailureWorkflow()));
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        input.put("failureWfName", "FanInOutTest");
-        String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        Task task = getTask("junit_task_1");
-        assertNotNull(task);
-        task.setStatus(FAILED);
-        workflowExecutionService.updateTask(task);
-
-        // If we get the full workflow here then, last task should be completed and the next task should be scheduled
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.FAILED, es.getStatus());
-
-        taskDef.setRetryCount(RETRY_COUNT);
-        metadataService.updateTaskDef(taskDef);
-    }
-
-    @Test
-    public void testRetryWithForkJoin() throws Exception {
-        String workflowId = this.runAFailedForkJoinWF();
-        workflowExecutor.retry(workflowId);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(workflow.getStatus(), WorkflowStatus.RUNNING);
-
-        printTaskStatuses(workflow, "After retry called");
-
-        Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test");
-        assertNotNull(t2);
-        assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId()));
-
-        Task t3 = workflowExecutionService.poll("junit_task_0_RT_3", "test");
-        assertNotNull(t3);
-        assertTrue(workflowExecutionService.ackTaskReceived(t3.getTaskId()));
-
-        t2.setStatus(COMPLETED);
-        t3.setStatus(COMPLETED);
-
-        ExecutorService es = Executors.newFixedThreadPool(2);
-        Future<?> future1 = es.submit(() -> {
-            try {
-                workflowExecutionService.updateTask(t2);
-            } catch (Exception e) {
-                throw new RuntimeException(e);
-            }
-
-        });
-        final Task _t3 = t3;
-        Future<?> future2 = es.submit(() -> {
-            try {
-                workflowExecutionService.updateTask(_t3);
-            } catch (Exception e) {
-                throw new RuntimeException(e);
-            }
-
-        });
-        future1.get();
-        future2.get();
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-
-        printTaskStatuses(workflow, "T2, T3 complete");
-        workflowExecutor.decide(workflowId);
-
-        Task t4 = workflowExecutionService.poll("junit_task_0_RT_4", "test");
-        assertNotNull(t4);
-        t4.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(t4);
-
-        printTaskStatuses(workflowId, "After complete");
-    }
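The retry entry point the fork/join test exercises, in isolation (a fragment; retry() re-schedules only the FAILED tasks, leaving completed fork branches untouched):

    workflowExecutor.retry(workflowId);   // resurrect a FAILED execution
    Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
    // workflow.getStatus() is back to RUNNING; only the failed branch gets new tasks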
-    @Test
-    public void testRetry() throws Exception {
-        String taskName = "junit_task_1";
-        TaskDef taskDef = metadataService.getTaskDef(taskName);
-        int retryCount = taskDef.getRetryCount();
-        taskDef.setRetryCount(1);
-        int retryDelay = taskDef.getRetryDelaySeconds();
-        taskDef.setRetryDelaySeconds(0);
-        metadataService.updateTaskDef(taskDef);
-
-        WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-        assertNotNull(workflowDef.getFailureWorkflow());
-        assertFalse(StringUtils.isBlank(workflowDef.getFailureWorkflow()));
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        input.put("param1", "p1 value");
-        input.put("param2", "p2 value");
-        String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
-        assertNotNull(workflowId);
-        printTaskStatuses(workflowId, "initial");
-
-        Task task = getTask("junit_task_1");
-        assertNotNull(task);
-        task.setStatus(FAILED);
-        workflowExecutionService.updateTask(task);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-
-        task = getTask("junit_task_1");
-        assertNotNull(task);
-        task.setStatus(FAILED);
-        workflowExecutionService.updateTask(task);
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.FAILED, workflow.getStatus());
-
-        printTaskStatuses(workflowId, "before retry");
-
-        workflowExecutor.retry(workflowId);
-
-        printTaskStatuses(workflowId, "after retry");
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-
-        task = getTask("junit_task_1");
-        assertNotNull(task);
-        assertEquals(workflowId, task.getWorkflowInstanceId());
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
-
-        task = getTask("junit_task_2");
-        assertNotNull(task);
-        assertEquals(workflowId, task.getWorkflowInstanceId());
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
-        assertNotNull(workflow);
-        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-
-        assertEquals(3, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_1")).count());
-
-        taskDef.setRetryCount(retryCount);
-        taskDef.setRetryDelaySeconds(retryDelay);
-        metadataService.updateTaskDef(taskDef);
-
-        printTaskStatuses(workflowId, "final");
-    }
-
-    @Test
-    public void testNonRestartartableWorkflows() throws Exception {
-        String taskName = "junit_task_1";
-        TaskDef taskDef = metadataService.getTaskDef(taskName);
-        taskDef.setRetryCount(0);
-        metadataService.updateTaskDef(taskDef);
-
-        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-        found.setName(JUNIT_TEST_WF_NON_RESTARTABLE);
-        found.setRestartable(false);
-        metadataService.updateWorkflowDef(found);
-
-        assertNotNull(found);
-        assertNotNull(found.getFailureWorkflow());
-        assertFalse(StringUtils.isBlank(found.getFailureWorkflow()));
-
-        String correlationId = "unit_test_1" + UUID.randomUUID().toString();
-        Map<String, Object> input = new HashMap<>();
-        String inputParam1 = "p1 value";
-        input.put("param1", inputParam1);
-        input.put("param2", "p2 value");
-        String wfid = workflowExecutor.startWorkflow(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input);
-        assertNotNull(wfid);
-
-        Task task = getTask("junit_task_1");
-        task.setStatus(FAILED);
-        workflowExecutionService.updateTask(task);
-
-        // If we get the full workflow here then, last task should be completed and the next task should be scheduled
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.FAILED, es.getStatus());
-
-        workflowExecutor.rewind(es.getWorkflowId());
-
-        workflowExecutor.decide(wfid);
-
-        // Polling for the first task should return the same task as before
-        task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_1", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        assertEquals(wfid, task.getWorkflowInstanceId());
-
-        workflowExecutor.decide(wfid);
-
-        String task1Op = "task1.Done";
-        List<Task> tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1);
-        assertNotNull(tasks);
-        assertEquals(1, tasks.size());
-        task = tasks.get(0);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
-        assertEquals(wfid, task.getWorkflowInstanceId());
-        task.getOutputData().put("op", task1Op);
-        task.setStatus(COMPLETED);
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, false);
-        assertNotNull(es);
-        assertNotNull(es.getOutput());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
-        assertNotNull(task);
-        assertEquals("junit_task_2", task.getTaskType());
-        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-        String task2Input = (String) task.getInputData().get("tp2");
-        assertNotNull("Found=" + task.getInputData(), task2Input);
-        assertEquals(task1Op, task2Input);
-
-        task2Input = (String) task.getInputData().get("tp1");
-        assertNotNull(task2Input);
-        assertEquals(inputParam1, task2Input);
-
-        task.setStatus(COMPLETED);
-        task.setReasonForIncompletion("unit test failure");
-        workflowExecutionService.updateTask(task);
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
-        assertNotNull(tasks);
-        assertEquals(2, tasks.size());
-
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
-        expectedException.expect(ApplicationException.class);
-        expectedException.expectMessage(String.format("is an instance of WorkflowDef: %s and version: %d and is non restartable", JUNIT_TEST_WF_NON_RESTARTABLE, 1));
-        workflowExecutor.rewind(es.getWorkflowId());
-    }
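The non-restartable guard the test above pins down, as a fragment (setters as in the test; the constant is assumed from this test class):

    WorkflowDef def = metadataService.getWorkflowDef(JUNIT_TEST_WF_NON_RESTARTABLE, 1).get();
    def.setRestartable(false);          // forbid rewind() once the workflow has completed
    metadataService.updateWorkflowDef(def);
    // rewind() on a COMPLETED instance of this definition now throws ApplicationException;
    // rewind() while the workflow is merely FAILED is still allowed, as asserted above.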
workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - tasks = es.getTasks(); - assertNotNull(tasks); - assertEquals(2, tasks.size()); - - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); - - - expectedException.expect(ApplicationException.class); - expectedException.expectMessage(String.format("is an instance of WorkflowDef: %s and version: %d and is non restartable", JUNIT_TEST_WF_NON_RESTARTABLE, 1)); - workflowExecutor.rewind(es.getWorkflowId()); - } - - - @Test - public void testRestart() throws Exception { - String taskName = "junit_task_1"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); - assertNotNull(found.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); - assertNotNull(wfid); - - Task task = getTask("junit_task_1"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.FAILED, es.getStatus()); - - workflowExecutor.rewind(es.getWorkflowId()); - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - - task = getTask("junit_task_1"); - assertNotNull(task); - assertEquals(wfid, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - - task = getTask("junit_task_2"); - assertNotNull(task); - assertEquals(wfid, task.getWorkflowInstanceId()); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - - } - - - @Test - public void testTimeout() throws Exception { - - String taskName = "junit_task_1"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(1); - taskDef.setTimeoutSeconds(1); - taskDef.setRetryDelaySeconds(0); - taskDef.setTimeoutPolicy(TimeoutPolicy.RETRY); - metadataService.updateTaskDef(taskDef); - - WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); - assertNotNull(found.getFailureWorkflow()); - assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); - - String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - input.put("failureWfName", "FanInOutTest"); - String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); - assertNotNull(wfid); - - //Ensure that we have a 
workflow queued up for evaluation here...
- long size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
- assertEquals(1, size);
-
- // If we get the full workflow here then, last task should be completed and the next task should be scheduled
- Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals(WorkflowStatus.RUNNING, es.getStatus());
- assertEquals("found: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, es.getTasks().size());
-
- Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
- assertNotNull(task);
- assertEquals(wfid, task.getWorkflowInstanceId());
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-
- //Ensure that we have a workflow queued up for evaluation here...
- size = queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE);
- assertEquals(1, size);
-
-
- Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
- workflowSweeper.sweep(Arrays.asList(wfid), workflowExecutor);
- es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals("found: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, es.getTasks().size());
-
- Task task1 = es.getTasks().get(0);
- assertEquals(Status.TIMED_OUT, task1.getStatus());
- Task task2 = es.getTasks().get(1);
- assertEquals(Status.SCHEDULED, task2.getStatus());
-
- task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker");
- assertNotNull(task);
- assertEquals(wfid, task.getWorkflowInstanceId());
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
- Uninterruptibles.sleepUninterruptibly(3, TimeUnit.SECONDS);
- workflowExecutor.decide(wfid);
-
- es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals(2, es.getTasks().size());
-
- assertEquals(Status.TIMED_OUT, es.getTasks().get(0).getStatus());
- assertEquals(Status.TIMED_OUT, es.getTasks().get(1).getStatus());
- assertEquals(WorkflowStatus.TIMED_OUT, es.getStatus());
-
- assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE));
-
- taskDef.setTimeoutSeconds(0);
- taskDef.setRetryCount(RETRY_COUNT);
- metadataService.updateTaskDef(taskDef);
-
- }
-
- @Test
- public void testReruns() throws Exception {
-
- metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get();
-
- String correlationId = "unit_test_1" + UUID.randomUUID().toString();
- Map input = new HashMap();
- String inputParam1 = "p1 value";
- input.put("param1", inputParam1);
- input.put("param2", "p2 value");
- String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input);
- assertNotNull(wfid);
-
- Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals(WorkflowStatus.RUNNING, es.getStatus());
- // Check the tasks, at this time there should be 1 task
- assertEquals(es.getTasks().size(), 1);
- Task t = es.getTasks().get(0);
- assertEquals(Status.SCHEDULED, t.getStatus());
-
- Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
- assertNotNull(task);
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
- assertEquals(t.getTaskId(), task.getTaskId());
-
- String param1 = (String) task.getInputData().get("p1");
- String param2 = (String) task.getInputData().get("p2");
-
- assertNotNull(param1);
- assertNotNull(param2);
- assertEquals("p1 value", param1);
- assertEquals("p2 value", param2);
-
task1Op = "task1.output->" + param1 + "." + param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getTaskId().equals(t.getTaskId())) { - assertEquals(wfTask.getStatus(), COMPLETED); - } else { - assertEquals(wfTask.getStatus(), Status.SCHEDULED); - } - }); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - // Now rerun lets rerun the workflow from the second task - RerunWorkflowRequest request = new RerunWorkflowRequest(); - request.setReRunFromWorkflowId(wfid); - request.setReRunFromTaskId(es.getTasks().get(1).getTaskId()); - - String reRunwfid = workflowExecutor.rerun(request); - - Workflow esRR = workflowExecutionService.getExecutionStatus(reRunwfid, true); - assertNotNull(esRR); - assertEquals(esRR.getReasonForIncompletion(), WorkflowStatus.RUNNING, esRR.getStatus()); - // Check the tasks, at this time there should be 2 tasks - // first one is skipped and the second one is scheduled - assertEquals(esRR.getTasks().toString(), 2, esRR.getTasks().size()); - assertEquals(COMPLETED, esRR.getTasks().get(0).getStatus()); - Task tRR = esRR.getTasks().get(1); - assertEquals(esRR.getTasks().toString(), Status.SCHEDULED, tRR.getStatus()); - assertEquals(tRR.getTaskType(), "junit_task_2"); - - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(reRunwfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - ////////////////////// - // Now rerun the entire workflow - RerunWorkflowRequest request1 = new RerunWorkflowRequest(); - request1.setReRunFromWorkflowId(wfid); - - String reRunwfid1 = workflowExecutor.rerun(request1); - - es = workflowExecutionService.getExecutionStatus(reRunwfid1, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - // Check the tasks, at this time there should be 1 task - assertEquals(es.getTasks().size(), 1); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - 
- workflowExecutionService.updateTask(task);
-
- task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
- assertNotNull(task);
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
- task.setStatus(COMPLETED);
- workflowExecutionService.updateTask(task);
-
- es = workflowExecutionService.getExecutionStatus(reRunwfid1, true);
- assertNotNull(es);
- assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-
-
- }
-
-
- @Test
- public void testTaskSkipping() throws Exception {
-
- String taskName = "junit_task_1";
- TaskDef taskDef = metadataService.getTaskDef(taskName);
- taskDef.setRetryCount(0);
- taskDef.setTimeoutSeconds(0);
- metadataService.updateTaskDef(taskDef);
-
-
- metadataService.getWorkflowDef(TEST_WORKFLOW_NAME_3, 1).get();
-
- String correlationId = "unit_test_1" + UUID.randomUUID().toString();
- Map input = new HashMap();
- String inputParam1 = "p1 value";
- input.put("param1", inputParam1);
- input.put("param2", "p2 value");
- String wfid = workflowExecutor.startWorkflow(TEST_WORKFLOW_NAME_3, 1, correlationId, input);
- assertNotNull(wfid);
-
- // Now Skip the second task
- workflowExecutor.skipTaskFromWorkflow(wfid, "t2", null);
-
- Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals(WorkflowStatus.RUNNING, es.getStatus());
- // Check the tasks, at this time there should be 2 tasks: one scheduled, one skipped
- assertEquals(2, es.getTasks().size());
- assertEquals(Task.Status.SCHEDULED, es.getTasks().get(0).getStatus());
- assertEquals(Task.Status.SKIPPED, es.getTasks().get(1).getStatus());
-
- Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
- assertNotNull(task);
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
- assertEquals("t1", task.getReferenceTaskName());
-
- String param1 = (String) task.getInputData().get("p1");
- String param2 = (String) task.getInputData().get("p2");
-
- assertNotNull(param1);
- assertNotNull(param2);
- assertEquals("p1 value", param1);
- assertEquals("p2 value", param2);
-
- String task1Op = "task1.output->" + param1 + "."
+ param2; - task.getOutputData().put("op", task1Op); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - es = workflowExecutionService.getExecutionStatus(wfid, true); - es.getTasks().forEach(wfTask -> { - if (wfTask.getReferenceTaskName().equals("t1")) { - assertEquals(COMPLETED, wfTask.getStatus()); - } else if (wfTask.getReferenceTaskName().equals("t2")) { - assertEquals(Status.SKIPPED, wfTask.getStatus()); - } else { - assertEquals(Status.SCHEDULED, wfTask.getStatus()); - } - }); - - task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); - assertNotNull(task); - assertEquals(Status.IN_PROGRESS, task.getStatus()); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - - } - - @Test - public void testPauseResume() throws Exception { - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); - - String correlationId = "unit_test_1" + System.nanoTime(); - Map input = new HashMap(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - String wfid = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); - assertNotNull(wfid); - - List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); - assertNotNull(ids); - assertTrue("found no ids: " + ids, ids.size() > 0); //if there are concurrent tests running, this would be more than 1 - boolean foundId = false; - for (String id : ids) { - if (id.equals(wfid)) { - foundId = true; - } - } - assertTrue(foundId); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - Task t = es.getTasks().get(0); - assertEquals(Status.SCHEDULED, t.getStatus()); - - // PAUSE - workflowExecutor.pauseWorkflow(wfid); - - // The workflow is paused but the scheduled task should be pollable - - Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(t.getTaskId(), task.getTaskId()); - - String param1 = (String) task.getInputData().get("p1"); - String param2 = (String) task.getInputData().get("p2"); - - assertNotNull(param1); - assertNotNull(param2); - assertEquals("p1 value", param1); - assertEquals("p2 value", param2); - - String task1Op = "task1.output->" + param1 + "." 
+ param2;
- task.getOutputData().put("op", task1Op);
- task.setStatus(COMPLETED);
- workflowExecutionService.updateTask(task);
-
- // This decide should not schedule the next task
- //ds.decideNow(wfid, task);
-
- // If we get the full workflow here then, last task should be completed and the rest (including PAUSE task) should be scheduled
- es = workflowExecutionService.getExecutionStatus(wfid, true);
- es.getTasks().forEach(wfTask -> {
- if (wfTask.getTaskId().equals(t.getTaskId())) {
- assertEquals(wfTask.getStatus(), COMPLETED);
- }
- });
-
- // This should return null as workflow is paused
- task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
- assertNull("Found: " + task, task);
-
- // Even if decide is run again, the next task will not be scheduled as the workflow is still paused
- workflowExecutor.decide(wfid);
-
- task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
- assertNull(task);
-
- // RESUME
- workflowExecutor.resumeWorkflow(wfid);
-
- // Now polling should get the second task
- task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
- assertNotNull(task);
- assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
-
-
- String task2Input = (String) task.getInputData().get("tp2");
- assertNotNull(task2Input);
- assertEquals(task1Op, task2Input);
-
- Task byRefName = workflowExecutionService.getPendingTaskForWorkflow("t2", wfid);
- assertNotNull(byRefName);
- assertEquals(task.getTaskId(), byRefName.getTaskId());
-
- task2Input = (String) task.getInputData().get("tp1");
- assertNotNull(task2Input);
- assertEquals(inputParam1, task2Input);
-
- task.setStatus(COMPLETED);
- workflowExecutionService.updateTask(task);
-
- es = workflowExecutionService.getExecutionStatus(wfid, true);
- assertNotNull(es);
- assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-
- }
-
- @Test
- public void testSubWorkflow() throws Exception {
-
- createSubWorkflow();
- metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get();
- Map input = new HashMap<>();
- input.put("param1", "param 1 value");
- input.put("param3", "param 2 value");
- input.put("wfName", LINEAR_WORKFLOW_T1_T2);
- String wfId = workflowExecutor.startWorkflow(WF_WITH_SUB_WF, 1, "test", input);
- assertNotNull(wfId);
-
- Workflow es = workflowExecutionService.getExecutionStatus(wfId, true);
- assertNotNull(es);
-
- Task task = workflowExecutionService.poll("junit_task_5", "test");
- assertNotNull(task);
- task.setStatus(COMPLETED);
- workflowExecutionService.updateTask(task);
- Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
-
- es = workflowExecutionService.getExecutionStatus(wfId, true);
- assertNotNull(es);
- assertNotNull(es.getTasks());
-
- task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().get();
- assertNotNull(task);
- assertNotNull(task.getOutputData());
- assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId"));
- String subWorkflowId = task.getOutputData().get("subWorkflowId").toString();
-
- es = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
- assertNotNull(es);
- assertNotNull(es.getTasks());
- assertEquals(wfId, es.getParentWorkflowId());
- assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-
- task = workflowExecutionService.poll("junit_task_1", "test");
- task.setStatus(COMPLETED);
- workflowExecutionService.updateTask(task);
-
- task =
workflowExecutionService.poll("junit_task_2", "test"); - assertEquals(subWorkflowId, task.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - task.getOutputData().put("uuid", uuid); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - assertNotNull(es.getOutput()); - assertTrue(es.getOutput().containsKey("o1")); - assertTrue(es.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", es.getOutput().get("o1")); - assertEquals(uuid, es.getOutput().get("o2")); - - task = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - - @Test - public void testSubWorkflowFailure() throws Exception { - - TaskDef taskDef = metadataService.getTaskDef("junit_task_1"); - assertNotNull(taskDef); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(2); - metadataService.updateTaskDef(taskDef); - - - createSubWorkflow(); - metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); - - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = workflowExecutor.startWorkflow(WF_WITH_SUB_WF, 1, "test", input); - assertNotNull(wfId); - - Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(es); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - es = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(es); - assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); - assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull(task.getOutputData().get("subWorkflowId")); - String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); - - es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(es); - assertNotNull(es.getTasks()); - - assertEquals(wfId, es.getParentWorkflowId()); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(es); - assertEquals(WorkflowStatus.FAILED, es.getStatus()); - workflowExecutor.executeSystemTask(subworkflow, es.getParentWorkflowTaskId(), 1); - es = workflowExecutionService.getExecutionStatus(wfId, true); - assertEquals(WorkflowStatus.FAILED, es.getStatus()); - - taskDef.setTimeoutSeconds(0); - taskDef.setRetryCount(RETRY_COUNT); - metadataService.updateTaskDef(taskDef); - - } - - @Test - public void testSubWorkflowFailureInverse() throws Exception { - - TaskDef taskDef = metadataService.getTaskDef("junit_task_1"); - assertNotNull(taskDef); - taskDef.setRetryCount(0); - taskDef.setTimeoutSeconds(2); - metadataService.updateTaskDef(taskDef); - - - createSubWorkflow(); - - 
WorkflowDef found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1).get(); - assertNotNull(found); - Map input = new HashMap<>(); - input.put("param1", "param 1 value"); - input.put("param3", "param 2 value"); - input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = workflowExecutor.startWorkflow(WF_WITH_SUB_WF, 1, "test", input); - assertNotNull(wfId); - - Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(es); - - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - - es = workflowExecutionService.getExecutionStatus(wfId, true); - assertNotNull(es); - assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); - assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull(task.getOutputData().get("subWorkflowId")); - String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); - - es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(es); - assertNotNull(es.getTasks()); - assertEquals(wfId, es.getParentWorkflowId()); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - - workflowExecutor.terminateWorkflow(wfId, "fail"); - es = workflowExecutionService.getExecutionStatus(wfId, true); - assertEquals(WorkflowStatus.TERMINATED, es.getStatus()); - - es = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertEquals(WorkflowStatus.TERMINATED, es.getStatus()); - - } - - @Test - public void testSubWorkflowRetry() throws Exception { - String taskName = "junit_task_1"; - TaskDef taskDef = metadataService.getTaskDef(taskName); - int retryCount = metadataService.getTaskDef(taskName).getRetryCount(); - taskDef.setRetryCount(0); - metadataService.updateTaskDef(taskDef); - - // create a workflow with sub-workflow - createSubWorkflow(); - Optional found = metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); - assertTrue(found.isPresent()); - - // start the workflow - Map workflowInputParams = new HashMap<>(); - workflowInputParams.put("param1", "param 1"); - workflowInputParams.put("param3", "param 2"); - workflowInputParams.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = workflowExecutor.startWorkflow(WF_WITH_SUB_WF, 1, "test", workflowInputParams); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - - // poll and complete first task - Task task = workflowExecutionService.poll("junit_task_5", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertNotNull(workflow.getTasks()); - assertEquals(2, workflow.getTasks().size()); - - task = workflow.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().orElse(null); - assertNotNull(task); - assertNotNull(task.getOutputData()); - assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId")); - String subWorkflowId = task.getOutputData().get("subWorkflowId").toString(); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - 
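// The poll -> ack -> set status -> update sequence recurs throughout these tests. A small
// helper along these lines could capture it. This is a sketch only: pollAckAndComplete is a
// hypothetical method that does not exist in this class, and it assumes the same
// workflowExecutionService field and statically imported COMPLETED status used in this file.
private Task pollAckAndComplete(String taskType, String workerId) {
    // Poll for a pending task of the given type on behalf of the given worker
    Task task = workflowExecutionService.poll(taskType, workerId);
    assertNotNull(task);
    // Acknowledge receipt so the task is not redelivered to another worker
    assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
    // Mark the task complete and report the result back to the executor
    task.setStatus(COMPLETED);
    workflowExecutionService.updateTask(task);
    return task;
}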
assertNotNull(workflow.getTasks()); - assertEquals(workflowId, workflow.getParentWorkflowId()); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - - // poll and fail the first task in sub-workflow - task = workflowExecutionService.poll("junit_task_1", "test"); - task.setStatus(FAILED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - // Retry the failed sub workflow - workflowExecutor.retry(subWorkflowId); - task = workflowExecutionService.poll("junit_task_1", "test"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertEquals(subWorkflowId, task.getWorkflowInstanceId()); - String uuid = UUID.randomUUID().toString(); - task.getOutputData().put("uuid", uuid); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertNotNull(workflow.getOutput()); - assertTrue(workflow.getOutput().containsKey("o1")); - assertTrue(workflow.getOutput().containsKey("o2")); - assertEquals("sub workflow input param1", workflow.getOutput().get("o1")); - assertEquals(uuid, workflow.getOutput().get("o2")); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - - task = workflowExecutionService.poll("junit_task_6", "test"); - assertNotNull(task); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - - // reset retry count - taskDef = metadataService.getTaskDef(taskName); - taskDef.setRetryCount(retryCount); - metadataService.updateTaskDef(taskDef); - } - - - @Test - public void testWait() throws Exception { - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_wait"); - workflowDef.setSchemaVersion(2); - - WorkflowTask waitWorkflowTask = new WorkflowTask(); - waitWorkflowTask.setWorkflowTaskType(TaskType.WAIT); - waitWorkflowTask.setName("wait"); - waitWorkflowTask.setTaskReferenceName("wait0"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("junit_task_1"); - workflowTask.setTaskReferenceName("t1"); - - workflowDef.getTasks().add(waitWorkflowTask); - workflowDef.getTasks().add(workflowTask); - metadataService.registerWorkflowDef(workflowDef); - - String workflowId = workflowExecutor.startWorkflow(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - - Task waitTask = workflow.getTasks().get(0); - assertEquals(TaskType.WAIT.name(), 
waitTask.getTaskType()); - waitTask.setStatus(COMPLETED); - workflowExecutor.updateTask(new TaskResult(waitTask)); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testEventWorkflow() throws Exception { - - TaskDef taskDef = new TaskDef(); - taskDef.setName("eventX"); - taskDef.setTimeoutSeconds(1); - - metadataService.registerTaskDef(Collections.singletonList(taskDef)); - - WorkflowDef workflowDef = new WorkflowDef(); - workflowDef.setName("test_event"); - workflowDef.setSchemaVersion(2); - - WorkflowTask eventWorkflowTask = new WorkflowTask(); - eventWorkflowTask.setWorkflowTaskType(TaskType.EVENT); - eventWorkflowTask.setName("eventX"); - eventWorkflowTask.setTaskReferenceName("wait0"); - eventWorkflowTask.setSink("conductor"); - - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName("junit_task_1"); - workflowTask.setTaskReferenceName("t1"); - - workflowDef.getTasks().add(eventWorkflowTask); - workflowDef.getTasks().add(workflowTask); - metadataService.registerWorkflowDef(workflowDef); - - String workflowId = workflowExecutor.startWorkflow(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); - Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - - Task eventTask = workflow.getTasks().get(0); - assertEquals(TaskType.EVENT.name(), eventTask.getTaskType()); - assertEquals(COMPLETED, eventTask.getStatus()); - assertTrue(!eventTask.getOutputData().isEmpty()); - assertNotNull(eventTask.getOutputData().get("event_produced")); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - @Test - public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception { - WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); - assertNotNull(workflowDef); - - String workflowId = workflowExecutor.startWorkflow(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); - Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); - assertNotNull(workflow); - - Task task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String taskId = task.getTaskId(); - task.setStatus(Status.IN_PROGRESS); - task.setCallbackAfterSeconds(5L); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - // task should not be available - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNull(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - Uninterruptibles.sleepUninterruptibly(5, 
TimeUnit.SECONDS); - - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNotNull(task); - assertEquals(taskId, task.getTaskId()); - - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - taskId = task.getTaskId(); - task.setStatus(Status.IN_PROGRESS); - task.setCallbackAfterSeconds(5L); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - - // task should not be available - task = workflowExecutionService.poll("junit_task_1", "test"); - assertNull(task); - - Uninterruptibles.sleepUninterruptibly(5, TimeUnit.SECONDS); - - task = workflowExecutionService.poll("junit_task_2", "test"); - assertNotNull(task); - assertEquals(taskId, task.getTaskId()); - - task.setStatus(Status.COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(2, workflow.getTasks().size()); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - } - - //@Test - public void testRateLimiting() throws Exception { - - TaskDef td = new TaskDef(); - td.setName("eventX1"); - td.setTimeoutSeconds(1); - td.setConcurrentExecLimit(1); - - metadataService.registerTaskDef(Arrays.asList(td)); - - WorkflowDef def = new WorkflowDef(); - def.setName("test_rate_limit"); - def.setSchemaVersion(2); - - WorkflowTask event = new WorkflowTask(); - event.setType("USER_TASK"); - event.setName("eventX1"); - event.setTaskReferenceName("event0"); - event.setSink("conductor"); - - def.getTasks().add(event); - metadataService.registerWorkflowDef(def); - - Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(() -> { - queueDAO.processUnacks("USER_TASK"); - }, 2, 2, TimeUnit.SECONDS); - - String[] ids = new String[100]; - ExecutorService es = Executors.newFixedThreadPool(10); - for (int i = 0; i < 10; i++) { - final int index = i; - es.submit(() -> { - try { - String id = workflowExecutor.startWorkflow(def.getName(), def.getVersion(), "", new HashMap<>()); - ids[index] = id; - } catch (Exception e) { - e.printStackTrace(); - } - - }); - } - Uninterruptibles.sleepUninterruptibly(20, TimeUnit.SECONDS); - for (int i = 0; i < 10; i++) { - String id = ids[i]; - Workflow workflow = workflowExecutor.getWorkflow(id, true); - assertNotNull(workflow); - assertEquals(1, workflow.getTasks().size()); - - Task eventTask = workflow.getTasks().get(0); - assertEquals(COMPLETED, eventTask.getStatus()); - assertEquals("tasks:" + workflow.getTasks(), WorkflowStatus.COMPLETED, workflow.getStatus()); - assertTrue(!eventTask.getOutputData().isEmpty()); - assertNotNull(eventTask.getOutputData().get("event_produced")); - } - } - - private void createSubWorkflow() throws Exception { - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_5"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("a1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("subWorkflowTask"); - 
wft2.setType(TaskType.SUB_WORKFLOW.name()); - SubWorkflowParams swp = new SubWorkflowParams(); - swp.setName(LINEAR_WORKFLOW_T1_T2); - wft2.setSubWorkflowParam(swp); - Map ip2 = new HashMap<>(); - ip2.put("test", "test value"); - ip2.put("param1", "sub workflow input param1"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("a2"); - - WorkflowTask wft3 = new WorkflowTask(); - wft3.setName("junit_task_6"); - Map ip3 = new HashMap<>(); - ip3.put("p1", "${workflow.input.param1}"); - ip3.put("p2", "${workflow.input.param2}"); - wft3.setInputParameters(ip3); - wft3.setTaskReferenceName("a3"); - - WorkflowDef main = new WorkflowDef(); - main.setSchemaVersion(2); - main.setInputParameters(Arrays.asList("param1", "param2")); - main.setName(WF_WITH_SUB_WF); - main.getTasks().addAll(Arrays.asList(wft1, wft2, wft3)); - - metadataService.updateWorkflowDef(Collections.singletonList(main)); - - } - - private void verify(String inputParam1, String wfid, String task1Op, boolean fail) throws Exception { - Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - - String task2Input = (String) task.getInputData().get("tp2"); - assertNotNull(task2Input); - assertEquals(task1Op, task2Input); - task2Input = (String) task.getInputData().get("tp1"); - assertNotNull(task2Input); - assertEquals(inputParam1, task2Input); - if (fail) { - task.setStatus(FAILED); - task.setReasonForIncompletion("failure...0"); - } else { - task.setStatus(COMPLETED); - } - - workflowExecutionService.updateTask(task); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, false); - assertNotNull(es); - if (fail) { - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - } else { - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } - } - - @Before - public void flushAllTaskQueues() { - queueDAO.queuesDetail().keySet().forEach(queueName -> { - queueDAO.flush(queueName); - }); - - if (taskDefs == null) { - return; - } - for (TaskDef td : taskDefs) { - queueDAO.flush(td.getName()); - } - } - - private void createWorkflowDefForDomain() { - WorkflowDef defSW = new WorkflowDef(); - defSW.setName(LINEAR_WORKFLOW_T1_T2_SW); - defSW.setDescription(defSW.getName()); - defSW.setVersion(1); - defSW.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - defSW.setOutputParameters(outputParameters); - defSW.setFailureWorkflow("$workflow.input.failureWfName"); - defSW.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("junit_task_3"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("t1"); - - WorkflowTask subWorkflow = new WorkflowTask(); - subWorkflow.setType(TaskType.SUB_WORKFLOW.name()); - SubWorkflowParams sw = new SubWorkflowParams(); - sw.setName(LINEAR_WORKFLOW_T1_T2); - subWorkflow.setSubWorkflowParam(sw); - subWorkflow.setTaskReferenceName("sw1"); - - wftasks.add(wft1); - wftasks.add(subWorkflow); - defSW.setTasks(wftasks); - - try { - metadataService.updateWorkflowDef(defSW); - } catch (Exception e) { - } - } - - private void createWFWithResponseTimeout() throws Exception { - TaskDef task = 
new TaskDef(); - task.setName("task_rt"); - task.setTimeoutSeconds(120); - task.setRetryCount(RETRY_COUNT); - task.setRetryDelaySeconds(0); - task.setResponseTimeoutSeconds(10); - metadataService.registerTaskDef(Collections.singletonList(task)); - - WorkflowDef def = new WorkflowDef(); - def.setName("RTOWF"); - def.setDescription(def.getName()); - def.setVersion(1); - def.setInputParameters(Arrays.asList("param1", "param2")); - Map outputParameters = new HashMap<>(); - outputParameters.put("o1", "${workflow.input.param1}"); - outputParameters.put("o2", "${t2.output.uuid}"); - outputParameters.put("o3", "${t1.output.op}"); - def.setOutputParameters(outputParameters); - def.setFailureWorkflow("$workflow.input.failureWfName"); - def.setSchemaVersion(2); - LinkedList wftasks = new LinkedList<>(); - - WorkflowTask wft1 = new WorkflowTask(); - wft1.setName("task_rt"); - Map ip1 = new HashMap<>(); - ip1.put("p1", "${workflow.input.param1}"); - ip1.put("p2", "${workflow.input.param2}"); - wft1.setInputParameters(ip1); - wft1.setTaskReferenceName("task_rt_t1"); - - WorkflowTask wft2 = new WorkflowTask(); - wft2.setName("junit_task_2"); - Map ip2 = new HashMap<>(); - ip2.put("tp1", "${workflow.input.param1}"); - ip2.put("tp2", "${t1.output.op}"); - wft2.setInputParameters(ip2); - wft2.setTaskReferenceName("t2"); - - wftasks.add(wft1); - wftasks.add(wft2); - def.setTasks(wftasks); - - metadataService.updateWorkflowDef(def); - } - - private String runWorkflowWithSubworkflow() throws Exception { - clearWorkflows(); - createWorkflowDefForDomain(); - - metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1).get(); - - String correlationId = "unit_test_sw"; - Map input = new HashMap<>(); - String inputParam1 = "p1 value"; - input.put("param1", inputParam1); - input.put("param2", "p2 value"); - - String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null); - System.out.println("testSimpleWorkflow.wfid=" + workflowId); - assertNotNull(workflowId); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
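// The sub-workflow id is extracted a few lines below with a for-loop over the parent's
// tasks; an equivalent stream-based form would look like this (a sketch only, mirroring the
// loop's assumptions that a SUB_WORKFLOW task exists and carries "subWorkflowId" in its
// output data):
String subWorkflowIdSketch = workflow.getTasks().stream()
        // keep only the SUB_WORKFLOW system task spawned by the parent workflow
        .filter(t -> t.getTaskType().equalsIgnoreCase("SUB_WORKFLOW"))
        // the child workflow's id is published under the "subWorkflowId" output key
        .map(t -> t.getOutputData().get("subWorkflowId").toString())
        .findFirst()
        .orElse(null);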
- - // Poll for first task and execute it - Task task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_3.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Get the sub workflow id - String subWorkflowId = null; - for (Task t : workflow.getTasks()) { - if (t.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) { - subWorkflowId = t.getOutputData().get("subWorkflowId").toString(); - } - } - assertNotNull(subWorkflowId); - - Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); - assertEquals(1, subWorkflow.getTasks().size()); - - // Now the Sub workflow is triggered - // Poll for first task of the sub workflow and execute it - task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_1.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - // Poll for second task of the sub workflow and execute it - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); - assertNotNull(task); - assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - task.getOutputData().put("op", "junit_task_2.done"); - task.setStatus(COMPLETED); - workflowExecutionService.updateTask(task); - - // Now the sub workflow and the main workflow must have finished - subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true); - assertNotNull(subWorkflow); - assertEquals(WorkflowStatus.COMPLETED, subWorkflow.getStatus()); - assertEquals(2, subWorkflow.getTasks().size()); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); - assertEquals(2, workflow.getTasks().size()); - - return workflowId; - } - - private String runAFailedForkJoinWF() throws Exception { - try { - this.createForkJoinWorkflowWithZeroRetry(); - } catch (Exception e) { - } - - Map input = new HashMap<>(); - String workflowId = workflowExecutor.startWorkflow(FORK_JOIN_WF + "_2", 1, "fanouttest", input); - System.out.println("testForkJoin.wfid=" + workflowId); - Task t1 = workflowExecutionService.poll("junit_task_0_RT_1", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); - - Task t2 = workflowExecutionService.poll("junit_task_0_RT_2", "test"); - assertTrue(workflowExecutionService.ackTaskReceived(t2.getTaskId())); - assertNotNull(t1); - assertNotNull(t2); - - t1.setStatus(COMPLETED); - 
workflowExecutionService.updateTask(t1); - - Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); - printTaskStatuses(workflow, "Initial"); - - t2.setStatus(FAILED); - - ExecutorService executorService = Executors.newFixedThreadPool(2); - Future future1 = executorService.submit(() -> { - try { - workflowExecutionService.updateTask(t2); - } catch (Exception e) { - throw new RuntimeException(e); - } - - }); - future1.get(); - - workflow = workflowExecutionService.getExecutionStatus(workflowId, true); - assertNotNull(workflow); - assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - - return workflowId; - } - - private void printTaskStatuses(String wfid, String message) throws Exception { - Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(wf); - printTaskStatuses(wf, message); - } - - private boolean printWFTaskDetails = false; - - private void printTaskStatuses(Workflow wf, String message) throws Exception { - if (printWFTaskDetails) { - System.out.println(message + " >>> Workflow status " + wf.getStatus().name()); - wf.getTasks().forEach(t -> { - System.out.println("Task " + String.format("%-15s", t.getTaskType()) + "\t" + String.format("%-15s", t.getReferenceTaskName()) + "\t" + String.format("%-15s", t.getWorkflowTask().getType()) + "\t" + t.getSeq() + "\t" + t.getStatus() + "\t" + t.getTaskId()); - }); - System.out.println(); - } - } } From d8bccf24fa4b81c32014521feffdd89d8cb55ec7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 24 Aug 2018 00:01:18 -0700 Subject: [PATCH 137/163] Updating code for code review - Added comments related to backwards compatibility - Renamed folder for legacy workflow scenarios --- .../conductor/common/run/Workflow.java | 3 +- .../core/execution/WorkflowExecutor.java | 7 ++ .../core/metadata/MetadataMapperService.java | 2 - .../AbstractWorkflowServiceTest.java | 76 +++++++++---------- .../WorkflowLegacyMigrationTest.java | 49 +++++++----- .../{ => legacy}/ConditionalTaskWF.json | 0 .../{ => legacy}/ConditionalTaskWF2.json | 0 .../{ => legacy}/ConditionalTaskWF3.json | 0 .../{ => legacy}/ConditionalTaskWF4.json | 0 .../{ => legacy}/DynamicFanInOutTest.json | 0 .../DynamicFanInOutTestLegacy.json | 0 .../{ => legacy}/FanInOutNestedTest.json | 0 .../scenarios/{ => legacy}/FanInOutTest.json | 0 .../{ => legacy}/FanInOutTest_2.json | 0 .../scenarios/{ => legacy}/RTOWF.json | 0 .../{ => legacy}/WorkflowWithSubWorkflow.json | 0 .../concurrentWorkflowExecutions.json | 0 .../{ => legacy}/empty_workflow.json | 0 .../{ => legacy}/forkJoinNested.json | 0 .../scenarios/{ => legacy}/junit_test_wf.json | 0 .../{ => legacy}/junit_test_wf3.json | 0 .../junit_test_wf_non_restartable.json | 0 .../{ => legacy}/junit_test_wf_sw.json | 0 .../scenarios/{ => legacy}/longRunningWf.json | 0 .../scenarios/{ => legacy}/retry.json | 0 ...impleWorkflowFailureWithTerminalError.json | 0 .../simpleWorkflowWithTaskSpecificDomain.json | 0 .../simpleWorkflowWithTasksInOneDomain.json | 0 .../{ => legacy}/template_workflow.json | 0 .../scenarios/{ => legacy}/test_event.json | 0 .../scenarios/{ => legacy}/test_wait.json | 0 .../scenarios/{ => legacy}/timeout.json | 0 32 files changed, 70 insertions(+), 67 deletions(-) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/ConditionalTaskWF.json (100%) rename 
test-harness/src/test/resources/integration/scenarios/{ => legacy}/ConditionalTaskWF2.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/ConditionalTaskWF3.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/ConditionalTaskWF4.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/DynamicFanInOutTest.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/DynamicFanInOutTestLegacy.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/FanInOutNestedTest.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/FanInOutTest.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/FanInOutTest_2.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/RTOWF.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/WorkflowWithSubWorkflow.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/concurrentWorkflowExecutions.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/empty_workflow.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/forkJoinNested.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/junit_test_wf.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/junit_test_wf3.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/junit_test_wf_non_restartable.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/junit_test_wf_sw.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/longRunningWf.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/retry.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/simpleWorkflowFailureWithTerminalError.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/simpleWorkflowWithTaskSpecificDomain.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/simpleWorkflowWithTasksInOneDomain.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/template_workflow.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/test_event.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/test_wait.json (100%) rename test-harness/src/test/resources/integration/scenarios/{ => legacy}/timeout.json (100%) diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 381221f751..e8ed348ff7 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -320,8 +320,7 @@ public void setParentWorkflowTaskId(String parentWorkflowTaskId) { public int getSchemaVersion() { return getWorkflowDefinition() != null ? 
getWorkflowDefinition().getSchemaVersion() : - schemaVersion - ; + schemaVersion; } /** diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index bbfe8263fe..e1f771e4e6 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -606,7 +606,10 @@ public void updateTask(TaskResult taskResult) { String workflowId = taskResult.getWorkflowInstanceId(); Workflow workflowInstance = executionDAO.getWorkflow(workflowId); + + // Backwards compatibility for legacy workflows already running metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); + Task task = executionDAO.getTask(taskResult.getTaskId()); logger.debug("Task: {} belonging to Workflow {} being updated", task, workflowInstance); @@ -745,6 +748,8 @@ public boolean decide(String workflowId) { //If it is a new workflow the tasks will be still empty even though include tasks is true Workflow workflow = executionDAO.getWorkflow(workflowId, true); + + // Backwards compatibility for legacy workflows already running metadataMapperService.populateWorkflowWithDefinitions(workflow); try { @@ -849,6 +854,8 @@ public void resumeWorkflow(String workflowId) { public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, SkipTaskRequest skipTaskRequest) { Workflow wf = executionDAO.getWorkflow(workflowId, true); + + // Backwards compatibility for legacy workflows already running metadataMapperService.populateWorkflowWithDefinitions(wf); // If the wf is not running then cannot skip any task diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index b8eaacb85a..8c9f5cc7a9 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -88,7 +88,6 @@ public Workflow populateWorkflowWithDefinitions(Workflow workflow) { workflow.setWorkflowDefinition(workflowDefinition); } - // Populate definitions on the workflow definition workflowDefinition.collectTasks().stream().forEach( workflowTask -> { if (shouldPopulateDefinition(workflowTask)) { @@ -105,7 +104,6 @@ public Workflow populateWorkflowWithDefinitions(Workflow workflow) { } public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { - // Populate definitions on the workflow definition workflowDefinition.collectTasks().stream().forEach( workflowTask -> populateWorkflowTaskWithDefinition(workflowTask) ); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java index 16649ec32d..1b7d036566 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java @@ -257,7 +257,7 @@ public void testWorkflowWithNoTasks() throws Exception { empty.setSchemaVersion(2); metadataService.registerWorkflowDef(empty); - String id = startOrLoadWorkflowExecution(empty.getName(), 1, "testWorkflowWithNoTasks", new HashMap<>()); + String id = startOrLoadWorkflowExecution(empty.getName(), 1, 
"testWorkflowWithNoTasks", new HashMap<>(), null, null); assertNotNull(id); Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); assertNotNull(workflow); @@ -303,7 +303,7 @@ public void testTaskDefTemplate() throws Exception { input.put("outputPath", "s3://bucket/outputPath"); input.put("requestDetails", requestDetails); - String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input); + String id = startOrLoadWorkflowExecution(templateWf.getName(), 1, "testTaskDefTemplate", input, null, null); assertNotNull(id); Workflow workflow = workflowExecutionService.getExecutionStatus(id, true); assertNotNull(workflow); @@ -377,7 +377,7 @@ public void testForkJoin() throws Exception { metadataService.updateTaskDef(taskDef); Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input); + String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input, null, null); System.out.println("testForkJoin.wfid=" + workflowId); printTaskStatuses(workflowId, "initiated"); @@ -662,7 +662,7 @@ public void testForkJoinFailure() throws Exception { Map input = new HashMap(); - String wfid = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input); + String wfid = startOrLoadWorkflowExecution(FORK_JOIN_WF, 1, "fanouttest", input, null, null); System.out.println("testForkJoinFailure.wfid=" + wfid); Task t1 = workflowExecutionService.poll("junit_task_2", "test"); @@ -710,7 +710,7 @@ public void testDynamicForkJoinLegacy() throws Exception { } Map input = new HashMap(); - String wfid = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input); + String wfid = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF_LEGACY, 1, "dynfanouttest1", input, null, null); System.out.println("testDynamicForkJoinLegacy.wfid=" + wfid); Task t1 = workflowExecutionService.poll("junit_task_1", "test"); @@ -784,7 +784,7 @@ public void testDynamicForkJoin() throws Exception { metadataService.updateTaskDef(taskDef); Map workflowInput = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF, 1, "dynfanouttest1", workflowInput); + String workflowId = startOrLoadWorkflowExecution(DYNAMIC_FORK_JOIN_WF, 1, "dynfanouttest1", workflowInput, null, null); System.out.println("testDynamicForkJoin.wfid=" + workflowId); Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); assertNotNull(workflow); @@ -1390,7 +1390,7 @@ public void testSimpleWorkflow() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String workflowInstanceId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId); assertNotNull(workflowInstanceId); @@ -1481,7 +1481,7 @@ public void testSimpleWorkflowWithResponseTimeout() throws Exception { String inputParam1 = "p1 value"; workflowInput.put("param1", inputParam1); workflowInput.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, correlationId, workflowInput); + String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, correlationId, workflowInput, null, null); System.out.println("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId); assertNotNull(workflowId); @@ -1903,7 +1903,7 @@ public void 
testLongRunning() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); System.out.println("testLongRunning.wfid=" + wfid); assertNotNull(wfid); @@ -2011,7 +2011,7 @@ public void testResetWorkflowInProgressTasks() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); System.out.println("testLongRunning.wfid=" + wfid); assertNotNull(wfid); @@ -2202,7 +2202,7 @@ public void testCaseStatements() throws Exception { //default case input.put("param1", "xxx"); input.put("param2", "two"); - wfid = startOrLoadWorkflowExecution(COND_TASK_WF, 1, correlationId, input); + wfid = startOrLoadWorkflowExecution(COND_TASK_WF, 1, correlationId, input, null, null); System.out.println("testCaseStatements.wfid=" + wfid); assertNotNull(wfid); Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); @@ -2328,7 +2328,7 @@ public void testRetries() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); System.out.println("testRetries.wfid=" + wfid); assertNotNull(wfid); @@ -2395,7 +2395,7 @@ public void testSuccess() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); List ids = workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); @@ -2504,7 +2504,7 @@ public void testDeciderUpdate() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); Workflow workflow = workflowExecutor.getWorkflow(wfid, false); @@ -2535,7 +2535,7 @@ public void testFailurePoints() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); @@ -2609,7 +2609,7 @@ public void testDeciderMix() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); List ids = 
workflowExecutionService.getRunningWorkflows(LINEAR_WORKFLOW_T1_T2); @@ -2758,7 +2758,7 @@ public void testFailures() throws Exception { input.put("param1", inputParam1); input.put("param2", "p2 value"); input.put("failureWfName", "FanInOutTest"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); Task task = getTask("junit_task_1"); @@ -2933,7 +2933,7 @@ public void testNonRestartartableWorkflows() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input, null, null); assertNotNull(wfid); Task task = getTask("junit_task_1"); @@ -3028,7 +3028,7 @@ public void testRestart() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); Task task = getTask("junit_task_1"); @@ -3159,7 +3159,7 @@ public void testReruns() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); @@ -3306,7 +3306,7 @@ public void testTaskSkipping() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(TEST_WORKFLOW_NAME_3, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(TEST_WORKFLOW_NAME_3, 1, correlationId, input, null, null); assertNotNull(wfid); // Now Skip the second task @@ -3376,7 +3376,7 @@ public void testPauseResume() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input); + String wfid = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2, 1, correlationId, input, null, null); assertNotNull(wfid); @@ -3484,7 +3484,7 @@ public void testSubWorkflow() throws Exception { input.put("param1", "param 1 value"); input.put("param3", "param 2 value"); input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input); + String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); assertNotNull(wfId); Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); @@ -3559,7 +3559,7 @@ public void testSubWorkflowFailure() throws Exception { input.put("param1", "param 1 value"); input.put("param3", "param 2 value"); input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input); + String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); assertNotNull(wfId); Workflow es = 
workflowExecutionService.getExecutionStatus(wfId, true); @@ -3623,7 +3623,7 @@ public void testSubWorkflowFailureInverse() throws Exception { input.put("param1", "param 1 value"); input.put("param3", "param 2 value"); input.put("wfName", LINEAR_WORKFLOW_T1_T2); - String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input); + String wfId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", input, null, null); assertNotNull(wfId); Workflow es = workflowExecutionService.getExecutionStatus(wfId, true); @@ -3677,7 +3677,7 @@ public void testSubWorkflowRetry() throws Exception { workflowInputParams.put("param1", "param 1"); workflowInputParams.put("param3", "param 2"); workflowInputParams.put("wfName", LINEAR_WORKFLOW_T1_T2); - String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", workflowInputParams); + String workflowId = startOrLoadWorkflowExecution(WF_WITH_SUB_WF, 1, "test", workflowInputParams, null, null); assertNotNull(workflowId); Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); @@ -3785,7 +3785,7 @@ public void testWait() throws Exception { workflowDef.getTasks().add(workflowTask); metadataService.registerWorkflowDef(workflowDef); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null); Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); assertNotNull(workflow); assertEquals(1, workflow.getTasks().size()); @@ -3833,7 +3833,7 @@ public void testEventWorkflow() throws Exception { workflowDef.getTasks().add(workflowTask); metadataService.registerWorkflowDef(workflowDef); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null); Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); assertNotNull(workflow); @@ -3859,7 +3859,7 @@ public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception { WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1).get(); assertNotNull(workflowDef); - String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>()); + String workflowId = startOrLoadWorkflowExecution(workflowDef.getName(), workflowDef.getVersion(), "", new HashMap<>(), null, null); Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); assertNotNull(workflow); @@ -3962,7 +3962,7 @@ public void testRateLimiting() throws Exception { final int index = i; es.submit(() -> { try { - String id = startOrLoadWorkflowExecution(def.getName(), def.getVersion(), "", new HashMap<>()); + String id = startOrLoadWorkflowExecution(def.getName(), def.getVersion(), "", new HashMap<>(), null, null); ids[index] = id; } catch (Exception e) { e.printStackTrace(); @@ -4166,7 +4166,7 @@ private String runWorkflowWithSubworkflow() throws Exception { input.put("param1", inputParam1); input.put("param2", "p2 value"); - String workflowId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null); + String workflowId = startOrLoadWorkflowExecution(LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, null); 
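// ----------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: every call site above gains two
// trailing arguments because the test helper now exposes the execution entry
// point's full signature. Passing (..., null, null) simply means "no event and
// no task-to-domain routing". A test that does exercise domain routing would
// call the same helper roughly as below; the task name, domain values and
// correlation id are hypothetical.
Map<String, Object> input = new HashMap<>();
input.put("param1", "p1 value");

Map<String, String> taskToDomain = new HashMap<>();
taskToDomain.put("junit_task_1", "domain1"); // poll junit_task_1 workers in domain1

String workflowId = startOrLoadWorkflowExecution(
        LINEAR_WORKFLOW_T1_T2,  // workflow name
        1,                      // workflow version
        "domainRoutingTest",    // correlation id (hypothetical)
        input,
        null,                   // event: none in this scenario
        taskToDomain);
// ----------------------------------------------------------------------------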
System.out.println("testSimpleWorkflow.wfid=" + workflowId); assertNotNull(workflowId); @@ -4250,7 +4250,7 @@ private String runAFailedForkJoinWF() throws Exception { } Map input = new HashMap<>(); - String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF + "_2", 1, "fanouttest", input); + String workflowId = startOrLoadWorkflowExecution(FORK_JOIN_WF + "_2", 1, "fanouttest", input, null, null); System.out.println("testForkJoin.wfid=" + workflowId); Task t1 = workflowExecutionService.poll("junit_task_0_RT_1", "test"); assertTrue(workflowExecutionService.ackTaskReceived(t1.getTaskId())); @@ -4294,16 +4294,8 @@ private void printTaskStatuses(String wfid, String message) throws Exception { printTaskStatuses(wf, message); } - private String startOrLoadWorkflowExecution(String linearWorkflowT1T2, int i, String correlationId, Map input, Map taskToDomain) { - return startOrLoadWorkflowExecution(linearWorkflowT1T2, i, correlationId, input, null, taskToDomain); - } - - private String startOrLoadWorkflowExecution(String linearWorkflowT1T2, int i, String correlationId, Map input) { - return startOrLoadWorkflowExecution(linearWorkflowT1T2, i, correlationId, input, null); - } - - private String startOrLoadWorkflowExecution(String linearWorkflowT1T2Sw, int i, String correlationId, Map input, String event, Map taskToDomain) { - return startOrLoadWorkflowExecution(linearWorkflowT1T2Sw, linearWorkflowT1T2Sw, i, correlationId, input, event, taskToDomain); + private String startOrLoadWorkflowExecution(String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain) { + return startOrLoadWorkflowExecution(workflowName, workflowName, version, correlationId, input, event, taskToDomain); } abstract String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map input, String event, Map taskToDomain); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java index 7a17321a7f..1a1b44d747 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java @@ -27,7 +27,6 @@ import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.tests.utils.TestRunner; import org.apache.commons.io.Charsets; -import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import org.junit.runner.RunWith; @@ -40,8 +39,9 @@ @RunWith(TestRunner.class) public class WorkflowLegacyMigrationTest extends AbstractWorkflowServiceTest { - private static final String WORKFLOW_SCENARIOS_PATH_PREFIX = "/integration/scenarios/"; + private static final String WORKFLOW_SCENARIOS_PATH_PREFIX = "/integration/scenarios/legacy/"; private static final String WORKFLOW_SCENARIO_EXTENSION = ".json"; + private static final String WORKFLOW_INSTANCE_ID_PLACEHOLDER = "WORKFLOW_INSTANCE_ID"; @Inject private ExecutionDAO executionDAO; @@ -52,28 +52,13 @@ public class WorkflowLegacyMigrationTest extends AbstractWorkflowServiceTest { @Inject private Configuration configuration; - @Before - public void init() throws Exception { - super.init(); - } - - private Workflow loadWorkflow(String resourcePath) throws Exception { - - String content = Resources.toString(WorkflowLegacyMigrationTest.class.getResource(resourcePath), Charsets.UTF_8); - 
String workflowId = IDGenerator.generate(); - content = content.replace("WORKFLOW_INSTANCE_ID", workflowId); - - Workflow workflow = objectMapper.readValue(content, Workflow.class); - workflow.setWorkflowId(workflowId); - - return workflow; - } - @Override - String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map<String, Object> input, String event, Map<String, String> taskToDomain) { + public String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, + int version, String correlationId, Map<String, Object> input, + String event, Map<String, String> taskToDomain) { Workflow workflow = null; try { - workflow = loadWorkflow(getWorkflowResourcePath(snapshotResourceName)); + workflow = loadWorkflowSnapshot(getWorkflowResourcePath(snapshotResourceName)); } catch (Exception e) { fail("Error loading workflow scenario " + snapshotResourceName); } @@ -94,6 +79,12 @@ String startOrLoadWorkflowExecution(String snapshotResourceName, String workflow executionDAO.createTasks(workflow.getTasks()); executionDAO.createWorkflow(workflow); + + /* + * Loading the workflow snapshot alone is not enough to represent a running + * workflow on the system: we also need to populate the queues that track + * in-progress tasks and decider sweeps. + */ workflow.getTasks().stream().forEach(task -> { workflowExecutor.addTaskToQueue(task); queueDAO.push(WorkflowExecutor.DECIDER_QUEUE, workflowId, configuration.getSweepFrequency()); @@ -102,6 +93,18 @@ String startOrLoadWorkflowExecution(String snapshotResourceName, String workflow return workflow.getWorkflowId(); } + private Workflow loadWorkflowSnapshot(String resourcePath) throws Exception { + + String content = Resources.toString(WorkflowLegacyMigrationTest.class.getResource(resourcePath), Charsets.UTF_8); + String workflowId = IDGenerator.generate(); + content = content.replace(WORKFLOW_INSTANCE_ID_PLACEHOLDER, workflowId); + + Workflow workflow = objectMapper.readValue(content, Workflow.class); + workflow.setWorkflowId(workflowId); + + return workflow; + } + private String getWorkflowResourcePath(String workflowName) { return WORKFLOW_SCENARIOS_PATH_PREFIX + workflowName + WORKFLOW_SCENARIO_EXTENSION; } @@ -109,6 +112,10 @@ private String getWorkflowResourcePath(String workflowName) { @Ignore @Test @Override + /* + * This scenario cannot be recreated by loading a workflow snapshot.
+ * ForkJoins are also tested on testForkJoin() + */ public void testForkJoinNestedWithSubWorkflow() { } } diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json b/test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF.json rename to test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF.json diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json b/test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF2.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF2.json rename to test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF2.json diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json b/test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF3.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF3.json rename to test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF3.json diff --git a/test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json b/test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF4.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/ConditionalTaskWF4.json rename to test-harness/src/test/resources/integration/scenarios/legacy/ConditionalTaskWF4.json diff --git a/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json b/test-harness/src/test/resources/integration/scenarios/legacy/DynamicFanInOutTest.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTest.json rename to test-harness/src/test/resources/integration/scenarios/legacy/DynamicFanInOutTest.json diff --git a/test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json b/test-harness/src/test/resources/integration/scenarios/legacy/DynamicFanInOutTestLegacy.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/DynamicFanInOutTestLegacy.json rename to test-harness/src/test/resources/integration/scenarios/legacy/DynamicFanInOutTestLegacy.json diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json b/test-harness/src/test/resources/integration/scenarios/legacy/FanInOutNestedTest.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/FanInOutNestedTest.json rename to test-harness/src/test/resources/integration/scenarios/legacy/FanInOutNestedTest.json diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutTest.json b/test-harness/src/test/resources/integration/scenarios/legacy/FanInOutTest.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/FanInOutTest.json rename to test-harness/src/test/resources/integration/scenarios/legacy/FanInOutTest.json diff --git a/test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json b/test-harness/src/test/resources/integration/scenarios/legacy/FanInOutTest_2.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/FanInOutTest_2.json rename to 
test-harness/src/test/resources/integration/scenarios/legacy/FanInOutTest_2.json diff --git a/test-harness/src/test/resources/integration/scenarios/RTOWF.json b/test-harness/src/test/resources/integration/scenarios/legacy/RTOWF.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/RTOWF.json rename to test-harness/src/test/resources/integration/scenarios/legacy/RTOWF.json diff --git a/test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json b/test-harness/src/test/resources/integration/scenarios/legacy/WorkflowWithSubWorkflow.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/WorkflowWithSubWorkflow.json rename to test-harness/src/test/resources/integration/scenarios/legacy/WorkflowWithSubWorkflow.json diff --git a/test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json b/test-harness/src/test/resources/integration/scenarios/legacy/concurrentWorkflowExecutions.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/concurrentWorkflowExecutions.json rename to test-harness/src/test/resources/integration/scenarios/legacy/concurrentWorkflowExecutions.json diff --git a/test-harness/src/test/resources/integration/scenarios/empty_workflow.json b/test-harness/src/test/resources/integration/scenarios/legacy/empty_workflow.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/empty_workflow.json rename to test-harness/src/test/resources/integration/scenarios/legacy/empty_workflow.json diff --git a/test-harness/src/test/resources/integration/scenarios/forkJoinNested.json b/test-harness/src/test/resources/integration/scenarios/legacy/forkJoinNested.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/forkJoinNested.json rename to test-harness/src/test/resources/integration/scenarios/legacy/forkJoinNested.json diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf.json b/test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/junit_test_wf.json rename to test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf.json diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json b/test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf3.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/junit_test_wf3.json rename to test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf3.json diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json b/test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf_non_restartable.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/junit_test_wf_non_restartable.json rename to test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf_non_restartable.json diff --git a/test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json b/test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf_sw.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/junit_test_wf_sw.json rename to test-harness/src/test/resources/integration/scenarios/legacy/junit_test_wf_sw.json diff --git 
a/test-harness/src/test/resources/integration/scenarios/longRunningWf.json b/test-harness/src/test/resources/integration/scenarios/legacy/longRunningWf.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/longRunningWf.json rename to test-harness/src/test/resources/integration/scenarios/legacy/longRunningWf.json diff --git a/test-harness/src/test/resources/integration/scenarios/retry.json b/test-harness/src/test/resources/integration/scenarios/legacy/retry.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/retry.json rename to test-harness/src/test/resources/integration/scenarios/legacy/retry.json diff --git a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json b/test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowFailureWithTerminalError.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/simpleWorkflowFailureWithTerminalError.json rename to test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowFailureWithTerminalError.json diff --git a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json b/test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowWithTaskSpecificDomain.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTaskSpecificDomain.json rename to test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowWithTaskSpecificDomain.json diff --git a/test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json b/test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowWithTasksInOneDomain.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/simpleWorkflowWithTasksInOneDomain.json rename to test-harness/src/test/resources/integration/scenarios/legacy/simpleWorkflowWithTasksInOneDomain.json diff --git a/test-harness/src/test/resources/integration/scenarios/template_workflow.json b/test-harness/src/test/resources/integration/scenarios/legacy/template_workflow.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/template_workflow.json rename to test-harness/src/test/resources/integration/scenarios/legacy/template_workflow.json diff --git a/test-harness/src/test/resources/integration/scenarios/test_event.json b/test-harness/src/test/resources/integration/scenarios/legacy/test_event.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/test_event.json rename to test-harness/src/test/resources/integration/scenarios/legacy/test_event.json diff --git a/test-harness/src/test/resources/integration/scenarios/test_wait.json b/test-harness/src/test/resources/integration/scenarios/legacy/test_wait.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/test_wait.json rename to test-harness/src/test/resources/integration/scenarios/legacy/test_wait.json diff --git a/test-harness/src/test/resources/integration/scenarios/timeout.json b/test-harness/src/test/resources/integration/scenarios/legacy/timeout.json similarity index 100% rename from test-harness/src/test/resources/integration/scenarios/timeout.json rename to test-harness/src/test/resources/integration/scenarios/legacy/timeout.json From 9f1100d6ee2295bca2779328b4fb3ea79fd604c5 Mon Sep 17 00:00:00 2001 
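Condensed, the snapshot-loading path in WorkflowLegacyMigrationTest above comes down to four steps: read the legacy scenario JSON, stamp it with a freshly generated workflow id, persist it through the execution DAO, and re-prime the task and decider queues. A minimal sketch of that flow, assuming the same injected collaborators the test declares (executionDAO, queueDAO, workflowExecutor, objectMapper, configuration); the method name is illustrative:

    private Workflow materializeLegacySnapshot(String resourcePath) throws Exception {
        // 1. Read the scenario file; each file carries a WORKFLOW_INSTANCE_ID
        //    placeholder so that every run gets a unique workflow id.
        String content = Resources.toString(getClass().getResource(resourcePath), Charsets.UTF_8);
        String workflowId = IDGenerator.generate();
        content = content.replace("WORKFLOW_INSTANCE_ID", workflowId);

        // 2. Deserialize the snapshot and stamp the generated id onto it.
        Workflow workflow = objectMapper.readValue(content, Workflow.class);
        workflow.setWorkflowId(workflowId);

        // 3. Persist the tasks and the workflow through the DAOs.
        executionDAO.createTasks(workflow.getTasks());
        executionDAO.createWorkflow(workflow);

        // 4. Re-prime the queues: workers and the decider sweep act only on
        //    what is queued, not on what is merely persisted.
        workflow.getTasks().forEach(task -> {
            workflowExecutor.addTaskToQueue(task);
            queueDAO.push(WorkflowExecutor.DECIDER_QUEUE, workflowId, configuration.getSweepFrequency());
        });
        return workflow;
    }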
From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Mon, 27 Aug 2018 13:32:38 -0700 Subject: [PATCH 138/163] Code review updates - Updated comments related to backwards compatibility - Updated method name for task definition population - Code style updates --- .../core/execution/WorkflowExecutor.java | 18 ++++++++++++------ .../core/metadata/MetadataMapperService.java | 9 +++++---- .../conductor/service/ExecutionService.java | 12 ++++++++---- 3 files changed, 25 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index e1f771e4e6..82fcc0fe06 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -607,8 +607,12 @@ public void updateTask(TaskResult taskResult) { String workflowId = taskResult.getWorkflowInstanceId(); Workflow workflowInstance = executionDAO.getWorkflow(workflowId); - // Backwards compatibility for legacy workflows already running - metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); + + // FIXME Backwards compatibility for legacy workflows already running. + // This code will be removed in a future version. + if(workflowInstance.getWorkflowDefinition() == null) { + workflowInstance = metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); + } Task task = executionDAO.getTask(taskResult.getTaskId()); @@ -749,8 +753,9 @@ public boolean decide(String workflowId) { //If it is a new workflow the tasks will be still empty even though include tasks is true Workflow workflow = executionDAO.getWorkflow(workflowId, true); - // Backwards compatibility for legacy workflows already running - metadataMapperService.populateWorkflowWithDefinitions(workflow); + // FIXME Backwards compatibility for legacy workflows already running. + // This code will be removed in a future version. + workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); try { DeciderOutcome outcome = deciderService.decide(workflow); @@ -855,8 +860,9 @@ public void skipTaskFromWorkflow(String workflowId, String taskReferenceName, Sk Workflow wf = executionDAO.getWorkflow(workflowId, true); - // Backwards compatibility for legacy workflows already running - metadataMapperService.populateWorkflowWithDefinitions(wf); + // FIXME Backwards compatibility for legacy workflows already running. + // This code will be removed in a future version.
+ wf = metadataMapperService.populateWorkflowWithDefinitions(wf); // If the wf is not running then cannot skip any task if (!wf.getStatus().equals(WorkflowStatus.RUNNING)) { diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index 8c9f5cc7a9..104f6aac49 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -82,9 +82,10 @@ Optional lookupLatestWorkflowDefinition(String workflowName) { public Workflow populateWorkflowWithDefinitions(Workflow workflow) { - WorkflowDef workflowDefinition = workflow.getWorkflowDefinition(); - if (workflowDefinition == null) { - workflowDefinition = lookupForWorkflowDefinition(workflow.getWorkflowName(), workflow.getWorkflowVersion()); + WorkflowDef workflowDefinition = Optional.ofNullable(workflow.getWorkflowDefinition()) + .orElseGet(() -> lookupForWorkflowDefinition(workflow.getWorkflowName(), workflow.getWorkflowVersion())); + + if (workflow.getWorkflowDefinition() == null) { workflow.setWorkflowDefinition(workflowDefinition); } @@ -153,7 +154,7 @@ private void checkForMissingDefinitions(WorkflowDef workflowDefinition) { } } - public Task populateTaskWithDefinitions(Task task) { + public Task populateTaskWithDefinition(Task task) { populateWorkflowTaskWithDefinition(task.getWorkflowTask()); return task; } diff --git a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java index 8dbcee41c8..6b7da651ca 100644 --- a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java +++ b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java @@ -81,9 +81,13 @@ public class ExecutionService { @Inject - public ExecutionService(WorkflowExecutor wfProvider, ExecutionDAO executionDAO, QueueDAO queue, - MetadataDAO metadata, MetadataMapperService metadataMapperService, - IndexDAO indexer, Configuration config) { + public ExecutionService(WorkflowExecutor wfProvider, + ExecutionDAO executionDAO, + QueueDAO queue, + MetadataDAO metadata, + MetadataMapperService metadataMapperService, + IndexDAO indexer, + Configuration config) { this.executor = wfProvider; this.executionDAO = executionDAO; this.queue = queue; @@ -174,7 +178,7 @@ public List getTasks(String taskType, String startKey, int count) throws E public Task getTask(String taskId) throws Exception { Task task = executionDAO.getTask(taskId); - metadataMapperService.populateTaskWithDefinitions(task); + task = metadataMapperService.populateTaskWithDefinition(task); return task; } From 34d1a5919afb19b6b731b52d4382e5a04831bb47 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 28 Aug 2018 15:25:05 -0700 Subject: [PATCH 139/163] Updated MetadataMapperService with logging messages for checks --- .../core/metadata/MetadataMapperService.java | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index 104f6aac49..1028628e5b 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -73,9 +73,8 @@ Optional 
lookupWorkflowDefinition(String workflowName, int workflow @VisibleForTesting Optional lookupLatestWorkflowDefinition(String workflowName) { - // FIXME: Add messages. - checkNotNull(workflowName); - checkArgument(StringUtils.isNotBlank(workflowName)); + checkNotNull(workflowName, "Workflow name cannot be null when searching for a definition"); + checkArgument(StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank when searching for a definition"); return metadataDAO.getLatest(workflowName); } @@ -83,11 +82,11 @@ Optional lookupLatestWorkflowDefinition(String workflowName) { public Workflow populateWorkflowWithDefinitions(Workflow workflow) { WorkflowDef workflowDefinition = Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElseGet(() -> lookupForWorkflowDefinition(workflow.getWorkflowName(), workflow.getWorkflowVersion())); - - if (workflow.getWorkflowDefinition() == null) { - workflow.setWorkflowDefinition(workflowDefinition); - } + .orElseGet(() -> { + WorkflowDef wd = lookupForWorkflowDefinition(workflow.getWorkflowName(), workflow.getWorkflowVersion()); + workflow.setWorkflowDefinition(wd); + return wd; + }); workflowDefinition.collectTasks().stream().forEach( workflowTask -> { @@ -99,7 +98,7 @@ public Workflow populateWorkflowWithDefinitions(Workflow workflow) { } ); - checkForMissingDefinitions(workflowDefinition); + checkNotEmptyDefinitions(workflowDefinition); return workflow; } @@ -108,7 +107,7 @@ public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { workflowDefinition.collectTasks().stream().forEach( workflowTask -> populateWorkflowTaskWithDefinition(workflowTask) ); - checkForMissingDefinitions(workflowDefinition); + checkNotEmptyDefinitions(workflowDefinition); return workflowDefinition; } @@ -139,7 +138,7 @@ private void populateVersionForSubWorkflow(WorkflowTask workflowTask) { } } - private void checkForMissingDefinitions(WorkflowDef workflowDefinition) { + private void checkNotEmptyDefinitions(WorkflowDef workflowDefinition) { // Obtain the names of the tasks with missing definitions Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() From 5505777b5f35cc63584a23e76a32b833fe8a1169 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Wed, 29 Aug 2018 15:33:26 +0200 Subject: [PATCH 140/163] Support configuring MySQL connections. --- .../conductor/mysql/MySQLConfiguration.java | 43 ++++++++++++++++--- .../mysql/MySQLDataSourceProvider.java | 22 +++++++++- 2 files changed, 58 insertions(+), 7 deletions(-) diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java index 42dcdfa19c..096e33c392 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java @@ -1,8 +1,10 @@ package com.netflix.conductor.mysql; import com.netflix.conductor.core.config.Configuration; +import com.zaxxer.hikari.HikariConfig; import java.util.Optional; +import java.util.concurrent.TimeUnit; public interface MySQLConfiguration extends Configuration { @@ -21,23 +23,52 @@ public interface MySQLConfiguration extends Configuration { String FLYWAY_TABLE_PROPERTY_NAME = "flyway.table"; Optional FLYWAY_TABLE_DEFAULT_VALUE = Optional.empty(); - default String getJdbcUrl(){ + // The defaults are currently in line with the HikariConfig defaults, which are unfortunately private. 
+ String CONNECTION_MAX_LIFETIME_PROPERTY_NAME = "conductor.mysql.connection.lifetime.max"; + long CONNECTION_MAX_LIFETIME_DEFAULT_VALUE = TimeUnit.MINUTES.toMillis(30); + + String CONNECTION_IDLE_TIMEOUT_PROPERTY_NAME = "conductor.mysql.connection.idle.timeout"; + long CONNECTION_IDLE_TIMEOUT_DEFAULT_VALUE = TimeUnit.MINUTES.toMillis(10); + + String CONNECTION_TIMEOUT_PROPERTY_NAME = "conductor.mysql.connection.timeout"; + long CONNECTION_TIMEOUT_DEFAULT_VALUE = TimeUnit.SECONDS.toMillis(30); + + String AUTO_COMMIT_PROPERTY_NAME = "conductor.mysql.autocommit"; + boolean AUTO_COMMIT_DEFAULT_VALUE = true; + + default String getJdbcUrl() { return getProperty(JDBC_URL_PROPERTY_NAME, JDBC_URL_DEFAULT_VALUE); } - default String getJdbcUserName(){ + default String getJdbcUserName() { return getProperty(JDBC_USER_NAME_PROPERTY_NAME, JDBC_USER_NAME_DEFAULT_VALUE); } - default String getJdbcPassword(){ + default String getJdbcPassword() { return getProperty(JDBC_PASSWORD_PROPERTY_NAME, JDBC_PASSWORD_DEFAULT_VALUE); } - default boolean isFlywayEnabled(){ + default boolean isFlywayEnabled() { return getBoolProperty(FLYWAY_ENABLED_PROPERTY_NAME, FLYWAY_ENABLED_DEFAULT_VALUE); } - default Optional getFlywayTable(){ - return Optional.ofNullable(getProperty(FLYWAY_TABLE_PROPERTY_NAME, null)); + default Optional getFlywayTable() { + return Optional.ofNullable(getProperty(FLYWAY_TABLE_PROPERTY_NAME, null)); + } + + default long getConnectionMaxLifetime() { + return getLongProperty(CONNECTION_MAX_LIFETIME_PROPERTY_NAME, CONNECTION_MAX_LIFETIME_DEFAULT_VALUE); + } + + default long getConnectionIdleTimeout() { + return getLongProperty(CONNECTION_IDLE_TIMEOUT_PROPERTY_NAME, CONNECTION_IDLE_TIMEOUT_DEFAULT_VALUE); + } + + default long getConnectionTimeout() { + return getLongProperty(CONNECTION_TIMEOUT_PROPERTY_NAME, CONNECTION_TIMEOUT_DEFAULT_VALUE); + } + + default boolean isAutoCommit() { + return getBoolProperty(AUTO_COMMIT_PROPERTY_NAME, AUTO_COMMIT_DEFAULT_VALUE); } } diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java index abd596b71c..045da241f0 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java @@ -1,11 +1,16 @@ package com.netflix.conductor.mysql; +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import org.flywaydb.core.Flyway; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.util.concurrent.ThreadFactory; + import javax.inject.Inject; import javax.inject.Provider; import javax.sql.DataSource; @@ -22,7 +27,7 @@ public MySQLDataSourceProvider(MySQLConfiguration configuration) { @Override public DataSource get() { - HikariDataSource dataSource = new HikariDataSource(); + HikariDataSource dataSource = new HikariDataSource(createConfiguration()); dataSource.setJdbcUrl(configuration.getJdbcUrl()); dataSource.setUsername(configuration.getJdbcUserName()); dataSource.setPassword(configuration.getJdbcPassword()); @@ -32,6 +37,21 @@ public DataSource get() { return dataSource; } + private HikariConfig createConfiguration(){ + HikariConfig cfg = new HikariConfig(); + cfg.setMaxLifetime(configuration.getConnectionMaxLifetime()); + cfg.setIdleTimeout(configuration.getConnectionIdleTimeout()); + 
cfg.setConnectionTimeout(configuration.getConnectionTimeout()); + cfg.setAutoCommit(configuration.isAutoCommit()); + + ThreadFactory tf = new ThreadFactoryBuilder() + .setDaemon(true) + .setNameFormat("hikari-mysql-%d") + .build(); + + cfg.setThreadFactory(tf); + return cfg; + } // TODO Move this into a class that has complete lifecycle for the connection, i.e. startup and shutdown. private void flywayMigrate(DataSource dataSource) { boolean enabled = configuration.isFlywayEnabled(); From b4f5d5b99d5a1fbdc5dc6f8a797a0ff288e05ef3 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Thu, 30 Aug 2018 10:59:28 +0200 Subject: [PATCH 141/163] Fix MySQL connection leak in tests. --- .../conductor/dao/mysql/MySQLDAOTestUtil.java | 6 +++--- .../dao/mysql/MySQLExecutionDAOTest.java | 14 +++++++++++++- .../dao/mysql/MySQLMetadataDAOTest.java | 14 +++++++++++++- .../dao/mysql/MySQLPushPopQueueDAOTest.java | 13 ++++++++++++- .../conductor/dao/mysql/MySQLQueueDAOTest.java | 16 ++++++++++++++-- 5 files changed, 55 insertions(+), 8 deletions(-) diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java index af1a86e362..384bfa690f 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java @@ -22,7 +22,7 @@ @SuppressWarnings("Duplicates") public class MySQLDAOTestUtil { private static final Logger logger = LoggerFactory.getLogger(MySQLDAOTestUtil.class); - private final DataSource dataSource; + private final HikariDataSource dataSource; private final TestConfiguration testConfiguration = new TestConfiguration(); private final ObjectMapper objectMapper = new JsonMapperProvider().get(); @@ -38,7 +38,7 @@ public class MySQLDAOTestUtil { this.dataSource = getDataSource(testConfiguration); } - private DataSource getDataSource(Configuration config) { + private HikariDataSource getDataSource(Configuration config) { HikariDataSource dataSource = new HikariDataSource(); dataSource.setJdbcUrl(config.getProperty("jdbc.url", "jdbc:mysql://localhost:33307/conductor")); @@ -70,7 +70,7 @@ private synchronized static void flywayMigrate(DataSource dataSource) { } } - public DataSource getDataSource() { + public HikariDataSource getDataSource() { return dataSource; } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java index bb4a78efcf..415fb500c0 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java @@ -7,6 +7,8 @@ import com.netflix.conductor.dao.IndexDAO; import com.netflix.conductor.dao.MetadataDAO; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; @@ -19,7 +21,7 @@ @SuppressWarnings("Duplicates") public class MySQLExecutionDAOTest extends ExecutionDAOTest { - private final MySQLDAOTestUtil testMySQL = new MySQLDAOTestUtil(); + private static final MySQLDAOTestUtil testMySQL = new MySQLDAOTestUtil(); private MySQLExecutionDAO executionDAO; @Before @@ -32,6 +34,16 @@ public void setup() throws Exception { testMySQL.resetAllData(); } + @After + public void teardown() throws Exception { + testMySQL.resetAllData(); + } + + @AfterClass 
+ public static void cleanup() throws Exception { + testMySQL.getDataSource().close(); + } + @Test public void testPendingByCorrelationId() throws Exception { diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java index 304c976ec9..244b33c229 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java @@ -5,6 +5,8 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.core.execution.ApplicationException; import org.apache.commons.lang3.builder.EqualsBuilder; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -26,15 +28,25 @@ @RunWith(JUnit4.class) public class MySQLMetadataDAOTest { - private final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); + private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); + private MySQLMetadataDAO dao; @Before public void setup() throws Exception { dao = new MySQLMetadataDAO(testUtil.getObjectMapper(), testUtil.getDataSource(), testUtil.getTestConfiguration()); + } + + @After + public void teardown() throws Exception { testUtil.resetAllData(); } + @AfterClass + public static void cleanup() throws Exception { + testUtil.getDataSource().close(); + } + @Test(expected=NullPointerException.class) public void testMissingName() throws Exception { WorkflowDef def = new WorkflowDef(); diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java index 7da9a2e048..aae363bb9d 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java @@ -9,6 +9,8 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -19,16 +21,25 @@ public class MySQLPushPopQueueDAOTest { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLPushPopQueueDAOTest.class); + private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); private MySQLQueueDAO dao; - private final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); @Before public void setup() throws Exception { dao = new MySQLQueueDAO(testUtil.getObjectMapper(), testUtil.getDataSource()); + } + + @After + public void teardown() throws Exception { testUtil.resetAllData(); } + @AfterClass + public static void cleanup() throws Exception { + testUtil.getDataSource().close(); + } + @Test public void testWith2THreads() throws Exception { testPollDataWithParallelThreads(2); diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java index 5e597c9863..d97db2e025 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java @@ -3,7 +3,10 @@ import com.google.common.collect.ImmutableList; import 
com.netflix.conductor.core.events.queue.Message; +import com.zaxxer.hikari.HikariDataSource; +import org.junit.After; +import org.junit.AfterClass; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -28,16 +31,25 @@ public class MySQLQueueDAOTest { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLQueueDAOTest.class); + private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); - private final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); private MySQLQueueDAO dao; @Before public void setup() throws Exception { dao = new MySQLQueueDAO(testUtil.getObjectMapper(), testUtil.getDataSource()); - testUtil.resetAllData(); } + @After + public void teardown() throws Exception { + testUtil.resetAllData(); + } + + @AfterClass + public static void cleanup() throws Exception { + testUtil.getDataSource().close(); + } + @Rule public ExpectedException expected = ExpectedException.none(); From 9c8c3c4a9c2b41322d73aecefb128332439426df Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 30 Aug 2018 15:09:47 -0700 Subject: [PATCH 142/163] Code review update: nitpick about constructor fields on WorkflowExecutor --- .../netflix/conductor/core/execution/WorkflowExecutor.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 82fcc0fe06..44a0128428 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -91,7 +91,7 @@ public class WorkflowExecutor { private final MetadataMapperService metadataMapperService; - private RateLimitingService rateLimitingService; + private final RateLimitingService rateLimitingService; private final ParametersUtils parametersUtils = new ParametersUtils(); @@ -117,7 +117,7 @@ public WorkflowExecutor( this.config = config; this.metadataMapperService = metadataMapperService; this.rateLimitingService = rateLimitingService; - activeWorkerLastPollInSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); + this.activeWorkerLastPollInSecs = config.getIntProperty("tasks.active.worker.lastpoll", 10); } /** From 5fa87f8605af6e9c57473dedd2c2c7f67bdd6cd4 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Fri, 31 Aug 2018 10:12:32 +0200 Subject: [PATCH 143/163] Change MySQL tests to use a separate namespaced logical database for each test. 
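The isolation pattern this patch introduces is worth spelling out: a JUnit TestName rule hands each test method its own schema name, the test utility creates and Flyway-migrates that schema, and teardown both truncates the tables and closes the Hikari pool so connections cannot leak across tests. A minimal sketch of the resulting test shape, assuming the MySQLDAOTestUtil API shown in the diff below (the test class name is illustrative):

    import org.junit.After;
    import org.junit.Before;
    import org.junit.Rule;
    import org.junit.rules.TestName;

    public class SomeMySQLDAOTest {

        @Rule
        public TestName name = new TestName(); // e.g. "testPendingByCorrelationId"

        private MySQLDAOTestUtil testUtil;

        @Before
        public void setup() throws Exception {
            // One logical database per test method: tests can no longer see each
            // other's rows or depend on a shared, run-once Flyway migration.
            testUtil = new MySQLDAOTestUtil(name.getMethodName());
        }

        @After
        public void teardown() {
            testUtil.resetAllData();
            // Closing the pool after every test is what keeps the earlier
            // connection leak from accumulating across the suite.
            testUtil.getDataSource().close();
        }
    }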
--- .../conductor/dao/ExecutionDAOTest.java | 2 +- .../conductor/dao/mysql/MySQLDAOTestUtil.java | 65 ++++++++----------- .../dao/mysql/MySQLExecutionDAOTest.java | 13 ++-- .../dao/mysql/MySQLMetadataDAOTest.java | 14 ++-- .../dao/mysql/MySQLPushPopQueueDAOTest.java | 13 ++-- .../dao/mysql/MySQLQueueDAOTest.java | 22 +++---- 6 files changed, 59 insertions(+), 70 deletions(-) diff --git a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java index f38b2f2369..089a5f98ab 100644 --- a/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java +++ b/core/src/test/java/com/netflix/conductor/dao/ExecutionDAOTest.java @@ -274,7 +274,7 @@ public void testPending() throws Exception { } @Test - public void test() throws Exception { + public void complexExecutionTest() throws Exception { Workflow workflow = createTestWorkflow(); String workflowId = getExecutionDAO().createWorkflow(workflow); diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java index 384bfa690f..f80b9c48d4 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLDAOTestUtil.java @@ -14,7 +14,6 @@ import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; -import java.util.concurrent.atomic.AtomicBoolean; import javax.sql.DataSource; @@ -26,14 +25,12 @@ public class MySQLDAOTestUtil { private final TestConfiguration testConfiguration = new TestConfiguration(); private final ObjectMapper objectMapper = new JsonMapperProvider().get(); - static AtomicBoolean migrated = new AtomicBoolean(false); - - MySQLDAOTestUtil() { - testConfiguration.setProperty("jdbc.url", "jdbc:mysql://localhost:33307/conductor"); + MySQLDAOTestUtil(String dbName) throws Exception { + testConfiguration.setProperty("jdbc.url", "jdbc:mysql://localhost:33307/" + dbName); testConfiguration.setProperty("jdbc.username", "root"); testConfiguration.setProperty("jdbc.password", ""); // Ensure the DB starts - EmbeddedDatabase.INSTANCE.getDB(); + EmbeddedDatabase.INSTANCE.getDB().createDB(dbName); this.dataSource = getDataSource(testConfiguration); } @@ -49,25 +46,17 @@ private HikariDataSource getDataSource(Configuration config) { // Prevent DB from getting exhausted during rapid testing dataSource.setMaximumPoolSize(8); - if (!migrated.get()) { - flywayMigrate(dataSource); - } + flywayMigrate(dataSource); return dataSource; } - private synchronized static void flywayMigrate(DataSource dataSource) { - if(migrated.get()) { - return; - } + private void flywayMigrate(DataSource dataSource) { - synchronized (MySQLDAOTestUtil.class) { - Flyway flyway = new Flyway(); - flyway.setDataSource(dataSource); - flyway.setPlaceholderReplacement(false); - flyway.migrate(); - migrated.getAndSet(true); - } + Flyway flyway = new Flyway(); + flyway.setDataSource(dataSource); + flyway.setPlaceholderReplacement(false); + flyway.migrate(); } public HikariDataSource getDataSource() { @@ -85,23 +74,23 @@ public ObjectMapper getObjectMapper() { public void resetAllData() { logger.info("Resetting data for test"); try (Connection connection = dataSource.getConnection()) { - try(ResultSet rs = connection.prepareStatement("SHOW TABLES").executeQuery(); - PreparedStatement keysOn = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=1")) 
{ - try(PreparedStatement keysOff = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=0")){ - keysOff.execute(); - while(rs.next()) { - String table = rs.getString(1); - try(PreparedStatement ps = connection.prepareStatement("TRUNCATE TABLE " + table)) { - ps.execute(); - } - } - } finally { - keysOn.execute(); - } - } - } catch (SQLException ex) { - logger.error(ex.getMessage(), ex); - throw new RuntimeException(ex); - } + try (ResultSet rs = connection.prepareStatement("SHOW TABLES").executeQuery(); + PreparedStatement keysOn = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=1")) { + try (PreparedStatement keysOff = connection.prepareStatement("SET FOREIGN_KEY_CHECKS=0")) { + keysOff.execute(); + while (rs.next()) { + String table = rs.getString(1); + try (PreparedStatement ps = connection.prepareStatement("TRUNCATE TABLE " + table)) { + ps.execute(); + } + } + } finally { + keysOn.execute(); + } + } + } catch (SQLException ex) { + logger.error(ex.getMessage(), ex); + throw new RuntimeException(ex); + } } } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java index 415fb500c0..7a856f0f6c 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLExecutionDAOTest.java @@ -5,12 +5,12 @@ import com.netflix.conductor.dao.ExecutionDAO; import com.netflix.conductor.dao.ExecutionDAOTest; import com.netflix.conductor.dao.IndexDAO; -import com.netflix.conductor.dao.MetadataDAO; import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import java.util.List; @@ -21,11 +21,14 @@ @SuppressWarnings("Duplicates") public class MySQLExecutionDAOTest extends ExecutionDAOTest { - private static final MySQLDAOTestUtil testMySQL = new MySQLDAOTestUtil(); + private MySQLDAOTestUtil testMySQL; private MySQLExecutionDAO executionDAO; + @Rule public TestName name = new TestName(); + @Before public void setup() throws Exception { + testMySQL = new MySQLDAOTestUtil(name.getMethodName()); executionDAO = new MySQLExecutionDAO( mock(IndexDAO.class), testMySQL.getObjectMapper(), @@ -37,10 +40,6 @@ public void setup() throws Exception { @After public void teardown() throws Exception { testMySQL.resetAllData(); - } - - @AfterClass - public static void cleanup() throws Exception { testMySQL.getDataSource().close(); } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java index 244b33c229..61f52f4ad2 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLMetadataDAOTest.java @@ -6,9 +6,10 @@ import com.netflix.conductor.core.execution.ApplicationException; import org.apache.commons.lang3.builder.EqualsBuilder; import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -28,22 +29,21 @@ @RunWith(JUnit4.class) public class MySQLMetadataDAOTest { - private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); - + private 
MySQLDAOTestUtil testUtil; private MySQLMetadataDAO dao; + @Rule + public TestName name = new TestName(); + @Before public void setup() throws Exception { + testUtil = new MySQLDAOTestUtil(name.getMethodName()); dao = new MySQLMetadataDAO(testUtil.getObjectMapper(), testUtil.getDataSource(), testUtil.getTestConfiguration()); } @After public void teardown() throws Exception { testUtil.resetAllData(); - } - - @AfterClass - public static void cleanup() throws Exception { testUtil.getDataSource().close(); } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java index aae363bb9d..e948038ffe 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLPushPopQueueDAOTest.java @@ -10,10 +10,11 @@ import java.util.concurrent.Future; import org.junit.After; -import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -21,22 +22,22 @@ public class MySQLPushPopQueueDAOTest { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLPushPopQueueDAOTest.class); - private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); + private MySQLDAOTestUtil testUtil; private MySQLQueueDAO dao; + @Rule + public TestName name = new TestName(); + @Before public void setup() throws Exception { + testUtil = new MySQLDAOTestUtil(name.getMethodName()); dao = new MySQLQueueDAO(testUtil.getObjectMapper(), testUtil.getDataSource()); } @After public void teardown() throws Exception { testUtil.resetAllData(); - } - - @AfterClass - public static void cleanup() throws Exception { testUtil.getDataSource().close(); } diff --git a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java index d97db2e025..cd3037d14d 100644 --- a/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java +++ b/mysql-persistence/src/test/java/com/netflix/conductor/dao/mysql/MySQLQueueDAOTest.java @@ -3,7 +3,6 @@ import com.google.common.collect.ImmutableList; import com.netflix.conductor.core.events.queue.Message; -import com.zaxxer.hikari.HikariDataSource; import org.junit.After; import org.junit.AfterClass; @@ -11,6 +10,7 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,30 +31,30 @@ public class MySQLQueueDAOTest { private static final Logger LOGGER = LoggerFactory.getLogger(MySQLQueueDAOTest.class); - private static final MySQLDAOTestUtil testUtil = new MySQLDAOTestUtil(); + private MySQLDAOTestUtil testUtil; private MySQLQueueDAO dao; + @Rule + public TestName name = new TestName(); + + @Rule + public ExpectedException expected = ExpectedException.none(); + @Before public void setup() throws Exception { + testUtil = new MySQLDAOTestUtil(name.getMethodName()); dao = new MySQLQueueDAO(testUtil.getObjectMapper(), testUtil.getDataSource()); } @After public void teardown() throws Exception { testUtil.resetAllData(); + testUtil.getDataSource().close(); } - @AfterClass - public static void cleanup() throws Exception { - 
testUtil.getDataSource().close(); - } - - @Rule - public ExpectedException expected = ExpectedException.none(); - @Test - public void test() { + public void complexQueueTest() { String queueName = "TestQueue"; long offsetTimeInSecond = 0; From a8d507a521c4cac4ceeaab1b1b0be522f1c93f9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 31 Aug 2018 13:04:52 -0700 Subject: [PATCH 144/163] Updated JUnit dependencies in order to use the latest version of JUnit 4 - junit-dep has been deprecated for a while and the junit:junit dependency was needed in order to execute the integration tests in a deterministic way --- build.gradle | 6 ++++-- versionsOfDependencies.gradle | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index 0c1c18a462..4473de01a8 100644 --- a/build.gradle +++ b/build.gradle @@ -36,8 +36,10 @@ subprojects { } dependencies { - testCompile "junit:junit-dep:${revJUnit}" - testCompile "org.mockito:mockito-all:${revMockito}" + testCompile "junit:junit:${revJUnit}" + testCompile("org.mockito:mockito-core:${revMockito}") { + exclude group: 'org.hamcrest', module: 'hamcrest-core' + } } group = "com.netflix.${githubProjectName}" diff --git a/versionsOfDependencies.gradle b/versionsOfDependencies.gradle index a297d54dec..7807347274 100644 --- a/versionsOfDependencies.gradle +++ b/versionsOfDependencies.gradle @@ -29,14 +29,14 @@ ext { revJerseyBundle = '1.19.1' revJerseyClient = '1.19.4' revJerseyGuice = '1.19.4' - revJUnit = '4.10' + revJUnit = '4.12' revJsr311Api = '1.1.1' revJq = '0.0.8' revLog4jApi = '2.9.1' revLog4jCore = '2.9.1' revMariaDB4j = '2.2.3' revRxJava = '1.2.2' - revMockito = '1.10.0' + revMockito = '1.10.19' revMySqlConnector = '8.0.11' revNatsStreaming = '0.5.0' revJetteyServer = '9.3.9.v20160517' From fa39a4d6fb804b6a348cdd741c85e07cc2669708 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Fri, 31 Aug 2018 13:06:20 -0700 Subject: [PATCH 145/163] Fixed integration tests following the deterministic execution approach - Polling data at the moment does not get cleared on each of the integration tests.
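For illustration, the time-window filter that the tests now apply looks roughly like this (a sketch using the same names that appear in the diff further down; it relies on java.util.function.Predicate):

    // Record when the test starts, then only assert on poll data newer than that,
    // so polling recorded by earlier tests in the same JVM cannot leak in.
    long startTimeTimestamp = System.currentTimeMillis();
    Predicate<PollData> pollDataWithinTestTimes =
            pollData -> pollData.getLastPollTime() != 0
                    && pollData.getLastPollTime() > startTimeTimestamp;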
As the interface does not allow clearing the polling data information, the integration tests will only assert on polling data that has been created since the start of a specific test --- .../common/metadata/tasks/PollData.java | 12 +++- .../AbstractWorkflowServiceTest.java | 20 ++++-- .../integration/WorkflowServiceTest.java | 61 ------------------- 3 files changed, 25 insertions(+), 68 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java index cbd4c30d40..5d8a135297 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/tasks/PollData.java @@ -108,6 +108,14 @@ public synchronized boolean equals(Object obj) { return false; return true; } - - + + @Override + public String toString() { + return "PollData{" + + "queueName='" + queueName + '\'' + + ", domain='" + domain + '\'' + + ", workerId='" + workerId + '\'' + + ", lastPollTime=" + lastPollTime + + '}'; + } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java index 1b7d036566..2737afd1cf 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java @@ -48,10 +48,12 @@ import org.apache.commons.lang.StringUtils; import org.junit.After; import org.junit.Before; +import org.junit.FixMethodOrder; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; +import org.junit.runners.MethodSorters; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -69,6 +71,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; +import java.util.function.Predicate; import java.util.stream.Collectors; import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; @@ -79,6 +82,7 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; +@FixMethodOrder(MethodSorters.NAME_ASCENDING) public abstract class AbstractWorkflowServiceTest { private static final Logger logger = LoggerFactory.getLogger(AbstractWorkflowServiceTest.class); @@ -1637,6 +1641,8 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception { @Test public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { + long startTimeTimestamp = System.currentTimeMillis(); + clearWorkflows(); createWorkflowDefForDomain(); @@ -1740,7 +1746,12 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); assertEquals("task1.Done", es.getOutput().get("o3")); - List<PollData> pddata = workflowExecutionService.getPollData("junit_task_3"); + Predicate<PollData> pollDataWithinTestTimes = pollData -> pollData.getLastPollTime() != 0 && pollData.getLastPollTime() > startTimeTimestamp; + + List<PollData> pddata = workflowExecutionService.getPollData("junit_task_3").stream() + .filter(pollDataWithinTestTimes) + .collect(Collectors.toList()); + assertTrue(pddata.size() == 2); for (PollData pd : pddata) { assertEquals(pd.getQueueName(), "junit_task_3"); @@ -1751,8 +1762,9 @@ public void
testSimpleWorkflowWithTaskSpecificDomain() throws Exception { } } - - List pdList = workflowExecutionService.getAllPollData(); + List pdList = workflowExecutionService.getAllPollData().stream() + .filter(pollDataWithinTestTimes) + .collect(Collectors.toList()); int count = 0; for (PollData pd : pdList) { if (pd.getQueueName().equals("junit_task_3")) { @@ -1763,7 +1775,6 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { } - @Test public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { @@ -1872,7 +1883,6 @@ public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); assertEquals("task1.Done", es.getOutput().get("o3")); - } @After diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java index d660c291c7..bb16ea3af1 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java @@ -18,71 +18,10 @@ */ package com.netflix.conductor.tests.integration; -import com.google.common.util.concurrent.Uninterruptibles; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.netflix.conductor.common.metadata.tasks.PollData; -import com.netflix.conductor.common.metadata.tasks.Task; -import com.netflix.conductor.common.metadata.tasks.Task.Status; -import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.RetryLogic; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; -import com.netflix.conductor.common.metadata.tasks.TaskResult; -import com.netflix.conductor.common.metadata.workflow.DynamicForkJoinTaskList; -import com.netflix.conductor.common.metadata.workflow.RerunWorkflowRequest; -import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; -import com.netflix.conductor.common.metadata.workflow.TaskType; -import com.netflix.conductor.common.metadata.workflow.WorkflowDef; -import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.run.Workflow; -import com.netflix.conductor.common.run.Workflow.WorkflowStatus; -import com.netflix.conductor.core.WorkflowContext; -import com.netflix.conductor.core.execution.ApplicationException; -import com.netflix.conductor.core.metadata.MetadataMapperService; -import com.netflix.conductor.core.execution.SystemTaskType; -import com.netflix.conductor.core.execution.WorkflowExecutor; -import com.netflix.conductor.core.execution.WorkflowSweeper; -import com.netflix.conductor.core.execution.tasks.SubWorkflow; -import com.netflix.conductor.dao.QueueDAO; -import com.netflix.conductor.service.ExecutionService; -import com.netflix.conductor.service.MetadataService; import com.netflix.conductor.tests.utils.TestRunner; - -import org.apache.commons.lang.StringUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import java.util.Arrays; -import java.util.Collections; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; import java.util.Map; -import java.util.Optional; -import 
java.util.Set; -import java.util.UUID; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.Future; -import java.util.concurrent.TimeUnit; -import java.util.stream.Collectors; - -import javax.inject.Inject; - -import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; -import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; @RunWith(TestRunner.class) public class WorkflowServiceTest extends AbstractWorkflowServiceTest { From 93f67a77f947941d0f3196d4f05719491508bffb Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 3 Sep 2018 10:43:40 +0200 Subject: [PATCH 146/163] Pin the Nodejs version to 9. Gulp and perhaps other dependencies have issues with 10. --- docker/ui/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ui/Dockerfile b/docker/ui/Dockerfile index db53cb2ef3..ab9d198ee6 100644 --- a/docker/ui/Dockerfile +++ b/docker/ui/Dockerfile @@ -1,7 +1,7 @@ # # conductor:ui - Netflix conductor UI # -FROM node:alpine +FROM node:9-alpine MAINTAINER Netflix OSS # Install the required packages for the node build From aa5abff449dad8fadb292d83af6fef347e1866d7 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 3 Sep 2018 11:18:01 +0200 Subject: [PATCH 147/163] Add some more configuration options, which were ported from `dev` based on PR feedback. --- .../conductor/mysql/MySQLConfiguration.java | 24 ++++++++++++++++++- .../mysql/MySQLDataSourceProvider.java | 3 +++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java index 096e33c392..e0bfff664a 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLConfiguration.java @@ -24,6 +24,12 @@ public interface MySQLConfiguration extends Configuration { Optional FLYWAY_TABLE_DEFAULT_VALUE = Optional.empty(); // The defaults are currently in line with the HikariConfig defaults, which are unfortunately private. + String CONNECTION_POOL_MAX_SIZE_PROPERTY_NAME = "conductor.mysql.connection.pool.size.max"; + int CONNECTION_POOL_MAX_SIZE_DEFAULT_VALUE = -1; + + String CONNECTION_POOL_MINIMUM_IDLE_PROPERTY_NAME = "conductor.mysql.connection.pool.idle.min"; + int CONNECTION_POOL_MINIMUM_IDLE_DEFAULT_VALUE = -1; + String CONNECTION_MAX_LIFETIME_PROPERTY_NAME = "conductor.mysql.connection.lifetime.max"; long CONNECTION_MAX_LIFETIME_DEFAULT_VALUE = TimeUnit.MINUTES.toMillis(30); @@ -33,8 +39,12 @@ public interface MySQLConfiguration extends Configuration { String CONNECTION_TIMEOUT_PROPERTY_NAME = "conductor.mysql.connection.timeout"; long CONNECTION_TIMEOUT_DEFAULT_VALUE = TimeUnit.SECONDS.toMillis(30); + String ISOLATION_LEVEL_PROPERTY_NAME = "conductor.mysql.transaction.isolation.level"; + String ISOLATION_LEVEL_DEFAULT_VALUE = ""; + String AUTO_COMMIT_PROPERTY_NAME = "conductor.mysql.autocommit"; - boolean AUTO_COMMIT_DEFAULT_VALUE = true; + // This is consistent with the current default when building the Hikari Client. 
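+ // Illustrative sketch only (values hypothetical): a deployment could tune
+ // the knobs added in this change with properties such as
+ //   conductor.mysql.connection.pool.size.max=10
+ //   conductor.mysql.connection.pool.idle.min=2
+ //   conductor.mysql.transaction.isolation.level=TRANSACTION_READ_COMMITTED
+ //   conductor.mysql.autocommit=false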
+ boolean AUTO_COMMIT_DEFAULT_VALUE = false; default String getJdbcUrl() { return getProperty(JDBC_URL_PROPERTY_NAME, JDBC_URL_DEFAULT_VALUE); @@ -56,6 +66,14 @@ default Optional<String> getFlywayTable() { return Optional.ofNullable(getProperty(FLYWAY_TABLE_PROPERTY_NAME, null)); } + default int getConnectionPoolMaxSize() { + return getIntProperty(CONNECTION_POOL_MAX_SIZE_PROPERTY_NAME, CONNECTION_POOL_MAX_SIZE_DEFAULT_VALUE); + } + + default int getConnectionPoolMinIdle() { + return getIntProperty(CONNECTION_POOL_MINIMUM_IDLE_PROPERTY_NAME, CONNECTION_POOL_MINIMUM_IDLE_DEFAULT_VALUE); + } + default long getConnectionMaxLifetime() { return getLongProperty(CONNECTION_MAX_LIFETIME_PROPERTY_NAME, CONNECTION_MAX_LIFETIME_DEFAULT_VALUE); } @@ -68,6 +86,10 @@ default long getConnectionTimeout() { return getLongProperty(CONNECTION_TIMEOUT_PROPERTY_NAME, CONNECTION_TIMEOUT_DEFAULT_VALUE); } + default String getTransactionIsolationLevel() { + return getProperty(ISOLATION_LEVEL_PROPERTY_NAME, ISOLATION_LEVEL_DEFAULT_VALUE); + } + default boolean isAutoCommit() { return getBoolProperty(AUTO_COMMIT_PROPERTY_NAME, AUTO_COMMIT_DEFAULT_VALUE); } diff --git a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java index 045da241f0..264334c538 100644 --- a/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java +++ b/mysql-persistence/src/main/java/com/netflix/conductor/mysql/MySQLDataSourceProvider.java @@ -39,9 +39,12 @@ public DataSource get() { private HikariConfig createConfiguration(){ HikariConfig cfg = new HikariConfig(); + cfg.setMaximumPoolSize(configuration.getConnectionPoolMaxSize()); + cfg.setMinimumIdle(configuration.getConnectionPoolMinIdle()); cfg.setMaxLifetime(configuration.getConnectionMaxLifetime()); cfg.setIdleTimeout(configuration.getConnectionIdleTimeout()); cfg.setConnectionTimeout(configuration.getConnectionTimeout()); + cfg.setTransactionIsolation(configuration.getTransactionIsolationLevel()); cfg.setAutoCommit(configuration.isAutoCommit()); ThreadFactory tf = new ThreadFactoryBuilder() From cb5710e7b739951ef6bf78c0522b262f5ec09246 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 3 Sep 2018 17:17:51 +0200 Subject: [PATCH 148/163] Pin the elasticsearch docker image to a specific version and update config to accommodate changes. --- docker/grpc/docker-compose.dependencies.yaml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/docker/grpc/docker-compose.dependencies.yaml b/docker/grpc/docker-compose.dependencies.yaml index 214ef7352e..b413e64a4f 100644 --- a/docker/grpc/docker-compose.dependencies.yaml +++ b/docker/grpc/docker-compose.dependencies.yaml @@ -18,13 +18,14 @@ services: ports: - 3306:3306 + # https://www.elastic.co/guide/en/elasticsearch/reference/5.6/docker.html elasticsearch: - image: elasticsearch:5.6-alpine + image: docker.elastic.co/elasticsearch/elasticsearch:5.6.8 environment: - ES_JAVA_OPTS: "-Xms750m -Xmx750m" - transport.host: 0.0.0.0 - discovery.type: single-node - cluster.name: conductor + - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - transport.host=0.0.0.0 + - discovery.type=single-node + - xpack.security.enabled=false networks: - internal ports: From 1841d07ddc9c75ed8adf17389157b3256396d107 Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Mon, 3 Sep 2018 17:18:20 +0200 Subject: [PATCH 149/163] Add more error handling and stack output on failure.
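The same idiom is applied at each startup step: print the full stack trace to stderr so container logs capture the root cause, then exit non-zero rather than continuing in a half-initialized state. A minimal sketch of that idiom (the helper and its name are hypothetical, not part of this patch):

    // Hypothetical helper illustrating the fail-fast startup idiom: surface the
    // stack trace, then exit with a non-zero status so a supervisor or container
    // runtime registers the failure instead of a silent hang.
    private static void runOrExit(java.util.concurrent.Callable<Void> step) {
        try {
            step.call();
        } catch (Exception e) {
            e.printStackTrace(System.err);
            System.exit(3);
        }
    }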
--- .../java/com/netflix/conductor/bootstrap/Main.java | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java index b015435a02..ddd17f3089 100644 --- a/server/src/main/java/com/netflix/conductor/bootstrap/Main.java +++ b/server/src/main/java/com/netflix/conductor/bootstrap/Main.java @@ -62,10 +62,17 @@ public static void main(String[] args) throws Exception { */ Thread.sleep(EMBEDDED_ES_INIT_TIME); } catch (Exception ioe) { + ioe.printStackTrace(System.err); System.exit(3); } } - serverInjector.getInstance(IndexDAO.class).setup(); + + try { + serverInjector.getInstance(IndexDAO.class).setup(); + } catch (Exception e){ + e.printStackTrace(System.err); + System.exit(3); + } System.out.println("\n\n\n"); @@ -80,6 +87,7 @@ public static void main(String[] args) throws Exception { try { server.start(); } catch (IOException ioe) { + ioe.printStackTrace(System.err); System.exit(3); } }); @@ -88,6 +96,7 @@ public static void main(String[] args) throws Exception { try { server.start(); } catch (Exception ioe) { + ioe.printStackTrace(System.err); System.exit(3); } }); From 7724900da7ec6c06f0aa6f22b46176a007722f5e Mon Sep 17 00:00:00 2001 From: Greg Orzell Date: Tue, 4 Sep 2018 15:48:54 +0200 Subject: [PATCH 150/163] Fix missing ProtoField. --- .../conductor/common/metadata/workflow/WorkflowTask.java | 1 + .../java/com/netflix/conductor/grpc/AbstractProtoMapper.java | 4 ++++ grpc/src/main/proto/model/workflowtask.proto | 1 + 3 files changed, 6 insertions(+) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java index 583b4c5db1..c3686a0a74 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowTask.java @@ -117,6 +117,7 @@ public void setTasks(List tasks) { @ProtoField(id = 19) private TaskDef taskDefinition; + @ProtoField(id = 20) private Boolean rateLimited; /** diff --git a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java index f7fadff09d..02575c6308 100644 --- a/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java +++ b/grpc/src/main/java/com/netflix/conductor/grpc/AbstractProtoMapper.java @@ -898,6 +898,9 @@ public WorkflowTaskPb.WorkflowTask toProto(WorkflowTask from) { if (from.getTaskDefinition() != null) { to.setTaskDefinition( toProto( from.getTaskDefinition() ) ); } + if (from.isRateLimited() != null) { + to.setRateLimited( from.isRateLimited() ); + } return to.build(); } @@ -934,6 +937,7 @@ public WorkflowTask fromProto(WorkflowTaskPb.WorkflowTask from) { if (from.hasTaskDefinition()) { to.setTaskDefinition( fromProto( from.getTaskDefinition() ) ); } + to.setRateLimited( from.getRateLimited() ); return to; } diff --git a/grpc/src/main/proto/model/workflowtask.proto b/grpc/src/main/proto/model/workflowtask.proto index b88df17a17..e16b54e9f4 100644 --- a/grpc/src/main/proto/model/workflowtask.proto +++ b/grpc/src/main/proto/model/workflowtask.proto @@ -32,4 +32,5 @@ message WorkflowTask { string sink = 17; bool optional = 18; TaskDef task_definition = 19; + bool rate_limited = 20; } From 19b74c7a3e895c93aeac265c56a92ccf3fff3aef Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 4 Sep 2018 17:44:05 -0700 Subject: [PATCH 151/163] Added toString override method for WorkflowDef --- .../common/metadata/workflow/WorkflowDef.java | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 00614b4f43..7361db83ed 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -273,4 +273,19 @@ public int hashCode() { getSchemaVersion() ); } + + @Override + public String toString() { + return "WorkflowDef{" + + "name='" + name + '\'' + + ", description='" + description + '\'' + + ", version=" + version + + ", tasks=" + tasks + + ", inputParameters=" + inputParameters + + ", outputParameters=" + outputParameters + + ", failureWorkflow='" + failureWorkflow + '\'' + + ", schemaVersion=" + schemaVersion + + ", restartable=" + restartable + + '}'; + } } From 26131cdb72192a2efaf7dade40f6550934021cc0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 4 Sep 2018 17:55:54 -0700 Subject: [PATCH 152/163] Abstracted functionality for creating objects to an abstract class - So that the logic can be reused later by the gRPC end-to-end tests --- .../integration/AbstractEndToEndTest.java | 22 +++++++++++++ .../tests/integration/End2EndTests.java | 18 ++------------- 2 files changed, 24 insertions(+), 16 deletions(-) create mode 100644 test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java new file mode 100644 index 0000000000..54108dd8b5 --- /dev/null +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -0,0 +1,22 @@ +package com.netflix.conductor.tests.integration; + +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.TaskType; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; + +public class AbstractEndToEndTest { + + protected WorkflowTask createWorkflowTask(String name) { + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName(name); + workflowTask.setWorkflowTaskType(TaskType.SIMPLE); + workflowTask.setTaskReferenceName(name); + return workflowTask; + } + + protected TaskDef createTaskDefinition(String name) { + TaskDef taskDefinition = new TaskDef(); + taskDefinition.setName(name); + return taskDefinition; + } +} diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index f8dfe39d31..22f6ee5cef 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -60,7 +60,7 @@ * @author Viren * */ -public class End2EndTests { +public class End2EndTests extends AbstractEndToEndTest { private static TaskClient taskClient; private static WorkflowClient workflowClient; @@ -315,20 +315,6 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { } - private
WorkflowTask createWorkflowTask(String name) { - WorkflowTask workflowTask = new WorkflowTask(); - workflowTask.setName(name); - workflowTask.setWorkflowTaskType(TaskType.SIMPLE); - workflowTask.setTaskReferenceName(name); - return workflowTask; - } - - private TaskDef createTaskDefinition(String name) { - TaskDef taskDefinition = new TaskDef(); - taskDefinition.setName(name); - return taskDefinition; - } - // Helper method for creating task definitions on the server private List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int numberOfTaskDefinitions) { assertNotNull(taskClient); @@ -339,7 +325,7 @@ private List createAndRegisterTaskDefinitions(String prefixTaskDefiniti def.setTimeoutPolicy(TimeoutPolicy.RETRY); definitions.add(def); } - taskClient.registerTaskDefs(definitions); + metadataClient.registerTaskDefs(definitions); return definitions; } From 7156e6382d720807d320af3e932f993fba828128 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 4 Sep 2018 17:57:29 -0700 Subject: [PATCH 153/163] Added integration tests for ephemeral workflows and ephemeral tasks on a grpc server --- .../tests/integration/End2EndGrpcTests.java | 80 ++++++++++++++++++- 1 file changed, 79 insertions(+), 1 deletion(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 83271886f6..b4b5ce2343 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -60,11 +60,12 @@ * @author Viren * */ -public class End2EndGrpcTests { +public class End2EndGrpcTests extends AbstractEndToEndTest { private static TaskClient tc; private static WorkflowClient wc; private static MetadataClient mc; private static EmbeddedElasticSearch search; + private static final String TASK_DEFINITION_PREFIX = "task_"; @BeforeClass public static void setup() throws Exception { @@ -226,4 +227,81 @@ public void testAll() throws Exception { assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); } + + @Test + public void testEphemeralWorkflowsWithStoredTasks() throws Exception { + List definitions = createAndRegisterTaskDefinitions("storedTaskDef", 5); + + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName("testEphemeralWorkflow"); + + WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); + WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflow"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = wc.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = wc.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + } + + @Test + public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName("testEphemeralWorkflowWithEphemeralTasks"); + + WorkflowTask workflowTask1 = 
createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); + TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); + workflowTask2.setTaskDefinition(taskDefinition2); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = wc.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = wc.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + + List ephemeralTasks = ephemeralWorkflow.getTasks(); + assertEquals(2, ephemeralTasks.size()); + for (WorkflowTask ephemeralTask : ephemeralTasks) { + assertNotNull(ephemeralTask.getTaskDefinition()); + } + + } + + // Helper method for creating task definitions on the server + private List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int numberOfTaskDefinitions) { + String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); + List definitions = new LinkedList<>(); + for (int i = 0; i < numberOfTaskDefinitions; i++) { + TaskDef def = new TaskDef(prefix + i, "task " + i + "description"); + def.setTimeoutPolicy(TimeoutPolicy.RETRY); + definitions.add(def); + } + mc.registerTaskDefs(definitions); + return definitions; + } } From 1042775cb5448f65b9566f4b34d6c60ff25ad875 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 4 Sep 2018 18:12:38 -0700 Subject: [PATCH 154/163] Renamed variables for clients on End2End integration tests - On behalf of better readability --- .../integration/AbstractEndToEndTest.java | 1 + .../tests/integration/End2EndGrpcTests.java | 71 +++++++++---------- .../tests/integration/End2EndTests.java | 1 - 3 files changed, 36 insertions(+), 37 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index 54108dd8b5..1099a81c03 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -5,6 +5,7 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowTask; public class AbstractEndToEndTest { + protected static final String TASK_DEFINITION_PREFIX = "task_"; protected WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index b4b5ce2343..e7fde7f176 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -61,11 +61,10 @@ * */ public class End2EndGrpcTests extends AbstractEndToEndTest { - private static TaskClient tc; - private static 
WorkflowClient wc; - private static MetadataClient mc; + private static TaskClient taskClient; + private static WorkflowClient workflowClient; + private static MetadataClient metadataClient; private static EmbeddedElasticSearch search; - private static final String TASK_DEFINITION_PREFIX = "task_"; @BeforeClass public static void setup() throws Exception { @@ -84,9 +83,9 @@ public static void setup() throws Exception { assertTrue("failed to instantiate GRPCServer", server.isPresent()); server.get().start(); - tc = new TaskClient("localhost", 8090); - wc = new WorkflowClient("localhost", 8090); - mc = new MetadataClient("localhost", 8090); + taskClient = new TaskClient("localhost", 8090); + workflowClient = new WorkflowClient("localhost", 8090); + metadataClient = new MetadataClient("localhost", 8090); } @AfterClass @@ -97,18 +96,18 @@ public static void teardown() throws Exception { @Test public void testAll() throws Exception { - assertNotNull(tc); + assertNotNull(taskClient); List defs = new LinkedList<>(); for (int i = 0; i < 5; i++) { TaskDef def = new TaskDef("t" + i, "task " + i); def.setTimeoutPolicy(TimeoutPolicy.RETRY); defs.add(def); } - mc.registerTaskDefs(defs); + metadataClient.registerTaskDefs(defs); for (int i = 0; i < 5; i++) { final String taskName = "t" + i; - TaskDef def = mc.getTaskDef(taskName); + TaskDef def = metadataClient.getTaskDef(taskName); assertNotNull(def); assertEquals(taskName, def.getName()); } @@ -129,8 +128,8 @@ public void testAll() throws Exception { def.getTasks().add(t0); def.getTasks().add(t1); - mc.registerWorkflowDef(def); - WorkflowDef foundd = mc.getWorkflowDef(def.getName(), null); + metadataClient.registerWorkflowDef(def); + WorkflowDef foundd = metadataClient.getWorkflowDef(def.getName(), null); assertNotNull(foundd); assertEquals(def.getName(), foundd.getName()); assertEquals(def.getVersion(), foundd.getVersion()); @@ -140,49 +139,49 @@ public void testAll() throws Exception { startWf.setName(def.getName()); startWf.setCorrelationId(correlationId); - String workflowId = wc.startWorkflow(startWf); + String workflowId = workflowClient.startWorkflow(startWf); assertNotNull(workflowId); System.out.println("Started workflow id=" + workflowId); - Workflow wf = wc.getWorkflow(workflowId, false); + Workflow wf = workflowClient.getWorkflow(workflowId, false); assertEquals(0, wf.getTasks().size()); assertEquals(workflowId, wf.getWorkflowId()); - wf = wc.getWorkflow(workflowId, true); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); assertEquals(t0.getTaskReferenceName(), wf.getTasks().get(0).getReferenceTaskName()); assertEquals(workflowId, wf.getWorkflowId()); - List runningIds = wc.getRunningWorkflow(def.getName(), def.getVersion()); + List runningIds = workflowClient.getRunningWorkflow(def.getName(), def.getVersion()); assertNotNull(runningIds); assertEquals(1, runningIds.size()); assertEquals(workflowId, runningIds.get(0)); - List polled = tc.batchPollTasksByTaskType("non existing task", "test", 1, 100); + List polled = taskClient.batchPollTasksByTaskType("non existing task", "test", 1, 100); assertNotNull(polled); assertEquals(0, polled.size()); - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertEquals(1, polled.size()); assertEquals(t0.getName(), polled.get(0).getTaskDefName()); Task task = polled.get(0); - 
Boolean acked = tc.ack(task.getTaskId(), "test"); + Boolean acked = taskClient.ack(task.getTaskId(), "test"); assertNotNull(acked); assertTrue(acked); task.getOutputData().put("key1", "value1"); task.setStatus(Status.COMPLETED); - tc.updateTask(new TaskResult(task)); + taskClient.updateTask(new TaskResult(task)); - polled = tc.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); + polled = taskClient.batchPollTasksByTaskType(t0.getName(), "test", 1, 100); assertNotNull(polled); assertTrue(polled.toString(), polled.isEmpty()); - wf = wc.getWorkflow(workflowId, true); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(2, wf.getTasks().size()); @@ -191,38 +190,38 @@ public void testAll() throws Exception { assertEquals(Status.COMPLETED, wf.getTasks().get(0).getStatus()); assertEquals(Status.SCHEDULED, wf.getTasks().get(1).getStatus()); - Task taskById = tc.getTaskDetails(task.getTaskId()); + Task taskById = taskClient.getTaskDetails(task.getTaskId()); assertNotNull(taskById); assertEquals(task.getTaskId(), taskById.getTaskId()); - List getTasks = tc.getPendingTasksByType(t0.getName(), null, 1); + List getTasks = taskClient.getPendingTasksByType(t0.getName(), null, 1); assertNotNull(getTasks); assertEquals(0, getTasks.size()); //getTasks only gives pending tasks - getTasks = tc.getPendingTasksByType(t1.getName(), null, 1); + getTasks = taskClient.getPendingTasksByType(t1.getName(), null, 1); assertNotNull(getTasks); assertEquals(1, getTasks.size()); - Task pending = tc.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); + Task pending = taskClient.getPendingTaskForWorkflow(workflowId, t1.getTaskReferenceName()); assertNotNull(pending); assertEquals(t1.getTaskReferenceName(), pending.getReferenceTaskName()); assertEquals(workflowId, pending.getWorkflowInstanceId()); Thread.sleep(1000); - SearchResult searchResult = wc.search("workflowType='" + def.getName() + "'"); + SearchResult searchResult = workflowClient.search("workflowType='" + def.getName() + "'"); assertNotNull(searchResult); assertEquals(1, searchResult.getTotalHits()); - wc.terminateWorkflow(workflowId, "terminate reason"); - wf = wc.getWorkflow(workflowId, true); + workflowClient.terminateWorkflow(workflowId, "terminate reason"); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.TERMINATED, wf.getStatus()); - wc.restart(workflowId); - wf = wc.getWorkflow(workflowId, true); + workflowClient.restart(workflowId); + wf = workflowClient.getWorkflow(workflowId, true); assertNotNull(wf); assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); assertEquals(1, wf.getTasks().size()); @@ -246,10 +245,10 @@ public void testEphemeralWorkflowsWithStoredTasks() throws Exception { .withName(workflowExecutionName) .withWorkflowDef(workflowDefinition); - String workflowId = wc.startWorkflow(workflowRequest); + String workflowId = workflowClient.startWorkflow(workflowRequest); assertNotNull(workflowId); - Workflow workflow = wc.getWorkflow(workflowId, true); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); @@ -276,10 +275,10 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { .withName(workflowExecutionName) .withWorkflowDef(workflowDefinition); - String workflowId = 
wc.startWorkflow(workflowRequest); + String workflowId = workflowClient.startWorkflow(workflowRequest); assertNotNull(workflowId); - Workflow workflow = wc.getWorkflow(workflowId, true); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); @@ -301,7 +300,7 @@ private List<TaskDef> createAndRegisterTaskDefinitions(String prefixTaskDefiniti def.setTimeoutPolicy(TimeoutPolicy.RETRY); definitions.add(def); } - mc.registerTaskDefs(definitions); + metadataClient.registerTaskDefs(definitions); return definitions; } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 22f6ee5cef..ece47e2e71 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -68,7 +68,6 @@ public class End2EndTests extends AbstractEndToEndTest { private static MetadataClient metadataClient; private static final int SERVER_PORT = 8080; - private static final String TASK_DEFINITION_PREFIX = "task_"; @BeforeClass public static void setup() throws Exception { From c0e51e9547b7195d9bb845added34aaf31b44011 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Tue, 4 Sep 2018 20:00:30 -0700 Subject: [PATCH 155/163] GRPC server now registers the gRPC reflection service so that clients can query the available services - Can be disabled through the property 'conductor.grpc.server.reflection.enabled' --- grpc-server/build.gradle | 1 + .../conductor/grpc/server/GRPCServer.java | 17 ++++----- .../grpc/server/GRPCServerConfiguration.java | 7 ++++ .../grpc/server/GRPCServerProvider.java | 35 +++++++++++++------ 4 files changed, 39 insertions(+), 21 deletions(-) diff --git a/grpc-server/build.gradle b/grpc-server/build.gradle index 3e4e89674b..333da15731 100644 --- a/grpc-server/build.gradle +++ b/grpc-server/build.gradle @@ -9,6 +9,7 @@ dependencies { compile project(':conductor-grpc') compile "io.grpc:grpc-netty:${revGrpc}" + compile "io.grpc:grpc-services:${revGrpc}" compile "log4j:log4j:1.2.17" testCompile "io.grpc:grpc-testing:${revGrpc}" diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index c3b9c6f92c..444614790b 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -1,18 +1,15 @@ package com.netflix.conductor.grpc.server; import com.netflix.conductor.service.Lifecycle; - +import io.grpc.BindableService; +import io.grpc.Server; +import io.grpc.ServerBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import java.io.IOException; -import java.util.Arrays; - import javax.inject.Singleton; - -import io.grpc.BindableService; -import io.grpc.Server; -import io.grpc.ServerBuilder; +import javax.inject.Singleton; +import java.io.IOException; +import java.util.List; @Singleton public class GRPCServer implements Lifecycle { @@ -21,9 +18,9 @@ public class GRPCServer implements Lifecycle { private final Server server; - public GRPCServer(int port, BindableService...
services) { + public GRPCServer(int port, List services) { ServerBuilder builder = ServerBuilder.forPort(port); - Arrays.stream(services).forEach(builder::addService); + services.stream().forEach(builder::addService); server = builder.build(); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java index 6122d8be1b..a81b83b21e 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerConfiguration.java @@ -9,6 +9,9 @@ public interface GRPCServerConfiguration extends Configuration { String PORT_PROPERTY_NAME = "conductor.grpc.server.port"; int PORT_DEFAULT_VALUE = 8090; + String SERVICE_REFLECTION_ENABLED_PROPERTY_NAME = "conductor.grpc.server.reflection.enabled"; + boolean SERVICE_REFLECTION_ENABLED_DEFAULT_VALUE = true; + default boolean isEnabled(){ return getBooleanProperty(ENABLED_PROPERTY_NAME, ENABLED_DEFAULT_VALUE); } @@ -16,4 +19,8 @@ default boolean isEnabled(){ default int getPort(){ return getIntProperty(PORT_PROPERTY_NAME, PORT_DEFAULT_VALUE); } + + default boolean isReflectionEnabled() { + return getBooleanProperty(SERVICE_REFLECTION_ENABLED_PROPERTY_NAME, SERVICE_REFLECTION_ENABLED_DEFAULT_VALUE); + } } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java index be85c0a472..3cb882869a 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -1,10 +1,12 @@ package com.netflix.conductor.grpc.server; +import com.google.common.collect.ImmutableList; import com.netflix.conductor.grpc.EventServiceGrpc; import com.netflix.conductor.grpc.MetadataServiceGrpc; import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; +import java.util.List; import java.util.Optional; import javax.inject.Inject; @@ -12,6 +14,7 @@ import io.grpc.BindableService; import io.grpc.health.v1.HealthGrpc; +import io.grpc.protobuf.services.ProtoReflectionService; public class GRPCServerProvider implements Provider> { @@ -24,14 +27,14 @@ public class GRPCServerProvider implements Provider> { @Inject public GRPCServerProvider( - GRPCServerConfiguration conf, + GRPCServerConfiguration grpcServerConfiguration, HealthGrpc.HealthImplBase healthServiceImpl, EventServiceGrpc.EventServiceImplBase eventServiceImpl, MetadataServiceGrpc.MetadataServiceImplBase metadataServiceImpl, TaskServiceGrpc.TaskServiceImplBase taskServiceImpl, WorkflowServiceGrpc.WorkflowServiceImplBase workflowServiceImpl ) { - this.configuration = conf; + this.configuration = grpcServerConfiguration; this.healthServiceImpl = healthServiceImpl; this.eventServiceImpl = eventServiceImpl; @@ -43,15 +46,25 @@ public GRPCServerProvider( @Override public Optional get() { return configuration.isEnabled() ? 
- Optional.of( - new GRPCServer( - configuration.getPort(), - healthServiceImpl, - eventServiceImpl, - metadataServiceImpl, - taskServiceImpl, - workflowServiceImpl - )) + Optional.of(getGRPCServer()) : Optional.empty(); } + + private GRPCServer getGRPCServer() { + ImmutableList.Builder<BindableService> services = ImmutableList.<BindableService>builder().add( + healthServiceImpl, + eventServiceImpl, + metadataServiceImpl, + taskServiceImpl, + workflowServiceImpl); + + if (configuration.isReflectionEnabled()) { + services.add(ProtoReflectionService.newInstance()); + } + + return new GRPCServer( + configuration.getPort(), + services.build() + ); + } } From d59c2a50094831fc9810e79f17928dfc63a69d97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 11:38:55 -0700 Subject: [PATCH 156/163] GRPC server code review - BindableService varargs for GrpcServer constructor - Method renamed for readability --- .../com/netflix/conductor/grpc/server/GRPCServer.java | 6 +++--- .../conductor/grpc/server/GRPCServerProvider.java | 11 +++++------ 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java index 444614790b..9067e04035 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServer.java @@ -9,7 +9,7 @@ import javax.inject.Singleton; import java.io.IOException; -import java.util.List; +import java.util.Arrays; @Singleton public class GRPCServer implements Lifecycle { @@ -18,9 +18,9 @@ public class GRPCServer implements Lifecycle { private final Server server; - public GRPCServer(int port, List<BindableService> services) { + public GRPCServer(int port, BindableService... services) { ServerBuilder builder = ServerBuilder.forPort(port); - services.stream().forEach(builder::addService); + Arrays.stream(services).forEach(builder::addService); server = builder.build(); } diff --git a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java index 3cb882869a..414e1660fc 100644 --- a/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java +++ b/grpc-server/src/main/java/com/netflix/conductor/grpc/server/GRPCServerProvider.java @@ -6,7 +6,6 @@ import com.netflix.conductor.grpc.TaskServiceGrpc; import com.netflix.conductor.grpc.WorkflowServiceGrpc; -import java.util.List; import java.util.Optional; import javax.inject.Inject; @@ -46,11 +45,11 @@ public GRPCServerProvider( @Override public Optional<GRPCServer> get() { return configuration.isEnabled() ?
- Optional.of(getGRPCServer()) + Optional.of(buildGRPCServer(configuration)) : Optional.empty(); } - private GRPCServer getGRPCServer() { + private GRPCServer buildGRPCServer(GRPCServerConfiguration grpcServerConfiguration) { ImmutableList.Builder services = ImmutableList.builder().add( healthServiceImpl, eventServiceImpl, @@ -58,13 +57,13 @@ private GRPCServer getGRPCServer() { taskServiceImpl, workflowServiceImpl); - if (configuration.isReflectionEnabled()) { + if (grpcServerConfiguration.isReflectionEnabled()) { services.add(ProtoReflectionService.newInstance()); } return new GRPCServer( - configuration.getPort(), - services.build() + grpcServerConfiguration.getPort(), + services.build().toArray(new BindableService[]{}) ); } } From 7c0d4e3c5b6018474d07c84ca36a8e6962e1cef1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 13:23:55 -0700 Subject: [PATCH 157/163] Refactored End2End and GrpcEnd2End tests in order to use equals from workflowDefinition --- .../common/metadata/workflow/WorkflowDef.java | 23 ++++++------ .../integration/AbstractEndToEndTest.java | 25 +++++++++++++ .../tests/integration/End2EndGrpcTests.java | 35 +++++++------------ .../tests/integration/End2EndTests.java | 13 +++---- 4 files changed, 55 insertions(+), 41 deletions(-) diff --git a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java index 7361db83ed..f68765fa17 100644 --- a/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java +++ b/common/src/main/java/com/netflix/conductor/common/metadata/workflow/WorkflowDef.java @@ -28,6 +28,7 @@ import com.github.vmg.protogen.annotations.ProtoField; import com.github.vmg.protogen.annotations.ProtoMessage; +import com.google.common.base.MoreObjects; import com.netflix.conductor.common.metadata.Auditable; /** @@ -276,16 +277,16 @@ public int hashCode() { @Override public String toString() { - return "WorkflowDef{" + - "name='" + name + '\'' + - ", description='" + description + '\'' + - ", version=" + version + - ", tasks=" + tasks + - ", inputParameters=" + inputParameters + - ", outputParameters=" + outputParameters + - ", failureWorkflow='" + failureWorkflow + '\'' + - ", schemaVersion=" + schemaVersion + - ", restartable=" + restartable + - '}'; + return MoreObjects.toStringHelper(getClass()) + .add("name", name) + .add("description", description) + .add("version", version) + .add("tasks", tasks) + .add("inputParameters", inputParameters) + .add("outputParameters", outputParameters) + .add("failureWorkflow", failureWorkflow) + .add("schemaVersion", schemaVersion) + .add("restartable", restartable) + .toString(); } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index 1099a81c03..a86612f1d9 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -2,16 +2,29 @@ import com.netflix.conductor.common.metadata.tasks.TaskDef; import com.netflix.conductor.common.metadata.workflow.TaskType; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; public class AbstractEndToEndTest { + protected static final String 
TASK_DEFINITION_PREFIX = "task_"; + private static final String DEFAULT_DESCRIPTION = "description"; + // Represents null value deserialized from the redis in memory db + private static final String DEFAULT_NULL_VALUE = "null"; + protected WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); workflowTask.setWorkflowTaskType(TaskType.SIMPLE); workflowTask.setTaskReferenceName(name); + workflowTask.setDescription(getDefaultDescription(name)); + workflowTask.setDynamicTaskNameParam(DEFAULT_NULL_VALUE); + workflowTask.setCaseValueParam(DEFAULT_NULL_VALUE); + workflowTask.setCaseExpression(DEFAULT_NULL_VALUE); + workflowTask.setDynamicForkTasksParam(DEFAULT_NULL_VALUE); + workflowTask.setDynamicForkTasksInputParamName(DEFAULT_NULL_VALUE); + workflowTask.setSink(DEFAULT_NULL_VALUE); return workflowTask; } @@ -20,4 +33,16 @@ protected TaskDef createTaskDefinition(String name) { taskDefinition.setName(name); return taskDefinition; } + + protected WorkflowDef createWorkflowDefinition(String workflowName) { + WorkflowDef workflowDefinition = new WorkflowDef(); + workflowDefinition.setName(workflowName); + workflowDefinition.setDescription(getDefaultDescription(workflowName)); + workflowDefinition.setFailureWorkflow(DEFAULT_NULL_VALUE); + return workflowDefinition; + } + + private String getDefaultDescription(String nameResource) { + return nameResource + " " + DEFAULT_DESCRIPTION; + } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index e7fde7f176..c906bd7ced 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -112,27 +112,18 @@ public void testAll() throws Exception { assertEquals(taskName, def.getName()); } - WorkflowDef def = new WorkflowDef(); - def.setName("test"); - WorkflowTask t0 = new WorkflowTask(); - t0.setName("t0"); - t0.setWorkflowTaskType(TaskType.SIMPLE); - t0.setTaskReferenceName("t0"); - - WorkflowTask t1 = new WorkflowTask(); - t1.setName("t1"); - t1.setWorkflowTaskType(TaskType.SIMPLE); - t1.setTaskReferenceName("t1"); + WorkflowDef def = createWorkflowDefinition("test"); + WorkflowTask t0 = createWorkflowTask("t0"); + WorkflowTask t1 = createWorkflowTask("t1"); def.getTasks().add(t0); def.getTasks().add(t1); metadataClient.registerWorkflowDef(def); - WorkflowDef foundd = metadataClient.getWorkflowDef(def.getName(), null); - assertNotNull(foundd); - assertEquals(def.getName(), foundd.getName()); - assertEquals(def.getVersion(), foundd.getVersion()); + WorkflowDef found = metadataClient.getWorkflowDef(def.getName(), null); + assertNotNull(found); + assertEquals(def, found); String correlationId = "test_corr_id"; StartWorkflowRequest startWf = new StartWorkflowRequest(); @@ -229,10 +220,10 @@ public void testAll() throws Exception { @Test public void testEphemeralWorkflowsWithStoredTasks() throws Exception { - List definitions = createAndRegisterTaskDefinitions("storedTaskDef", 5); + createAndRegisterTaskDefinitions("storedTaskDef", 5); - WorkflowDef workflowDefinition = new WorkflowDef(); - workflowDefinition.setName("testEphemeralWorkflow"); + String workflowName = "testEphemeralWorkflow"; + WorkflowDef workflowDefinition = createWorkflowDefinition(workflowName); WorkflowTask workflowTask1 = 
createWorkflowTask("storedTaskDef1"); WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); @@ -251,13 +242,13 @@ public void testEphemeralWorkflowsWithStoredTasks() throws Exception { Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + assertEquals(workflowDefinition, ephemeralWorkflow); } @Test public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { - WorkflowDef workflowDefinition = new WorkflowDef(); - workflowDefinition.setName("testEphemeralWorkflowWithEphemeralTasks"); + String workflowName = "testEphemeralWorkflowWithEphemeralTasks"; + WorkflowDef workflowDefinition = createWorkflowDefinition(workflowName); WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); @@ -281,7 +272,7 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + assertEquals(workflowDefinition, ephemeralWorkflow); List ephemeralTasks = ephemeralWorkflow.getTasks(); assertEquals(2, ephemeralTasks.size()); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index ece47e2e71..a1d13f28de 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -129,8 +129,7 @@ public void testAll() throws Exception { workflowClient.registerWorkflow(def); WorkflowDef workflowDefinitionFromSystem = workflowClient.getWorkflowDef(def.getName(), null); assertNotNull(workflowDefinitionFromSystem); - assertEquals(def.getName(), workflowDefinitionFromSystem.getName()); - assertEquals(def.getVersion(), workflowDefinitionFromSystem.getVersion()); + assertEquals(def, workflowDefinitionFromSystem); String correlationId = "test_corr_id"; String workflowId = workflowClient.startWorkflow(def.getName(), null, correlationId, new HashMap<>()); @@ -254,8 +253,7 @@ public void testEphemeralWorkflowsWithStoredTasks() throws Exception { assertNotNull(found); assertTrue(definitions.size() > 0); - WorkflowDef workflowDefinition = new WorkflowDef(); - workflowDefinition.setName("testEphemeralWorkflow"); + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflow"); WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); @@ -274,13 +272,12 @@ public void testEphemeralWorkflowsWithStoredTasks() throws Exception { Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + assertEquals(workflowDefinition, ephemeralWorkflow); } @Test public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { - WorkflowDef workflowDefinition = new WorkflowDef(); - workflowDefinition.setName("testEphemeralWorkflowWithEphemeralTasks"); + WorkflowDef workflowDefinition 
= createWorkflowDefinition("testEphemeralWorkflowWithEphemeralTasks"); WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); @@ -304,7 +301,7 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { Workflow workflow = workflowClient.getWorkflow(workflowId, true); WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition.getName(), ephemeralWorkflow.getName()); + assertEquals(workflowDefinition, ephemeralWorkflow); List ephemeralTasks = ephemeralWorkflow.getTasks(); assertEquals(2, ephemeralTasks.size()); From 22661806e24b63e056ed3c02f470108b4d0a2bbc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 14:27:19 -0700 Subject: [PATCH 158/163] Abstracted registerTaskDefinitions to AbstractEndToEndTest class --- .../integration/AbstractEndToEndTest.java | 23 +++++++++++++--- .../tests/integration/End2EndGrpcTests.java | 23 +++++----------- .../tests/integration/End2EndTests.java | 26 +++++-------------- 3 files changed, 32 insertions(+), 40 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index a86612f1d9..fc7764e6a2 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -5,14 +5,17 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -public class AbstractEndToEndTest { +import java.util.LinkedList; +import java.util.List; +import java.util.Optional; - protected static final String TASK_DEFINITION_PREFIX = "task_"; +public abstract class AbstractEndToEndTest { + + private static final String TASK_DEFINITION_PREFIX = "task_"; private static final String DEFAULT_DESCRIPTION = "description"; // Represents null value deserialized from the redis in memory db private static final String DEFAULT_NULL_VALUE = "null"; - protected WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); @@ -42,7 +45,21 @@ protected WorkflowDef createWorkflowDefinition(String workflowName) { return workflowDefinition; } + protected List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int numberOfTaskDefinitions) { + String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); + List definitions = new LinkedList<>(); + for (int i = 0; i < numberOfTaskDefinitions; i++) { + TaskDef def = new TaskDef(prefix + i, "task " + i + DEFAULT_DESCRIPTION); + def.setTimeoutPolicy(TaskDef.TimeoutPolicy.RETRY); + definitions.add(def); + } + this.registerTaskDefinitions(definitions); + return definitions; + } + private String getDefaultDescription(String nameResource) { return nameResource + " " + DEFAULT_DESCRIPTION; } + + protected abstract void registerTaskDefinitions(List taskDefinitionList); } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index c906bd7ced..3efb3193d1 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ 
b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -1,11 +1,11 @@ /** * Copyright 2016 Netflix, Inc. - * + *
    * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at - * + *
    * http://www.apache.org/licenses/LICENSE-2.0 - * + *
    * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. @@ -17,7 +17,6 @@ import com.google.inject.Guice; import com.google.inject.Injector; - import com.netflix.conductor.bootstrap.BootstrapModule; import com.netflix.conductor.bootstrap.ModulesProvider; import com.netflix.conductor.client.grpc.MetadataClient; @@ -31,7 +30,6 @@ import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; -import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.run.SearchResult; import com.netflix.conductor.common.run.Workflow; import com.netflix.conductor.common.run.Workflow.WorkflowStatus; @@ -43,7 +41,6 @@ import com.netflix.conductor.grpc.server.GRPCServerConfiguration; import com.netflix.conductor.grpc.server.GRPCServerProvider; import com.netflix.conductor.tests.utils.TestEnvironment; - import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; @@ -282,16 +279,8 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { } - // Helper method for creating task definitions on the server - private List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int numberOfTaskDefinitions) { - String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); - List definitions = new LinkedList<>(); - for (int i = 0; i < numberOfTaskDefinitions; i++) { - TaskDef def = new TaskDef(prefix + i, "task " + i + "description"); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - definitions.add(def); - } - metadataClient.registerTaskDefs(definitions); - return definitions; + @Override + protected void registerTaskDefinitions(List taskDefinitionList) { + metadataClient.registerTaskDefs(taskDefinitionList); } } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index a1d13f28de..1febc6506f 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -1,11 +1,11 @@ /** * Copyright 2016 Netflix, Inc. - * + *
    * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at - * + *
    * http://www.apache.org/licenses/LICENSE-2.0 - * + *
    * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. @@ -25,7 +25,6 @@ import com.netflix.conductor.common.metadata.tasks.Task; import com.netflix.conductor.common.metadata.tasks.Task.Status; import com.netflix.conductor.common.metadata.tasks.TaskDef; -import com.netflix.conductor.common.metadata.tasks.TaskDef.TimeoutPolicy; import com.netflix.conductor.common.metadata.tasks.TaskResult; import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; import com.netflix.conductor.common.metadata.workflow.TaskType; @@ -47,9 +46,7 @@ import org.junit.Test; import java.util.HashMap; -import java.util.LinkedList; import java.util.List; -import java.util.Optional; import java.util.stream.Collectors; import static org.junit.Assert.assertEquals; @@ -308,21 +305,10 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { for (WorkflowTask ephemeralTask : ephemeralTasks) { assertNotNull(ephemeralTask.getTaskDefinition()); } - } - // Helper method for creating task definitions on the server - private List createAndRegisterTaskDefinitions(String prefixTaskDefinition, int numberOfTaskDefinitions) { - assertNotNull(taskClient); - String prefix = Optional.ofNullable(prefixTaskDefinition).orElse(TASK_DEFINITION_PREFIX); - List definitions = new LinkedList<>(); - for (int i = 0; i < numberOfTaskDefinitions; i++) { - TaskDef def = new TaskDef(prefix + i, "task " + i + "description"); - def.setTimeoutPolicy(TimeoutPolicy.RETRY); - definitions.add(def); - } - metadataClient.registerTaskDefs(definitions); - return definitions; + @Override + protected void registerTaskDefinitions(List taskDefinitionList) { + metadataClient.registerTaskDefs(taskDefinitionList); } - } From 053ed9764b3e9028a795077c907da334c7f2a943 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 15:26:08 -0700 Subject: [PATCH 159/163] Added integration tests for ephemeral workflows with {stored,ephemeral} tasks --- .../tests/integration/End2EndGrpcTests.java | 37 ++++++++++++++++++- .../tests/integration/End2EndTests.java | 37 ++++++++++++++++++- 2 files changed, 72 insertions(+), 2 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 3efb3193d1..33226eb4de 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -55,7 +55,6 @@ /** * @author Viren - * */ public class End2EndGrpcTests extends AbstractEndToEndTest { private static TaskClient taskClient; @@ -279,6 +278,42 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { } + @Test + public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { + createAndRegisterTaskDefinitions("storedTask", 1); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); + + WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask 
workflowTask2 = createWorkflowTask("storedTask0"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = workflowClient.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition, ephemeralWorkflow); + + TaskDef storedTaskDefinition = metadataClient.getTaskDef("storedTask0"); + List tasks = ephemeralWorkflow.getTasks(); + assertEquals(2, tasks.size()); + assertEquals(workflowTask1, tasks.get(0)); + assertEquals(storedTaskDefinition, tasks.get(1).getTaskDefinition()); + + } + @Override protected void registerTaskDefinitions(List taskDefinitionList) { metadataClient.registerTaskDefs(taskDefinitionList); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 1febc6506f..3150db8f63 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -55,7 +55,6 @@ /** * @author Viren - * */ public class End2EndTests extends AbstractEndToEndTest { @@ -307,6 +306,42 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { } } + @Test + public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { + createAndRegisterTaskDefinitions("storedTask", 1); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); + + WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask workflowTask2 = createWorkflowTask("storedTask0"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + String workflowId = workflowClient.startWorkflow(workflowRequest); + assertNotNull(workflowId); + + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition, ephemeralWorkflow); + + TaskDef storedTaskDefinition = metadataClient.getTaskDef("storedTask0"); + List tasks = ephemeralWorkflow.getTasks(); + assertEquals(2, tasks.size()); + assertEquals(workflowTask1, tasks.get(0)); + assertEquals(storedTaskDefinition, tasks.get(1).getTaskDefinition()); + + } + @Override protected void registerTaskDefinitions(List taskDefinitionList) { metadataClient.registerTaskDefs(taskDefinitionList); From 926fbd6e9974410e2796b653503b4e0c5b8777a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 21:55:56 -0700 Subject: [PATCH 160/163] Abstracted end to end tests for ephemeral workflows to 
base class --- .../integration/AbstractEndToEndTest.java | 101 ++++++++++++++ .../tests/integration/End2EndGrpcTests.java | 100 ++------------ .../tests/integration/End2EndTests.java | 129 ++++-------------- 3 files changed, 134 insertions(+), 196 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index fc7764e6a2..600b7df9ff 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -4,11 +4,16 @@ import com.netflix.conductor.common.metadata.workflow.TaskType; import com.netflix.conductor.common.metadata.workflow.WorkflowDef; import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; +import org.junit.Test; import java.util.LinkedList; import java.util.List; import java.util.Optional; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + public abstract class AbstractEndToEndTest { private static final String TASK_DEFINITION_PREFIX = "task_"; @@ -16,6 +21,102 @@ public abstract class AbstractEndToEndTest { // Represents null value deserialized from the redis in memory db private static final String DEFAULT_NULL_VALUE = "null"; + @Test + public void testEphemeralWorkflowsWithStoredTasks() throws Exception { + createAndRegisterTaskDefinitions("storedTaskDef", 5); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflow"); + + WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); + WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflow"; + String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); + assertNotNull(workflowId); + + Workflow workflow = getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition, ephemeralWorkflow); + } + + @Test + public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowWithEphemeralTasks"); + + WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); + TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); + workflowTask2.setTaskDefinition(taskDefinition2); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; + + String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); + assertNotNull(workflowId); + + Workflow workflow = getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition, ephemeralWorkflow); + + List ephemeralTasks = ephemeralWorkflow.getTasks(); + assertEquals(2, ephemeralTasks.size()); + for (WorkflowTask ephemeralTask : ephemeralTasks) { + 
assertNotNull(ephemeralTask.getTaskDefinition()); + } + } + + @Test + public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { + createAndRegisterTaskDefinitions("storedTask", 1); + + WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); + + WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); + TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); + workflowTask1.setTaskDefinition(taskDefinition1); + + WorkflowTask workflowTask2 = createWorkflowTask("storedTask0"); + + workflowDefinition.getTasks().add(workflowTask1); + workflowDefinition.getTasks().add(workflowTask2); + + String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; + + String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); + assertNotNull(workflowId); + + Workflow workflow = getWorkflow(workflowId, true); + WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); + assertNotNull(ephemeralWorkflow); + assertEquals(workflowDefinition, ephemeralWorkflow); + + TaskDef storedTaskDefinition = getTaskDefinition("storedTask0"); + List tasks = ephemeralWorkflow.getTasks(); + assertEquals(2, tasks.size()); + assertEquals(workflowTask1, tasks.get(0)); + TaskDef currentStoredTaskDefinition = tasks.get(1).getTaskDefinition(); + assertNotNull(currentStoredTaskDefinition); + assertEquals(storedTaskDefinition, currentStoredTaskDefinition); + + } + + protected abstract String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition); + + protected abstract Workflow getWorkflow(String workflowId, boolean includeTasks); + + protected abstract TaskDef getTaskDefinition(String taskName); + + protected WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java index 33226eb4de..cddd44c0d9 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndGrpcTests.java @@ -214,104 +214,22 @@ public void testAll() throws Exception { assertEquals(1, wf.getTasks().size()); } - @Test - public void testEphemeralWorkflowsWithStoredTasks() throws Exception { - createAndRegisterTaskDefinitions("storedTaskDef", 5); - - String workflowName = "testEphemeralWorkflow"; - WorkflowDef workflowDefinition = createWorkflowDefinition(workflowName); - - WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); - WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflow"; + @Override + protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { StartWorkflowRequest workflowRequest = new StartWorkflowRequest() .withName(workflowExecutionName) .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); + return 
workflowClient.startWorkflow(workflowRequest); } - @Test - public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { - String workflowName = "testEphemeralWorkflowWithEphemeralTasks"; - WorkflowDef workflowDefinition = createWorkflowDefinition(workflowName); - - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - - WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); - TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); - workflowTask2.setTaskDefinition(taskDefinition2); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; - StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - List ephemeralTasks = ephemeralWorkflow.getTasks(); - assertEquals(2, ephemeralTasks.size()); - for (WorkflowTask ephemeralTask : ephemeralTasks) { - assertNotNull(ephemeralTask.getTaskDefinition()); - } - + @Override + protected Workflow getWorkflow(String workflowId, boolean includeTasks) { + return workflowClient.getWorkflow(workflowId, includeTasks); } - @Test - public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { - createAndRegisterTaskDefinitions("storedTask", 1); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); - - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - - WorkflowTask workflowTask2 = createWorkflowTask("storedTask0"); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; - StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - TaskDef storedTaskDefinition = metadataClient.getTaskDef("storedTask0"); - List tasks = ephemeralWorkflow.getTasks(); - assertEquals(2, tasks.size()); - assertEquals(workflowTask1, tasks.get(0)); - assertEquals(storedTaskDefinition, tasks.get(1).getTaskDefinition()); - + @Override + protected TaskDef getTaskDefinition(String taskName) { + return metadataClient.getTaskDef(taskName); } @Override diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java index 3150db8f63..870727f6fb 100644 --- 
a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java @@ -96,6 +96,30 @@ public static void teardown() throws Exception { search.stop(); } + @Override + protected String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition) { + StartWorkflowRequest workflowRequest = new StartWorkflowRequest() + .withName(workflowExecutionName) + .withWorkflowDef(workflowDefinition); + + return workflowClient.startWorkflow(workflowRequest); + } + + @Override + protected Workflow getWorkflow(String workflowId, boolean includeTasks) { + return workflowClient.getWorkflow(workflowId, includeTasks); + } + + @Override + protected TaskDef getTaskDefinition(String taskName) { + return metadataClient.getTaskDef(taskName); + } + + @Override + protected void registerTaskDefinitions(List taskDefinitionList) { + metadataClient.registerTaskDefs(taskDefinitionList); + } + @Test public void testAll() throws Exception { List definitions = createAndRegisterTaskDefinitions("t", 5); @@ -241,109 +265,4 @@ public void testMetadataWorkflowDefinition() { } } - @Test - public void testEphemeralWorkflowsWithStoredTasks() throws Exception { - List definitions = createAndRegisterTaskDefinitions("storedTaskDef", 5); - - List found = taskClient.getTaskDef(); - assertNotNull(found); - assertTrue(definitions.size() > 0); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflow"); - - WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); - WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflow"; - StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - } - - @Test - public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { - WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowWithEphemeralTasks"); - - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - - WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); - TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); - workflowTask2.setTaskDefinition(taskDefinition2); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; - StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - List 
ephemeralTasks = ephemeralWorkflow.getTasks(); - assertEquals(2, ephemeralTasks.size()); - for (WorkflowTask ephemeralTask : ephemeralTasks) { - assertNotNull(ephemeralTask.getTaskDefinition()); - } - } - - @Test - public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { - createAndRegisterTaskDefinitions("storedTask", 1); - - WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); - - WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); - TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); - workflowTask1.setTaskDefinition(taskDefinition1); - - WorkflowTask workflowTask2 = createWorkflowTask("storedTask0"); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralAndStoredTasks"; - StartWorkflowRequest workflowRequest = new StartWorkflowRequest() - .withName(workflowExecutionName) - .withWorkflowDef(workflowDefinition); - - String workflowId = workflowClient.startWorkflow(workflowRequest); - assertNotNull(workflowId); - - Workflow workflow = workflowClient.getWorkflow(workflowId, true); - WorkflowDef ephemeralWorkflow = workflow.getWorkflowDefinition(); - assertNotNull(ephemeralWorkflow); - assertEquals(workflowDefinition, ephemeralWorkflow); - - TaskDef storedTaskDefinition = metadataClient.getTaskDef("storedTask0"); - List tasks = ephemeralWorkflow.getTasks(); - assertEquals(2, tasks.size()); - assertEquals(workflowTask1, tasks.get(0)); - assertEquals(storedTaskDefinition, tasks.get(1).getTaskDefinition()); - - } - - @Override - protected void registerTaskDefinitions(List taskDefinitionList) { - metadataClient.registerTaskDefs(taskDefinitionList); - } } From 41b46e13c2233aa57957bef3da9d8bb197d379e4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Wed, 5 Sep 2018 22:01:13 -0700 Subject: [PATCH 161/163] Grouped abstract methods for end to end tests --- .../tests/integration/AbstractEndToEndTest.java | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index 600b7df9ff..6e7b1c9c69 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -110,13 +110,6 @@ public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception } - protected abstract String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition); - - protected abstract Workflow getWorkflow(String workflowId, boolean includeTasks); - - protected abstract TaskDef getTaskDefinition(String taskName); - - protected WorkflowTask createWorkflowTask(String name) { WorkflowTask workflowTask = new WorkflowTask(); workflowTask.setName(name); @@ -162,5 +155,11 @@ private String getDefaultDescription(String nameResource) { return nameResource + " " + DEFAULT_DESCRIPTION; } + protected abstract String startWorkflow(String workflowExecutionName, WorkflowDef workflowDefinition); + + protected abstract Workflow getWorkflow(String workflowId, boolean includeTasks); + + protected abstract TaskDef getTaskDefinition(String taskName); + protected abstract void registerTaskDefinitions(List taskDefinitionList); } 
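Note on PATCH 157 through 161: the tests converge on a template-method layout. AbstractEndToEndTest owns the shared scenarios (ephemeral workflows with stored, ephemeral, and mixed tasks) and defers every client call to four abstract hooks (startWorkflow, getWorkflow, getTaskDefinition, registerTaskDefinitions), which End2EndTests binds to the HTTP clients and End2EndGrpcTests binds to the gRPC clients. Below is a minimal, self-contained sketch of that shape; EndToEndSketch, GrpcEndToEndSketch, and their method bodies are hypothetical stand-ins for illustration, not code from these patches.

import java.util.Objects;

// Minimal sketch of the template-method split used above: the base class
// scripts a scenario once; each transport subclass supplies only the client
// call. Class names and bodies are illustrative, not from the patch series.
abstract class EndToEndSketch {

    // Shared scenario, identical for every transport (cf. the @Test methods
    // hoisted into AbstractEndToEndTest in PATCH 160).
    final void runEphemeralWorkflowScenario() {
        String workflowId = startWorkflow("ephemeralWorkflow");
        Objects.requireNonNull(workflowId, "workflow should have started");
        System.out.println("started " + workflowId);
    }

    // Transport-specific hook (cf. the abstract methods grouped in PATCH 161).
    protected abstract String startWorkflow(String workflowExecutionName);
}

final class GrpcEndToEndSketch extends EndToEndSketch {
    @Override
    protected String startWorkflow(String workflowExecutionName) {
        // A real subclass would build a StartWorkflowRequest and call
        // workflowClient.startWorkflow(request) here.
        return "wf-" + workflowExecutionName;
    }

    public static void main(String[] args) {
        new GrpcEndToEndSketch().runEphemeralWorkflowScenario();
    }
}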
From f6c742831d390e94821edd1a35f462d77b7475a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jos=C3=A9=20David=20Baena?= Date: Thu, 6 Sep 2018 10:41:59 -0700 Subject: [PATCH 162/163] Go client gogrpc generation based on latest code adding missing proto field feature --- .../conductor/grpc/events/event_service.pb.go | 52 ++--- .../grpc/metadata/metadata_service.pb.go | 52 ++--- .../gogrpc/conductor/grpc/search/search.pb.go | 16 +- .../conductor/grpc/tasks/task_service.pb.go | 130 +++++------ .../grpc/workflows/workflow_service.pb.go | 122 +++++----- .../conductor/model/dynamicforkjointask.pb.go | 16 +- .../model/dynamicforkjointasklist.pb.go | 8 +- .../conductor/model/eventexecution.pb.go | 24 +- .../gogrpc/conductor/model/eventhandler.pb.go | 52 ++--- client/gogrpc/conductor/model/polldata.pb.go | 14 +- .../model/rerunworkflowrequest.pb.go | 16 +- .../conductor/model/skiptaskrequest.pb.go | 14 +- .../model/startworkflowrequest.pb.go | 78 ++++--- .../conductor/model/subworkflowparams.pb.go | 49 ++-- client/gogrpc/conductor/model/task.pb.go | 205 +++++++++-------- client/gogrpc/conductor/model/taskdef.pb.go | 120 +++++----- .../gogrpc/conductor/model/taskexeclog.pb.go | 12 +- .../gogrpc/conductor/model/taskresult.pb.go | 24 +- .../gogrpc/conductor/model/tasksummary.pb.go | 38 ++-- client/gogrpc/conductor/model/workflow.pb.go | 143 ++++++------ .../gogrpc/conductor/model/workflowdef.pb.go | 24 +- .../conductor/model/workflowsummary.pb.go | 34 +-- .../gogrpc/conductor/model/workflowtask.pb.go | 210 ++++++++---------- 23 files changed, 727 insertions(+), 726 deletions(-) diff --git a/client/gogrpc/conductor/grpc/events/event_service.pb.go b/client/gogrpc/conductor/grpc/events/event_service.pb.go index 3283eca7f0..46b19b0231 100644 --- a/client/gogrpc/conductor/grpc/events/event_service.pb.go +++ b/client/gogrpc/conductor/grpc/events/event_service.pb.go @@ -25,7 +25,7 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type AddEventHandlerRequest struct { - Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler" json:"handler,omitempty"` + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -35,7 +35,7 @@ func (m *AddEventHandlerRequest) Reset() { *m = AddEventHandlerRequest{} func (m *AddEventHandlerRequest) String() string { return proto.CompactTextString(m) } func (*AddEventHandlerRequest) ProtoMessage() {} func (*AddEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{0} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{0} } func (m *AddEventHandlerRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AddEventHandlerRequest.Unmarshal(m, b) @@ -72,7 +72,7 @@ func (m *AddEventHandlerResponse) Reset() { *m = AddEventHandlerResponse func (m *AddEventHandlerResponse) String() string { return proto.CompactTextString(m) } func (*AddEventHandlerResponse) ProtoMessage() {} func (*AddEventHandlerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{1} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{1} } func (m *AddEventHandlerResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_AddEventHandlerResponse.Unmarshal(m, b) @@ -93,7 +93,7 @@ func (m *AddEventHandlerResponse) XXX_DiscardUnknown() { var xxx_messageInfo_AddEventHandlerResponse 
proto.InternalMessageInfo type UpdateEventHandlerRequest struct { - Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler" json:"handler,omitempty"` + Handler *model.EventHandler `protobuf:"bytes,1,opt,name=handler,proto3" json:"handler,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -103,7 +103,7 @@ func (m *UpdateEventHandlerRequest) Reset() { *m = UpdateEventHandlerReq func (m *UpdateEventHandlerRequest) String() string { return proto.CompactTextString(m) } func (*UpdateEventHandlerRequest) ProtoMessage() {} func (*UpdateEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{2} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{2} } func (m *UpdateEventHandlerRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateEventHandlerRequest.Unmarshal(m, b) @@ -140,7 +140,7 @@ func (m *UpdateEventHandlerResponse) Reset() { *m = UpdateEventHandlerRe func (m *UpdateEventHandlerResponse) String() string { return proto.CompactTextString(m) } func (*UpdateEventHandlerResponse) ProtoMessage() {} func (*UpdateEventHandlerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{3} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{3} } func (m *UpdateEventHandlerResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateEventHandlerResponse.Unmarshal(m, b) @@ -161,7 +161,7 @@ func (m *UpdateEventHandlerResponse) XXX_DiscardUnknown() { var xxx_messageInfo_UpdateEventHandlerResponse proto.InternalMessageInfo type RemoveEventHandlerRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -171,7 +171,7 @@ func (m *RemoveEventHandlerRequest) Reset() { *m = RemoveEventHandlerReq func (m *RemoveEventHandlerRequest) String() string { return proto.CompactTextString(m) } func (*RemoveEventHandlerRequest) ProtoMessage() {} func (*RemoveEventHandlerRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{4} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{4} } func (m *RemoveEventHandlerRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RemoveEventHandlerRequest.Unmarshal(m, b) @@ -208,7 +208,7 @@ func (m *RemoveEventHandlerResponse) Reset() { *m = RemoveEventHandlerRe func (m *RemoveEventHandlerResponse) String() string { return proto.CompactTextString(m) } func (*RemoveEventHandlerResponse) ProtoMessage() {} func (*RemoveEventHandlerResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{5} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{5} } func (m *RemoveEventHandlerResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RemoveEventHandlerResponse.Unmarshal(m, b) @@ -238,7 +238,7 @@ func (m *GetEventHandlersRequest) Reset() { *m = GetEventHandlersRequest func (m *GetEventHandlersRequest) String() string { return proto.CompactTextString(m) } func (*GetEventHandlersRequest) ProtoMessage() {} func (*GetEventHandlersRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{6} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{6} } func (m *GetEventHandlersRequest) 
XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetEventHandlersRequest.Unmarshal(m, b) @@ -259,8 +259,8 @@ func (m *GetEventHandlersRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetEventHandlersRequest proto.InternalMessageInfo type GetEventHandlersForEventRequest struct { - Event string `protobuf:"bytes,1,opt,name=event" json:"event,omitempty"` - ActiveOnly bool `protobuf:"varint,2,opt,name=active_only,json=activeOnly" json:"active_only,omitempty"` + Event string `protobuf:"bytes,1,opt,name=event,proto3" json:"event,omitempty"` + ActiveOnly bool `protobuf:"varint,2,opt,name=active_only,json=activeOnly,proto3" json:"active_only,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -270,7 +270,7 @@ func (m *GetEventHandlersForEventRequest) Reset() { *m = GetEventHandler func (m *GetEventHandlersForEventRequest) String() string { return proto.CompactTextString(m) } func (*GetEventHandlersForEventRequest) ProtoMessage() {} func (*GetEventHandlersForEventRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{7} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{7} } func (m *GetEventHandlersForEventRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetEventHandlersForEventRequest.Unmarshal(m, b) @@ -314,7 +314,7 @@ func (m *GetQueuesRequest) Reset() { *m = GetQueuesRequest{} } func (m *GetQueuesRequest) String() string { return proto.CompactTextString(m) } func (*GetQueuesRequest) ProtoMessage() {} func (*GetQueuesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{8} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{8} } func (m *GetQueuesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueuesRequest.Unmarshal(m, b) @@ -335,7 +335,7 @@ func (m *GetQueuesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetQueuesRequest proto.InternalMessageInfo type GetQueuesResponse struct { - EventToQueueUri map[string]string `protobuf:"bytes,1,rep,name=event_to_queue_uri,json=eventToQueueUri" json:"event_to_queue_uri,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + EventToQueueUri map[string]string `protobuf:"bytes,1,rep,name=event_to_queue_uri,json=eventToQueueUri,proto3" json:"event_to_queue_uri,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -345,7 +345,7 @@ func (m *GetQueuesResponse) Reset() { *m = GetQueuesResponse{} } func (m *GetQueuesResponse) String() string { return proto.CompactTextString(m) } func (*GetQueuesResponse) ProtoMessage() {} func (*GetQueuesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{9} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{9} } func (m *GetQueuesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueuesResponse.Unmarshal(m, b) @@ -382,7 +382,7 @@ func (m *GetQueueSizesRequest) Reset() { *m = GetQueueSizesRequest{} } func (m *GetQueueSizesRequest) String() string { return proto.CompactTextString(m) } func (*GetQueueSizesRequest) ProtoMessage() {} func (*GetQueueSizesRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{10} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{10} } func (m 
*GetQueueSizesRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueueSizesRequest.Unmarshal(m, b) @@ -403,7 +403,7 @@ func (m *GetQueueSizesRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetQueueSizesRequest proto.InternalMessageInfo type GetQueueSizesResponse struct { - EventToQueueInfo map[string]*GetQueueSizesResponse_QueueInfo `protobuf:"bytes,2,rep,name=event_to_queue_info,json=eventToQueueInfo" json:"event_to_queue_info,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + EventToQueueInfo map[string]*GetQueueSizesResponse_QueueInfo `protobuf:"bytes,2,rep,name=event_to_queue_info,json=eventToQueueInfo,proto3" json:"event_to_queue_info,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -413,7 +413,7 @@ func (m *GetQueueSizesResponse) Reset() { *m = GetQueueSizesResponse{} } func (m *GetQueueSizesResponse) String() string { return proto.CompactTextString(m) } func (*GetQueueSizesResponse) ProtoMessage() {} func (*GetQueueSizesResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{11} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{11} } func (m *GetQueueSizesResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueueSizesResponse.Unmarshal(m, b) @@ -441,7 +441,7 @@ func (m *GetQueueSizesResponse) GetEventToQueueInfo() map[string]*GetQueueSizesR } type GetQueueSizesResponse_QueueInfo struct { - QueueSizes map[string]int64 `protobuf:"bytes,1,rep,name=queue_sizes,json=queueSizes" json:"queue_sizes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"` + QueueSizes map[string]int64 `protobuf:"bytes,1,rep,name=queue_sizes,json=queueSizes,proto3" json:"queue_sizes,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -451,7 +451,7 @@ func (m *GetQueueSizesResponse_QueueInfo) Reset() { *m = GetQueueSizesRe func (m *GetQueueSizesResponse_QueueInfo) String() string { return proto.CompactTextString(m) } func (*GetQueueSizesResponse_QueueInfo) ProtoMessage() {} func (*GetQueueSizesResponse_QueueInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{11, 0} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{11, 0} } func (m *GetQueueSizesResponse_QueueInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueueSizesResponse_QueueInfo.Unmarshal(m, b) @@ -488,7 +488,7 @@ func (m *GetQueueProvidersRequest) Reset() { *m = GetQueueProvidersReque func (m *GetQueueProvidersRequest) String() string { return proto.CompactTextString(m) } func (*GetQueueProvidersRequest) ProtoMessage() {} func (*GetQueueProvidersRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{12} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{12} } func (m *GetQueueProvidersRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueueProvidersRequest.Unmarshal(m, b) @@ -509,7 +509,7 @@ func (m *GetQueueProvidersRequest) XXX_DiscardUnknown() { var xxx_messageInfo_GetQueueProvidersRequest proto.InternalMessageInfo type GetQueueProvidersResponse struct { - Providers []string `protobuf:"bytes,1,rep,name=providers" 
json:"providers,omitempty"` + Providers []string `protobuf:"bytes,1,rep,name=providers,proto3" json:"providers,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -519,7 +519,7 @@ func (m *GetQueueProvidersResponse) Reset() { *m = GetQueueProvidersResp func (m *GetQueueProvidersResponse) String() string { return proto.CompactTextString(m) } func (*GetQueueProvidersResponse) ProtoMessage() {} func (*GetQueueProvidersResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_event_service_913a1fde08d4f277, []int{13} + return fileDescriptor_event_service_30d3c8d74d6840aa, []int{13} } func (m *GetQueueProvidersResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetQueueProvidersResponse.Unmarshal(m, b) @@ -940,10 +940,10 @@ var _EventService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("grpc/event_service.proto", fileDescriptor_event_service_913a1fde08d4f277) + proto.RegisterFile("grpc/event_service.proto", fileDescriptor_event_service_30d3c8d74d6840aa) } -var fileDescriptor_event_service_913a1fde08d4f277 = []byte{ +var fileDescriptor_event_service_30d3c8d74d6840aa = []byte{ // 687 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x56, 0x5d, 0x6f, 0xd3, 0x3c, 0x18, 0x55, 0xd6, 0xf7, 0x65, 0xf4, 0x29, 0xb0, 0x61, 0xf6, 0x91, 0x5a, 0x43, 0x9b, 0x7a, 0x43, diff --git a/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go b/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go index 95d4c3a980..0d431bc56b 100644 --- a/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go +++ b/client/gogrpc/conductor/grpc/metadata/metadata_service.pb.go @@ -25,7 +25,7 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type CreateWorkflowRequest struct { - Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -35,7 +35,7 @@ func (m *CreateWorkflowRequest) Reset() { *m = CreateWorkflowRequest{} } func (m *CreateWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*CreateWorkflowRequest) ProtoMessage() {} func (*CreateWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{0} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{0} } func (m *CreateWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateWorkflowRequest.Unmarshal(m, b) @@ -72,7 +72,7 @@ func (m *CreateWorkflowResponse) Reset() { *m = CreateWorkflowResponse{} func (m *CreateWorkflowResponse) String() string { return proto.CompactTextString(m) } func (*CreateWorkflowResponse) ProtoMessage() {} func (*CreateWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{1} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{1} } func (m *CreateWorkflowResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateWorkflowResponse.Unmarshal(m, b) @@ -93,7 +93,7 @@ func (m *CreateWorkflowResponse) XXX_DiscardUnknown() { var xxx_messageInfo_CreateWorkflowResponse proto.InternalMessageInfo type UpdateWorkflowsRequest struct { - Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs" 
json:"defs,omitempty"` + Defs []*model.WorkflowDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -103,7 +103,7 @@ func (m *UpdateWorkflowsRequest) Reset() { *m = UpdateWorkflowsRequest{} func (m *UpdateWorkflowsRequest) String() string { return proto.CompactTextString(m) } func (*UpdateWorkflowsRequest) ProtoMessage() {} func (*UpdateWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{2} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{2} } func (m *UpdateWorkflowsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateWorkflowsRequest.Unmarshal(m, b) @@ -140,7 +140,7 @@ func (m *UpdateWorkflowsResponse) Reset() { *m = UpdateWorkflowsResponse func (m *UpdateWorkflowsResponse) String() string { return proto.CompactTextString(m) } func (*UpdateWorkflowsResponse) ProtoMessage() {} func (*UpdateWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{3} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{3} } func (m *UpdateWorkflowsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateWorkflowsResponse.Unmarshal(m, b) @@ -161,8 +161,8 @@ func (m *UpdateWorkflowsResponse) XXX_DiscardUnknown() { var xxx_messageInfo_UpdateWorkflowsResponse proto.InternalMessageInfo type GetWorkflowRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -172,7 +172,7 @@ func (m *GetWorkflowRequest) Reset() { *m = GetWorkflowRequest{} } func (m *GetWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*GetWorkflowRequest) ProtoMessage() {} func (*GetWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{4} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{4} } func (m *GetWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowRequest.Unmarshal(m, b) @@ -207,7 +207,7 @@ func (m *GetWorkflowRequest) GetVersion() int32 { } type GetWorkflowResponse struct { - Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + Workflow *model.WorkflowDef `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -217,7 +217,7 @@ func (m *GetWorkflowResponse) Reset() { *m = GetWorkflowResponse{} } func (m *GetWorkflowResponse) String() string { return proto.CompactTextString(m) } func (*GetWorkflowResponse) ProtoMessage() {} func (*GetWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{5} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{5} } func (m *GetWorkflowResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowResponse.Unmarshal(m, b) @@ -245,7 +245,7 @@ func (m *GetWorkflowResponse) GetWorkflow() *model.WorkflowDef { } type CreateTasksRequest struct { - Defs 
[]*model.TaskDef `protobuf:"bytes,1,rep,name=defs" json:"defs,omitempty"` + Defs []*model.TaskDef `protobuf:"bytes,1,rep,name=defs,proto3" json:"defs,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -255,7 +255,7 @@ func (m *CreateTasksRequest) Reset() { *m = CreateTasksRequest{} } func (m *CreateTasksRequest) String() string { return proto.CompactTextString(m) } func (*CreateTasksRequest) ProtoMessage() {} func (*CreateTasksRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{6} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{6} } func (m *CreateTasksRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateTasksRequest.Unmarshal(m, b) @@ -292,7 +292,7 @@ func (m *CreateTasksResponse) Reset() { *m = CreateTasksResponse{} } func (m *CreateTasksResponse) String() string { return proto.CompactTextString(m) } func (*CreateTasksResponse) ProtoMessage() {} func (*CreateTasksResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{7} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{7} } func (m *CreateTasksResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_CreateTasksResponse.Unmarshal(m, b) @@ -313,7 +313,7 @@ func (m *CreateTasksResponse) XXX_DiscardUnknown() { var xxx_messageInfo_CreateTasksResponse proto.InternalMessageInfo type UpdateTaskRequest struct { - Task *model.TaskDef `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"` + Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -323,7 +323,7 @@ func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} } func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) } func (*UpdateTaskRequest) ProtoMessage() {} func (*UpdateTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{8} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{8} } func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b) @@ -360,7 +360,7 @@ func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} } func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) } func (*UpdateTaskResponse) ProtoMessage() {} func (*UpdateTaskResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{9} + return fileDescriptor_metadata_service_aad2a84548370e06, []int{9} } func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b) @@ -381,7 +381,7 @@ func (m *UpdateTaskResponse) XXX_DiscardUnknown() { var xxx_messageInfo_UpdateTaskResponse proto.InternalMessageInfo type GetTaskRequest struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -391,7 +391,7 @@ func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} } func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) } func (*GetTaskRequest) ProtoMessage() {} func (*GetTaskRequest) Descriptor() ([]byte, 
-	return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{10}
+	return fileDescriptor_metadata_service_aad2a84548370e06, []int{10}
 }
 func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b)
@@ -419,7 +419,7 @@ func (m *GetTaskRequest) GetTaskType() string {
 }
 
 type GetTaskResponse struct {
-	Task *model.TaskDef `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"`
+	Task *model.TaskDef `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -429,7 +429,7 @@ func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} }
 func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*GetTaskResponse) ProtoMessage() {}
 func (*GetTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{11}
+	return fileDescriptor_metadata_service_aad2a84548370e06, []int{11}
 }
 func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b)
@@ -457,7 +457,7 @@ func (m *GetTaskResponse) GetTask() *model.TaskDef {
 }
 
 type DeleteTaskRequest struct {
-	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"`
+	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -467,7 +467,7 @@ func (m *DeleteTaskRequest) Reset() { *m = DeleteTaskRequest{} }
 func (m *DeleteTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*DeleteTaskRequest) ProtoMessage() {}
 func (*DeleteTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{12}
+	return fileDescriptor_metadata_service_aad2a84548370e06, []int{12}
 }
 func (m *DeleteTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_DeleteTaskRequest.Unmarshal(m, b)
@@ -504,7 +504,7 @@ func (m *DeleteTaskResponse) Reset() { *m = DeleteTaskResponse{} }
 func (m *DeleteTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*DeleteTaskResponse) ProtoMessage() {}
 func (*DeleteTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_metadata_service_4778cc9d199e5aef, []int{13}
+	return fileDescriptor_metadata_service_aad2a84548370e06, []int{13}
 }
 func (m *DeleteTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_DeleteTaskResponse.Unmarshal(m, b)
@@ -826,10 +826,10 @@ var _MetadataService_serviceDesc = grpc.ServiceDesc{
 }
 
 func init() {
-	proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_4778cc9d199e5aef)
+	proto.RegisterFile("grpc/metadata_service.proto", fileDescriptor_metadata_service_aad2a84548370e06)
 }
 
-var fileDescriptor_metadata_service_4778cc9d199e5aef = []byte{
+var fileDescriptor_metadata_service_aad2a84548370e06 = []byte{
 	// 526 bytes of a gzipped FileDescriptorProto
 	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x55, 0xdf, 0x6b, 0xd3, 0x50,
 	0x18, 0xa5, 0xba, 0xb9, 0xed, 0x1b, 0xac, 0xf4, 0x76, 0x5b, 0x63, 0xe6, 0x43, 0xc9, 0x8b, 0xc5,
diff --git a/client/gogrpc/conductor/grpc/search/search.pb.go b/client/gogrpc/conductor/grpc/search/search.pb.go
index 1c7ade8fd2..cdf1557c30 100644
--- a/client/gogrpc/conductor/grpc/search/search.pb.go
+++ b/client/gogrpc/conductor/grpc/search/search.pb.go
@@ -19,11 +19,11 @@ var _ = math.Inf
 const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
 
 type Request struct {
-	Start int32 `protobuf:"varint,1,opt,name=start" json:"start,omitempty"`
-	Size int32 `protobuf:"varint,2,opt,name=size" json:"size,omitempty"`
-	Sort string `protobuf:"bytes,3,opt,name=sort" json:"sort,omitempty"`
-	FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText" json:"free_text,omitempty"`
-	Query string `protobuf:"bytes,5,opt,name=query" json:"query,omitempty"`
+	Start int32 `protobuf:"varint,1,opt,name=start,proto3" json:"start,omitempty"`
+	Size int32 `protobuf:"varint,2,opt,name=size,proto3" json:"size,omitempty"`
+	Sort string `protobuf:"bytes,3,opt,name=sort,proto3" json:"sort,omitempty"`
+	FreeText string `protobuf:"bytes,4,opt,name=free_text,json=freeText,proto3" json:"free_text,omitempty"`
+	Query string `protobuf:"bytes,5,opt,name=query,proto3" json:"query,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -33,7 +33,7 @@ func (m *Request) Reset() { *m = Request{} }
 func (m *Request) String() string { return proto.CompactTextString(m) }
 func (*Request) ProtoMessage() {}
 func (*Request) Descriptor() ([]byte, []int) {
-	return fileDescriptor_search_855a611014aa2143, []int{0}
+	return fileDescriptor_search_a50e745244da0fb1, []int{0}
 }
 func (m *Request) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_Request.Unmarshal(m, b)
@@ -92,9 +92,9 @@ func init() {
 	proto.RegisterType((*Request)(nil), "conductor.grpc.search.Request")
 }
 
-func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_855a611014aa2143) }
+func init() { proto.RegisterFile("grpc/search.proto", fileDescriptor_search_a50e745244da0fb1) }
 
-var fileDescriptor_search_855a611014aa2143 = []byte{
+var fileDescriptor_search_a50e745244da0fb1 = []byte{
 	// 212 bytes of a gzipped FileDescriptorProto
 	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x54, 0x90, 0xc1, 0x4a, 0xc4, 0x30,
 	0x10, 0x86, 0xa9, 0x6e, 0x75, 0x37, 0x37, 0x83, 0x42, 0xd0, 0xcb, 0xe2, 0x69, 0x4f, 0xc9, 0xc1,
diff --git a/client/gogrpc/conductor/grpc/tasks/task_service.pb.go b/client/gogrpc/conductor/grpc/tasks/task_service.pb.go
index 6c74f322a2..f6a6dbc0dd 100644
--- a/client/gogrpc/conductor/grpc/tasks/task_service.pb.go
+++ b/client/gogrpc/conductor/grpc/tasks/task_service.pb.go
@@ -25,9 +25,9 @@ var _ = math.Inf
 const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
 
 type PollRequest struct {
-	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"`
-	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"`
-	Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"`
+	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"`
+	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"`
+	Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -37,7 +37,7 @@ func (m *PollRequest) Reset() { *m = PollRequest{} }
 func (m *PollRequest) String() string { return proto.CompactTextString(m) }
 func (*PollRequest) ProtoMessage() {}
 func (*PollRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{0}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{0}
 }
 func (m *PollRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PollRequest.Unmarshal(m, b)
@@ -79,7 +79,7 @@ func (m *PollRequest) GetDomain() string {
 }
 
 type PollResponse struct {
-	Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"`
+	Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -89,7 +89,7 @@ func (m *PollResponse) Reset() { *m = PollResponse{} }
 func (m *PollResponse) String() string { return proto.CompactTextString(m) }
 func (*PollResponse) ProtoMessage() {}
 func (*PollResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{1}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{1}
 }
 func (m *PollResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PollResponse.Unmarshal(m, b)
@@ -117,11 +117,11 @@ func (m *PollResponse) GetTask() *model.Task {
 }
 
 type BatchPollRequest struct {
-	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"`
-	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"`
-	Domain string `protobuf:"bytes,3,opt,name=domain" json:"domain,omitempty"`
-	Count int32 `protobuf:"varint,4,opt,name=count" json:"count,omitempty"`
-	Timeout int32 `protobuf:"varint,5,opt,name=timeout" json:"timeout,omitempty"`
+	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"`
+	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"`
+	Domain string `protobuf:"bytes,3,opt,name=domain,proto3" json:"domain,omitempty"`
+	Count int32 `protobuf:"varint,4,opt,name=count,proto3" json:"count,omitempty"`
+	Timeout int32 `protobuf:"varint,5,opt,name=timeout,proto3" json:"timeout,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -131,7 +131,7 @@ func (m *BatchPollRequest) Reset() { *m = BatchPollRequest{} }
 func (m *BatchPollRequest) String() string { return proto.CompactTextString(m) }
 func (*BatchPollRequest) ProtoMessage() {}
 func (*BatchPollRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{2}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{2}
 }
 func (m *BatchPollRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_BatchPollRequest.Unmarshal(m, b)
@@ -187,9 +187,9 @@ func (m *BatchPollRequest) GetTimeout() int32 {
 }
 
 type TasksInProgressRequest struct {
-	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"`
-	StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey" json:"start_key,omitempty"`
-	Count int32 `protobuf:"varint,3,opt,name=count" json:"count,omitempty"`
+	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"`
+	StartKey string `protobuf:"bytes,2,opt,name=start_key,json=startKey,proto3" json:"start_key,omitempty"`
+	Count int32 `protobuf:"varint,3,opt,name=count,proto3" json:"count,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -199,7 +199,7 @@ func (m *TasksInProgressRequest) Reset() { *m = TasksInProgressRequest{}
 func (m *TasksInProgressRequest) String() string { return proto.CompactTextString(m) }
 func (*TasksInProgressRequest) ProtoMessage() {}
 func (*TasksInProgressRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{3}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{3}
 }
 func (m *TasksInProgressRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_TasksInProgressRequest.Unmarshal(m, b)
@@ -241,7 +241,7 @@ func (m *TasksInProgressRequest) GetCount() int32 {
 }
 
 type TasksInProgressResponse struct {
-	Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"`
+	Tasks []*model.Task `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -251,7 +251,7 @@ func (m *TasksInProgressResponse) Reset() { *m = TasksInProgressResponse
 func (m *TasksInProgressResponse) String() string { return proto.CompactTextString(m) }
 func (*TasksInProgressResponse) ProtoMessage() {}
 func (*TasksInProgressResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{4}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{4}
 }
 func (m *TasksInProgressResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_TasksInProgressResponse.Unmarshal(m, b)
@@ -279,8 +279,8 @@ func (m *TasksInProgressResponse) GetTasks() []*model.Task {
 }
 
 type PendingTaskRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
-	TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
+	TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -290,7 +290,7 @@ func (m *PendingTaskRequest) Reset() { *m = PendingTaskRequest{} }
 func (m *PendingTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*PendingTaskRequest) ProtoMessage() {}
 func (*PendingTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{5}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{5}
 }
 func (m *PendingTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PendingTaskRequest.Unmarshal(m, b)
@@ -325,7 +325,7 @@ func (m *PendingTaskRequest) GetTaskRefName() string {
 }
 
 type PendingTaskResponse struct {
-	Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"`
+	Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -335,7 +335,7 @@ func (m *PendingTaskResponse) Reset() { *m = PendingTaskResponse{} }
 func (m *PendingTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*PendingTaskResponse) ProtoMessage() {}
 func (*PendingTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{6}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{6}
 }
 func (m *PendingTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PendingTaskResponse.Unmarshal(m, b)
@@ -363,7 +363,7 @@ func (m *PendingTaskResponse) GetTask() *model.Task {
 }
 
 type UpdateTaskRequest struct {
-	Result *model.TaskResult `protobuf:"bytes,1,opt,name=result" json:"result,omitempty"`
+	Result *model.TaskResult `protobuf:"bytes,1,opt,name=result,proto3" json:"result,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -373,7 +373,7 @@ func (m *UpdateTaskRequest) Reset() { *m = UpdateTaskRequest{} }
 func (m *UpdateTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*UpdateTaskRequest) ProtoMessage() {}
 func (*UpdateTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{7}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{7}
 }
 func (m *UpdateTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_UpdateTaskRequest.Unmarshal(m, b)
@@ -401,7 +401,7 @@ func (m *UpdateTaskRequest) GetResult() *model.TaskResult {
 }
 
 type UpdateTaskResponse struct {
-	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
+	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -411,7 +411,7 @@ func (m *UpdateTaskResponse) Reset() { *m = UpdateTaskResponse{} }
 func (m *UpdateTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*UpdateTaskResponse) ProtoMessage() {}
 func (*UpdateTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{8}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{8}
 }
 func (m *UpdateTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_UpdateTaskResponse.Unmarshal(m, b)
@@ -439,8 +439,8 @@ func (m *UpdateTaskResponse) GetTaskId() string {
 }
 
 type AckTaskRequest struct {
-	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
-	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"`
+	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
+	WorkerId string `protobuf:"bytes,2,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -450,7 +450,7 @@ func (m *AckTaskRequest) Reset() { *m = AckTaskRequest{} }
 func (m *AckTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*AckTaskRequest) ProtoMessage() {}
 func (*AckTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{9}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{9}
 }
 func (m *AckTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_AckTaskRequest.Unmarshal(m, b)
@@ -485,7 +485,7 @@ func (m *AckTaskRequest) GetWorkerId() string {
 }
 
 type AckTaskResponse struct {
-	Ack bool `protobuf:"varint,1,opt,name=ack" json:"ack,omitempty"`
+	Ack bool `protobuf:"varint,1,opt,name=ack,proto3" json:"ack,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -495,7 +495,7 @@ func (m *AckTaskResponse) Reset() { *m = AckTaskResponse{} }
 func (m *AckTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*AckTaskResponse) ProtoMessage() {}
 func (*AckTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{10}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{10}
 }
 func (m *AckTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_AckTaskResponse.Unmarshal(m, b)
@@ -523,8 +523,8 @@ func (m *AckTaskResponse) GetAck() bool {
 }
 
 type AddLogRequest struct {
-	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
-	Log string `protobuf:"bytes,2,opt,name=log" json:"log,omitempty"`
+	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
+	Log string `protobuf:"bytes,2,opt,name=log,proto3" json:"log,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -534,7 +534,7 @@ func (m *AddLogRequest) Reset() { *m = AddLogRequest{} }
 func (m *AddLogRequest) String() string { return proto.CompactTextString(m) }
 func (*AddLogRequest) ProtoMessage() {}
 func (*AddLogRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{11}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{11}
 }
 func (m *AddLogRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_AddLogRequest.Unmarshal(m, b)
@@ -578,7 +578,7 @@ func (m *AddLogResponse) Reset() { *m = AddLogResponse{} }
 func (m *AddLogResponse) String() string { return proto.CompactTextString(m) }
 func (*AddLogResponse) ProtoMessage() {}
 func (*AddLogResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{12}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{12}
 }
 func (m *AddLogResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_AddLogResponse.Unmarshal(m, b)
@@ -599,7 +599,7 @@ func (m *AddLogResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_AddLogResponse proto.InternalMessageInfo
 
 type GetTaskLogsRequest struct {
-	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
+	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -609,7 +609,7 @@ func (m *GetTaskLogsRequest) Reset() { *m = GetTaskLogsRequest{} }
 func (m *GetTaskLogsRequest) String() string { return proto.CompactTextString(m) }
 func (*GetTaskLogsRequest) ProtoMessage() {}
 func (*GetTaskLogsRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{13}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{13}
 }
 func (m *GetTaskLogsRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskLogsRequest.Unmarshal(m, b)
@@ -637,7 +637,7 @@ func (m *GetTaskLogsRequest) GetTaskId() string {
 }
 
 type GetTaskLogsResponse struct {
-	Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs" json:"logs,omitempty"`
+	Logs []*model.TaskExecLog `protobuf:"bytes,1,rep,name=logs,proto3" json:"logs,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -647,7 +647,7 @@ func (m *GetTaskLogsResponse) Reset() { *m = GetTaskLogsResponse{} }
 func (m *GetTaskLogsResponse) String() string { return proto.CompactTextString(m) }
 func (*GetTaskLogsResponse) ProtoMessage() {}
 func (*GetTaskLogsResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{14}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{14}
 }
 func (m *GetTaskLogsResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskLogsResponse.Unmarshal(m, b)
@@ -675,7 +675,7 @@ func (m *GetTaskLogsResponse) GetLogs() []*model.TaskExecLog {
 }
 
 type GetTaskRequest struct {
-	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
+	TaskId string `protobuf:"bytes,1,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -685,7 +685,7 @@ func (m *GetTaskRequest) Reset() { *m = GetTaskRequest{} }
 func (m *GetTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*GetTaskRequest) ProtoMessage() {}
 func (*GetTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{15}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{15}
 }
 func (m *GetTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskRequest.Unmarshal(m, b)
@@ -713,7 +713,7 @@ func (m *GetTaskRequest) GetTaskId() string {
 }
 
 type GetTaskResponse struct {
-	Task *model.Task `protobuf:"bytes,1,opt,name=task" json:"task,omitempty"`
+	Task *model.Task `protobuf:"bytes,1,opt,name=task,proto3" json:"task,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -723,7 +723,7 @@ func (m *GetTaskResponse) Reset() { *m = GetTaskResponse{} }
 func (m *GetTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*GetTaskResponse) ProtoMessage() {}
 func (*GetTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{16}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{16}
 }
 func (m *GetTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetTaskResponse.Unmarshal(m, b)
@@ -751,8 +751,8 @@ func (m *GetTaskResponse) GetTask() *model.Task {
 }
 
 type RemoveTaskRequest struct {
-	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"`
-	TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"`
+	TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"`
+	TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -762,7 +762,7 @@ func (m *RemoveTaskRequest) Reset() { *m = RemoveTaskRequest{} }
 func (m *RemoveTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*RemoveTaskRequest) ProtoMessage() {}
 func (*RemoveTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{17}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{17}
 }
 func (m *RemoveTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RemoveTaskRequest.Unmarshal(m, b)
@@ -806,7 +806,7 @@ func (m *RemoveTaskResponse) Reset() { *m = RemoveTaskResponse{} }
 func (m *RemoveTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*RemoveTaskResponse) ProtoMessage() {}
 func (*RemoveTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{18}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{18}
 }
 func (m *RemoveTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RemoveTaskResponse.Unmarshal(m, b)
@@ -827,7 +827,7 @@ func (m *RemoveTaskResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_RemoveTaskResponse proto.InternalMessageInfo
 
 type QueueSizesRequest struct {
-	TaskTypes []string `protobuf:"bytes,1,rep,name=task_types,json=taskTypes" json:"task_types,omitempty"`
+	TaskTypes []string `protobuf:"bytes,1,rep,name=task_types,json=taskTypes,proto3" json:"task_types,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -837,7 +837,7 @@ func (m *QueueSizesRequest) Reset() { *m = QueueSizesRequest{} }
 func (m *QueueSizesRequest) String() string { return proto.CompactTextString(m) }
 func (*QueueSizesRequest) ProtoMessage() {}
 func (*QueueSizesRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{19}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{19}
 }
 func (m *QueueSizesRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueSizesRequest.Unmarshal(m, b)
@@ -865,7 +865,7 @@ func (m *QueueSizesRequest) GetTaskTypes() []string {
 }
 
 type QueueSizesResponse struct {
-	QueueForTask map[string]int32 `protobuf:"bytes,1,rep,name=queue_for_task,json=queueForTask" json:"queue_for_task,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"`
+	QueueForTask map[string]int32 `protobuf:"bytes,1,rep,name=queue_for_task,json=queueForTask,proto3" json:"queue_for_task,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -875,7 +875,7 @@ func (m *QueueSizesResponse) Reset() { *m = QueueSizesResponse{} }
 func (m *QueueSizesResponse) String() string { return proto.CompactTextString(m) }
 func (*QueueSizesResponse) ProtoMessage() {}
 func (*QueueSizesResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{20}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{20}
 }
 func (m *QueueSizesResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueSizesResponse.Unmarshal(m, b)
@@ -912,7 +912,7 @@ func (m *QueueInfoRequest) Reset() { *m = QueueInfoRequest{} }
 func (m *QueueInfoRequest) String() string { return proto.CompactTextString(m) }
 func (*QueueInfoRequest) ProtoMessage() {}
 func (*QueueInfoRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{21}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{21}
 }
 func (m *QueueInfoRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueInfoRequest.Unmarshal(m, b)
@@ -933,7 +933,7 @@ func (m *QueueInfoRequest) XXX_DiscardUnknown() {
 var xxx_messageInfo_QueueInfoRequest proto.InternalMessageInfo
 
 type QueueInfoResponse struct {
-	Queues map[string]int64 `protobuf:"bytes,1,rep,name=queues" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"varint,2,opt,name=value"`
+	Queues map[string]int64 `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -943,7 +943,7 @@ func (m *QueueInfoResponse) Reset() { *m = QueueInfoResponse{} }
 func (m *QueueInfoResponse) String() string { return proto.CompactTextString(m) }
 func (*QueueInfoResponse) ProtoMessage() {}
 func (*QueueInfoResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{22}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{22}
 }
 func (m *QueueInfoResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueInfoResponse.Unmarshal(m, b)
@@ -980,7 +980,7 @@ func (m *QueueAllInfoRequest) Reset() { *m = QueueAllInfoRequest{} }
 func (m *QueueAllInfoRequest) String() string { return proto.CompactTextString(m) }
 func (*QueueAllInfoRequest) ProtoMessage() {}
 func (*QueueAllInfoRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{23}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{23}
 }
 func (m *QueueAllInfoRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueAllInfoRequest.Unmarshal(m, b)
@@ -1001,7 +1001,7 @@ func (m *QueueAllInfoRequest) XXX_DiscardUnknown() {
 var xxx_messageInfo_QueueAllInfoRequest proto.InternalMessageInfo
 
 type QueueAllInfoResponse struct {
-	Queues map[string]*QueueAllInfoResponse_QueueInfo `protobuf:"bytes,1,rep,name=queues" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
+	Queues map[string]*QueueAllInfoResponse_QueueInfo `protobuf:"bytes,1,rep,name=queues,proto3" json:"queues,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -1011,7 +1011,7 @@ func (m *QueueAllInfoResponse) Reset() { *m = QueueAllInfoResponse{} }
 func (m *QueueAllInfoResponse) String() string { return proto.CompactTextString(m) }
 func (*QueueAllInfoResponse) ProtoMessage() {}
 func (*QueueAllInfoResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{24}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{24}
 }
 func (m *QueueAllInfoResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueAllInfoResponse.Unmarshal(m, b)
@@ -1039,8 +1039,8 @@ func (m *QueueAllInfoResponse) GetQueues() map[string]*QueueAllInfoResponse_Queu
 }
 
 type QueueAllInfoResponse_ShardInfo struct {
-	Size int64 `protobuf:"varint,1,opt,name=size" json:"size,omitempty"`
-	Uacked int64 `protobuf:"varint,2,opt,name=uacked" json:"uacked,omitempty"`
+	Size int64 `protobuf:"varint,1,opt,name=size,proto3" json:"size,omitempty"`
+	Uacked int64 `protobuf:"varint,2,opt,name=uacked,proto3" json:"uacked,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -1050,7 +1050,7 @@ func (m *QueueAllInfoResponse_ShardInfo) Reset() { *m = QueueAllInfoResp
 func (m *QueueAllInfoResponse_ShardInfo) String() string { return proto.CompactTextString(m) }
 func (*QueueAllInfoResponse_ShardInfo) ProtoMessage() {}
 func (*QueueAllInfoResponse_ShardInfo) Descriptor() ([]byte, []int) {
-	return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 0}
+	return fileDescriptor_task_service_1133a2fd800ff6c6, []int{24, 0}
 }
 func (m *QueueAllInfoResponse_ShardInfo) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_QueueAllInfoResponse_ShardInfo.Unmarshal(m, b)
@@ -1085,7 +1085,7 @@ func (m *QueueAllInfoResponse_ShardInfo) GetUacked() int64 {
 }
 
 type QueueAllInfoResponse_QueueInfo struct {
-	Shards map[string]*QueueAllInfoResponse_ShardInfo `protobuf:"bytes,1,rep,name=shards" json:"shards,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"`
protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -1095,7 +1095,7 @@ func (m *QueueAllInfoResponse_QueueInfo) Reset() { *m = QueueAllInfoResp func (m *QueueAllInfoResponse_QueueInfo) String() string { return proto.CompactTextString(m) } func (*QueueAllInfoResponse_QueueInfo) ProtoMessage() {} func (*QueueAllInfoResponse_QueueInfo) Descriptor() ([]byte, []int) { - return fileDescriptor_task_service_2cd893b942ad08bb, []int{24, 1} + return fileDescriptor_task_service_1133a2fd800ff6c6, []int{24, 1} } func (m *QueueAllInfoResponse_QueueInfo) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_QueueAllInfoResponse_QueueInfo.Unmarshal(m, b) @@ -1679,10 +1679,10 @@ var _TaskService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_2cd893b942ad08bb) + proto.RegisterFile("grpc/task_service.proto", fileDescriptor_task_service_1133a2fd800ff6c6) } -var fileDescriptor_task_service_2cd893b942ad08bb = []byte{ +var fileDescriptor_task_service_1133a2fd800ff6c6 = []byte{ // 1114 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x57, 0xdd, 0x72, 0xdb, 0x54, 0x10, 0x1e, 0xc5, 0x89, 0x53, 0xaf, 0x93, 0xd4, 0x39, 0xf9, 0x33, 0x2a, 0x0c, 0x41, 0x2d, 0x6d, diff --git a/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go b/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go index fd87283a0e..51abf61f5c 100644 --- a/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go +++ b/client/gogrpc/conductor/grpc/workflows/workflow_service.pb.go @@ -26,7 +26,7 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type StartWorkflowResponse struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -36,7 +36,7 @@ func (m *StartWorkflowResponse) Reset() { *m = StartWorkflowResponse{} } func (m *StartWorkflowResponse) String() string { return proto.CompactTextString(m) } func (*StartWorkflowResponse) ProtoMessage() {} func (*StartWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{0} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{0} } func (m *StartWorkflowResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StartWorkflowResponse.Unmarshal(m, b) @@ -64,10 +64,10 @@ func (m *StartWorkflowResponse) GetWorkflowId() string { } type GetWorkflowsRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - IncludeClosed bool `protobuf:"varint,3,opt,name=include_closed,json=includeClosed" json:"include_closed,omitempty"` - IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + CorrelationId []string `protobuf:"bytes,2,rep,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + IncludeClosed bool 
`protobuf:"varint,3,opt,name=include_closed,json=includeClosed,proto3" json:"include_closed,omitempty"` + IncludeTasks bool `protobuf:"varint,4,opt,name=include_tasks,json=includeTasks,proto3" json:"include_tasks,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -77,7 +77,7 @@ func (m *GetWorkflowsRequest) Reset() { *m = GetWorkflowsRequest{} } func (m *GetWorkflowsRequest) String() string { return proto.CompactTextString(m) } func (*GetWorkflowsRequest) ProtoMessage() {} func (*GetWorkflowsRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{1} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{1} } func (m *GetWorkflowsRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowsRequest.Unmarshal(m, b) @@ -126,7 +126,7 @@ func (m *GetWorkflowsRequest) GetIncludeTasks() bool { } type GetWorkflowsResponse struct { - WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + WorkflowsById map[string]*GetWorkflowsResponse_Workflows `protobuf:"bytes,1,rep,name=workflows_by_id,json=workflowsById,proto3" json:"workflows_by_id,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -136,7 +136,7 @@ func (m *GetWorkflowsResponse) Reset() { *m = GetWorkflowsResponse{} } func (m *GetWorkflowsResponse) String() string { return proto.CompactTextString(m) } func (*GetWorkflowsResponse) ProtoMessage() {} func (*GetWorkflowsResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{2} } func (m *GetWorkflowsResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowsResponse.Unmarshal(m, b) @@ -164,7 +164,7 @@ func (m *GetWorkflowsResponse) GetWorkflowsById() map[string]*GetWorkflowsRespon } type GetWorkflowsResponse_Workflows struct { - Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows" json:"workflows,omitempty"` + Workflows []*model.Workflow `protobuf:"bytes,1,rep,name=workflows,proto3" json:"workflows,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -174,7 +174,7 @@ func (m *GetWorkflowsResponse_Workflows) Reset() { *m = GetWorkflowsResp func (m *GetWorkflowsResponse_Workflows) String() string { return proto.CompactTextString(m) } func (*GetWorkflowsResponse_Workflows) ProtoMessage() {} func (*GetWorkflowsResponse_Workflows) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{2, 0} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{2, 0} } func (m *GetWorkflowsResponse_Workflows) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowsResponse_Workflows.Unmarshal(m, b) @@ -202,8 +202,8 @@ func (m *GetWorkflowsResponse_Workflows) GetWorkflows() []*model.Workflow { } type GetWorkflowStatusRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks" json:"include_tasks,omitempty"` + WorkflowId string 
`protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + IncludeTasks bool `protobuf:"varint,2,opt,name=include_tasks,json=includeTasks,proto3" json:"include_tasks,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -213,7 +213,7 @@ func (m *GetWorkflowStatusRequest) Reset() { *m = GetWorkflowStatusReque func (m *GetWorkflowStatusRequest) String() string { return proto.CompactTextString(m) } func (*GetWorkflowStatusRequest) ProtoMessage() {} func (*GetWorkflowStatusRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{3} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{3} } func (m *GetWorkflowStatusRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowStatusRequest.Unmarshal(m, b) @@ -248,7 +248,7 @@ func (m *GetWorkflowStatusRequest) GetIncludeTasks() bool { } type GetWorkflowStatusResponse struct { - Workflow *model.Workflow `protobuf:"bytes,1,opt,name=workflow" json:"workflow,omitempty"` + Workflow *model.Workflow `protobuf:"bytes,1,opt,name=workflow,proto3" json:"workflow,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -258,7 +258,7 @@ func (m *GetWorkflowStatusResponse) Reset() { *m = GetWorkflowStatusResp func (m *GetWorkflowStatusResponse) String() string { return proto.CompactTextString(m) } func (*GetWorkflowStatusResponse) ProtoMessage() {} func (*GetWorkflowStatusResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{4} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{4} } func (m *GetWorkflowStatusResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_GetWorkflowStatusResponse.Unmarshal(m, b) @@ -286,8 +286,8 @@ func (m *GetWorkflowStatusResponse) GetWorkflow() *model.Workflow { } type RemoveWorkflowRequest struct { - WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId" json:"workflod_id,omitempty"` - ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow" json:"archive_workflow,omitempty"` + WorkflodId string `protobuf:"bytes,1,opt,name=workflod_id,json=workflodId,proto3" json:"workflod_id,omitempty"` + ArchiveWorkflow bool `protobuf:"varint,2,opt,name=archive_workflow,json=archiveWorkflow,proto3" json:"archive_workflow,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -297,7 +297,7 @@ func (m *RemoveWorkflowRequest) Reset() { *m = RemoveWorkflowRequest{} } func (m *RemoveWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*RemoveWorkflowRequest) ProtoMessage() {} func (*RemoveWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{5} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{5} } func (m *RemoveWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RemoveWorkflowRequest.Unmarshal(m, b) @@ -341,7 +341,7 @@ func (m *RemoveWorkflowResponse) Reset() { *m = RemoveWorkflowResponse{} func (m *RemoveWorkflowResponse) String() string { return proto.CompactTextString(m) } func (*RemoveWorkflowResponse) ProtoMessage() {} func (*RemoveWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{6} + return 
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{6}
 }
 func (m *RemoveWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RemoveWorkflowResponse.Unmarshal(m, b)
@@ -362,10 +362,10 @@ func (m *RemoveWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_RemoveWorkflowResponse proto.InternalMessageInfo
 
 type GetRunningWorkflowsRequest struct {
-	Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"`
-	Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"`
-	StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime" json:"start_time,omitempty"`
-	EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime" json:"end_time,omitempty"`
+	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
+	Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"`
+	StartTime int64 `protobuf:"varint,3,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"`
+	EndTime int64 `protobuf:"varint,4,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -375,7 +375,7 @@ func (m *GetRunningWorkflowsRequest) Reset() { *m = GetRunningWorkflowsR
 func (m *GetRunningWorkflowsRequest) String() string { return proto.CompactTextString(m) }
 func (*GetRunningWorkflowsRequest) ProtoMessage() {}
 func (*GetRunningWorkflowsRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{7}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{7}
 }
 func (m *GetRunningWorkflowsRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetRunningWorkflowsRequest.Unmarshal(m, b)
@@ -424,7 +424,7 @@ func (m *GetRunningWorkflowsRequest) GetEndTime() int64 {
 }
 
 type GetRunningWorkflowsResponse struct {
-	WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds" json:"workflow_ids,omitempty"`
+	WorkflowIds []string `protobuf:"bytes,1,rep,name=workflow_ids,json=workflowIds,proto3" json:"workflow_ids,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -434,7 +434,7 @@ func (m *GetRunningWorkflowsResponse) Reset() { *m = GetRunningWorkflows
 func (m *GetRunningWorkflowsResponse) String() string { return proto.CompactTextString(m) }
 func (*GetRunningWorkflowsResponse) ProtoMessage() {}
 func (*GetRunningWorkflowsResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{8}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{8}
 }
 func (m *GetRunningWorkflowsResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_GetRunningWorkflowsResponse.Unmarshal(m, b)
@@ -462,7 +462,7 @@ func (m *GetRunningWorkflowsResponse) GetWorkflowIds() []string {
 }
 
 type DecideWorkflowRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -472,7 +472,7 @@ func (m *DecideWorkflowRequest) Reset() { *m = DecideWorkflowRequest{} }
 func (m *DecideWorkflowRequest) String() string { return proto.CompactTextString(m) }
 func (*DecideWorkflowRequest) ProtoMessage() {}
 func (*DecideWorkflowRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{9}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{9}
 }
 func (m *DecideWorkflowRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_DecideWorkflowRequest.Unmarshal(m, b)
@@ -509,7 +509,7 @@ func (m *DecideWorkflowResponse) Reset() { *m = DecideWorkflowResponse{}
 func (m *DecideWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*DecideWorkflowResponse) ProtoMessage() {}
 func (*DecideWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{10}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{10}
 }
 func (m *DecideWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_DecideWorkflowResponse.Unmarshal(m, b)
@@ -530,7 +530,7 @@ func (m *DecideWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_DecideWorkflowResponse proto.InternalMessageInfo
 
 type PauseWorkflowRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -540,7 +540,7 @@ func (m *PauseWorkflowRequest) Reset() { *m = PauseWorkflowRequest{} }
 func (m *PauseWorkflowRequest) String() string { return proto.CompactTextString(m) }
 func (*PauseWorkflowRequest) ProtoMessage() {}
 func (*PauseWorkflowRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{11}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{11}
 }
 func (m *PauseWorkflowRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PauseWorkflowRequest.Unmarshal(m, b)
@@ -577,7 +577,7 @@ func (m *PauseWorkflowResponse) Reset() { *m = PauseWorkflowResponse{} }
 func (m *PauseWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*PauseWorkflowResponse) ProtoMessage() {}
 func (*PauseWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{12}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{12}
 }
 func (m *PauseWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_PauseWorkflowResponse.Unmarshal(m, b)
@@ -598,7 +598,7 @@ func (m *PauseWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_PauseWorkflowResponse proto.InternalMessageInfo
 
 type ResumeWorkflowRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -608,7 +608,7 @@ func (m *ResumeWorkflowRequest) Reset() { *m = ResumeWorkflowRequest{} }
 func (m *ResumeWorkflowRequest) String() string { return proto.CompactTextString(m) }
 func (*ResumeWorkflowRequest) ProtoMessage() {}
 func (*ResumeWorkflowRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{13}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{13}
 }
 func (m *ResumeWorkflowRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_ResumeWorkflowRequest.Unmarshal(m, b)
@@ -645,7 +645,7 @@ func (m *ResumeWorkflowResponse) Reset() { *m = ResumeWorkflowResponse{}
 func (m *ResumeWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*ResumeWorkflowResponse) ProtoMessage() {}
 func (*ResumeWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{14}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{14}
 }
 func (m *ResumeWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_ResumeWorkflowResponse.Unmarshal(m, b)
@@ -666,9 +666,9 @@ func (m *ResumeWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_ResumeWorkflowResponse proto.InternalMessageInfo
 
 type SkipTaskRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
-	TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"`
-	Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request" json:"request,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
+	TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"`
+	Request *model.SkipTaskRequest `protobuf:"bytes,3,opt,name=request,proto3" json:"request,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -678,7 +678,7 @@ func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} }
 func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) }
 func (*SkipTaskRequest) ProtoMessage() {}
 func (*SkipTaskRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{15}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{15}
 }
 func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b)
@@ -729,7 +729,7 @@ func (m *SkipTaskResponse) Reset() { *m = SkipTaskResponse{} }
 func (m *SkipTaskResponse) String() string { return proto.CompactTextString(m) }
 func (*SkipTaskResponse) ProtoMessage() {}
 func (*SkipTaskResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{16}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{16}
 }
 func (m *SkipTaskResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_SkipTaskResponse.Unmarshal(m, b)
@@ -750,7 +750,7 @@ func (m *SkipTaskResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_SkipTaskResponse proto.InternalMessageInfo
 
 type RerunWorkflowResponse struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -760,7 +760,7 @@ func (m *RerunWorkflowResponse) Reset() { *m = RerunWorkflowResponse{} }
 func (m *RerunWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*RerunWorkflowResponse) ProtoMessage() {}
 func (*RerunWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{17}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{17}
 }
 func (m *RerunWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RerunWorkflowResponse.Unmarshal(m, b)
@@ -788,7 +788,7 @@ func (m *RerunWorkflowResponse) GetWorkflowId() string {
 }
 
 type RestartWorkflowRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -798,7 +798,7 @@ func (m *RestartWorkflowRequest) Reset() { *m = RestartWorkflowRequest{}
 func (m *RestartWorkflowRequest) String() string { return proto.CompactTextString(m) }
 func (*RestartWorkflowRequest) ProtoMessage() {}
 func (*RestartWorkflowRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{18}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{18}
 }
 func (m *RestartWorkflowRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RestartWorkflowRequest.Unmarshal(m, b)
@@ -835,7 +835,7 @@ func (m *RestartWorkflowResponse) Reset() { *m = RestartWorkflowResponse
 func (m *RestartWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*RestartWorkflowResponse) ProtoMessage() {}
 func (*RestartWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{19}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{19}
 }
 func (m *RestartWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RestartWorkflowResponse.Unmarshal(m, b)
@@ -856,7 +856,7 @@ func (m *RestartWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_RestartWorkflowResponse proto.InternalMessageInfo
 
 type RetryWorkflowRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
+	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized []byte `json:"-"`
 	XXX_sizecache int32 `json:"-"`
@@ -866,7 +866,7 @@ func (m *RetryWorkflowRequest) Reset() { *m = RetryWorkflowRequest{} }
 func (m *RetryWorkflowRequest) String() string { return proto.CompactTextString(m) }
 func (*RetryWorkflowRequest) ProtoMessage() {}
 func (*RetryWorkflowRequest) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{20}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{20}
 }
 func (m *RetryWorkflowRequest) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RetryWorkflowRequest.Unmarshal(m, b)
@@ -903,7 +903,7 @@ func (m *RetryWorkflowResponse) Reset() { *m = RetryWorkflowResponse{} }
 func (m *RetryWorkflowResponse) String() string { return proto.CompactTextString(m) }
 func (*RetryWorkflowResponse) ProtoMessage() {}
 func (*RetryWorkflowResponse) Descriptor() ([]byte, []int) {
-	return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{21}
+	return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{21}
 }
 func (m *RetryWorkflowResponse) XXX_Unmarshal(b []byte) error {
 	return xxx_messageInfo_RetryWorkflowResponse.Unmarshal(m, b)
@@ -924,7 +924,7 @@ func (m *RetryWorkflowResponse) XXX_DiscardUnknown() {
 var xxx_messageInfo_RetryWorkflowResponse proto.InternalMessageInfo
 
 type ResetWorkflowCallbacksRequest struct {
-	WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"`
`protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -934,7 +934,7 @@ func (m *ResetWorkflowCallbacksRequest) Reset() { *m = ResetWorkflowCall func (m *ResetWorkflowCallbacksRequest) String() string { return proto.CompactTextString(m) } func (*ResetWorkflowCallbacksRequest) ProtoMessage() {} func (*ResetWorkflowCallbacksRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{22} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{22} } func (m *ResetWorkflowCallbacksRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResetWorkflowCallbacksRequest.Unmarshal(m, b) @@ -971,7 +971,7 @@ func (m *ResetWorkflowCallbacksResponse) Reset() { *m = ResetWorkflowCal func (m *ResetWorkflowCallbacksResponse) String() string { return proto.CompactTextString(m) } func (*ResetWorkflowCallbacksResponse) ProtoMessage() {} func (*ResetWorkflowCallbacksResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{23} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{23} } func (m *ResetWorkflowCallbacksResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_ResetWorkflowCallbacksResponse.Unmarshal(m, b) @@ -992,8 +992,8 @@ func (m *ResetWorkflowCallbacksResponse) XXX_DiscardUnknown() { var xxx_messageInfo_ResetWorkflowCallbacksResponse proto.InternalMessageInfo type TerminateWorkflowRequest struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - Reason string `protobuf:"bytes,2,opt,name=reason" json:"reason,omitempty"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + Reason string `protobuf:"bytes,2,opt,name=reason,proto3" json:"reason,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -1003,7 +1003,7 @@ func (m *TerminateWorkflowRequest) Reset() { *m = TerminateWorkflowReque func (m *TerminateWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*TerminateWorkflowRequest) ProtoMessage() {} func (*TerminateWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{24} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{24} } func (m *TerminateWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TerminateWorkflowRequest.Unmarshal(m, b) @@ -1047,7 +1047,7 @@ func (m *TerminateWorkflowResponse) Reset() { *m = TerminateWorkflowResp func (m *TerminateWorkflowResponse) String() string { return proto.CompactTextString(m) } func (*TerminateWorkflowResponse) ProtoMessage() {} func (*TerminateWorkflowResponse) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{25} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{25} } func (m *TerminateWorkflowResponse) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TerminateWorkflowResponse.Unmarshal(m, b) @@ -1068,8 +1068,8 @@ func (m *TerminateWorkflowResponse) XXX_DiscardUnknown() { var xxx_messageInfo_TerminateWorkflowResponse proto.InternalMessageInfo type WorkflowSummarySearchResult struct { - TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits" json:"total_hits,omitempty"` - Results 
[]*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results" json:"results,omitempty"` + TotalHits int64 `protobuf:"varint,1,opt,name=total_hits,json=totalHits,proto3" json:"total_hits,omitempty"` + Results []*model.WorkflowSummary `protobuf:"bytes,2,rep,name=results,proto3" json:"results,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -1079,7 +1079,7 @@ func (m *WorkflowSummarySearchResult) Reset() { *m = WorkflowSummarySear func (m *WorkflowSummarySearchResult) String() string { return proto.CompactTextString(m) } func (*WorkflowSummarySearchResult) ProtoMessage() {} func (*WorkflowSummarySearchResult) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_service_fc7b0bf1a282d9fc, []int{26} + return fileDescriptor_workflow_service_ad4bde2e77de2037, []int{26} } func (m *WorkflowSummarySearchResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowSummarySearchResult.Unmarshal(m, b) @@ -1743,10 +1743,10 @@ var _WorkflowService_serviceDesc = grpc.ServiceDesc{ } func init() { - proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_fc7b0bf1a282d9fc) + proto.RegisterFile("grpc/workflow_service.proto", fileDescriptor_workflow_service_ad4bde2e77de2037) } -var fileDescriptor_workflow_service_fc7b0bf1a282d9fc = []byte{ +var fileDescriptor_workflow_service_ad4bde2e77de2037 = []byte{ // 1121 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xb4, 0x58, 0xcd, 0x6e, 0xdb, 0x46, 0x10, 0x86, 0xa4, 0xc4, 0xb6, 0xc6, 0x96, 0x7f, 0x36, 0xb6, 0x43, 0xd3, 0x48, 0xaa, 0xb2, 0x08, diff --git a/client/gogrpc/conductor/model/dynamicforkjointask.pb.go b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go index f7d710983a..8c7110ef78 100644 --- a/client/gogrpc/conductor/model/dynamicforkjointask.pb.go +++ b/client/gogrpc/conductor/model/dynamicforkjointask.pb.go @@ -20,11 +20,11 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type DynamicForkJoinTask struct { - TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName" json:"task_name,omitempty"` - WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName" json:"workflow_name,omitempty"` - ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName" json:"reference_name,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` + TaskName string `protobuf:"bytes,1,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` + WorkflowName string `protobuf:"bytes,2,opt,name=workflow_name,json=workflowName,proto3" json:"workflow_name,omitempty"` + ReferenceName string `protobuf:"bytes,3,opt,name=reference_name,json=referenceName,proto3" json:"reference_name,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -34,7 +34,7 @@ func (m *DynamicForkJoinTask) Reset() { *m = DynamicForkJoinTask{} } func (m *DynamicForkJoinTask) String() string { return 
proto.CompactTextString(m) } func (*DynamicForkJoinTask) ProtoMessage() {} func (*DynamicForkJoinTask) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointask_60f4ea3626679478, []int{0} + return fileDescriptor_dynamicforkjointask_300c110eb897c85e, []int{0} } func (m *DynamicForkJoinTask) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DynamicForkJoinTask.Unmarshal(m, b) @@ -95,10 +95,10 @@ func init() { } func init() { - proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_60f4ea3626679478) + proto.RegisterFile("model/dynamicforkjointask.proto", fileDescriptor_dynamicforkjointask_300c110eb897c85e) } -var fileDescriptor_dynamicforkjointask_60f4ea3626679478 = []byte{ +var fileDescriptor_dynamicforkjointask_300c110eb897c85e = []byte{ // 325 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x91, 0x4f, 0x4b, 0x03, 0x31, 0x10, 0xc5, 0xe9, 0x3f, 0xb1, 0xa9, 0x55, 0x89, 0x28, 0xa5, 0x15, 0x2c, 0x8a, 0xd0, 0x83, 0x24, diff --git a/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go index 4dac9221d2..c02fc9b0d6 100644 --- a/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go +++ b/client/gogrpc/conductor/model/dynamicforkjointasklist.pb.go @@ -19,7 +19,7 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type DynamicForkJoinTaskList struct { - DynamicTasks []*DynamicForkJoinTask `protobuf:"bytes,1,rep,name=dynamic_tasks,json=dynamicTasks" json:"dynamic_tasks,omitempty"` + DynamicTasks []*DynamicForkJoinTask `protobuf:"bytes,1,rep,name=dynamic_tasks,json=dynamicTasks,proto3" json:"dynamic_tasks,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -29,7 +29,7 @@ func (m *DynamicForkJoinTaskList) Reset() { *m = DynamicForkJoinTaskList func (m *DynamicForkJoinTaskList) String() string { return proto.CompactTextString(m) } func (*DynamicForkJoinTaskList) ProtoMessage() {} func (*DynamicForkJoinTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e, []int{0} + return fileDescriptor_dynamicforkjointasklist_4634756916e85673, []int{0} } func (m *DynamicForkJoinTaskList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_DynamicForkJoinTaskList.Unmarshal(m, b) @@ -61,10 +61,10 @@ func init() { } func init() { - proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e) + proto.RegisterFile("model/dynamicforkjointasklist.proto", fileDescriptor_dynamicforkjointasklist_4634756916e85673) } -var fileDescriptor_dynamicforkjointasklist_5dc7aa3e0011d25e = []byte{ +var fileDescriptor_dynamicforkjointasklist_4634756916e85673 = []byte{ // 200 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x52, 0xce, 0xcd, 0x4f, 0x49, 0xcd, 0xd1, 0x4f, 0xa9, 0xcc, 0x4b, 0xcc, 0xcd, 0x4c, 0x4e, 0xcb, 0x2f, 0xca, 0xce, 0xca, 0xcf, diff --git a/client/gogrpc/conductor/model/eventexecution.pb.go b/client/gogrpc/conductor/model/eventexecution.pb.go index a08e1b4655..7c76aa9853 100644 --- a/client/gogrpc/conductor/model/eventexecution.pb.go +++ b/client/gogrpc/conductor/model/eventexecution.pb.go @@ -45,18 +45,18 @@ func (x EventExecution_Status) String() string { return proto.EnumName(EventExecution_Status_name, int32(x)) } func (EventExecution_Status) EnumDescriptor() 
([]byte, []int) { - return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0, 0} + return fileDescriptor_eventexecution_461922c614048c4a, []int{0, 0} } type EventExecution struct { - Id string `protobuf:"bytes,1,opt,name=id" json:"id,omitempty"` - MessageId string `protobuf:"bytes,2,opt,name=message_id,json=messageId" json:"message_id,omitempty"` - Name string `protobuf:"bytes,3,opt,name=name" json:"name,omitempty"` - Event string `protobuf:"bytes,4,opt,name=event" json:"event,omitempty"` - Created int64 `protobuf:"varint,5,opt,name=created" json:"created,omitempty"` - Status EventExecution_Status `protobuf:"varint,6,opt,name=status,enum=conductor.proto.EventExecution_Status" json:"status,omitempty"` - Action EventHandler_Action_Type `protobuf:"varint,7,opt,name=action,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` - Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` + MessageId string `protobuf:"bytes,2,opt,name=message_id,json=messageId,proto3" json:"message_id,omitempty"` + Name string `protobuf:"bytes,3,opt,name=name,proto3" json:"name,omitempty"` + Event string `protobuf:"bytes,4,opt,name=event,proto3" json:"event,omitempty"` + Created int64 `protobuf:"varint,5,opt,name=created,proto3" json:"created,omitempty"` + Status EventExecution_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.EventExecution_Status" json:"status,omitempty"` + Action EventHandler_Action_Type `protobuf:"varint,7,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,8,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -66,7 +66,7 @@ func (m *EventExecution) Reset() { *m = EventExecution{} } func (m *EventExecution) String() string { return proto.CompactTextString(m) } func (*EventExecution) ProtoMessage() {} func (*EventExecution) Descriptor() ([]byte, []int) { - return fileDescriptor_eventexecution_680c67ac3fada8e2, []int{0} + return fileDescriptor_eventexecution_461922c614048c4a, []int{0} } func (m *EventExecution) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventExecution.Unmarshal(m, b) @@ -149,10 +149,10 @@ func init() { } func init() { - proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_680c67ac3fada8e2) + proto.RegisterFile("model/eventexecution.proto", fileDescriptor_eventexecution_461922c614048c4a) } -var fileDescriptor_eventexecution_680c67ac3fada8e2 = []byte{ +var fileDescriptor_eventexecution_461922c614048c4a = []byte{ // 434 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x84, 0x92, 0xc1, 0x8b, 0xd3, 0x40, 0x14, 0xc6, 0x4d, 0xb2, 0x4d, 0xed, 0x0b, 0x76, 0xc3, 0x20, 0x32, 0x54, 0x85, 0xb2, 0x07, 0xa9, diff --git a/client/gogrpc/conductor/model/eventhandler.pb.go b/client/gogrpc/conductor/model/eventhandler.pb.go index f64569a388..6a268b097f 100644 --- a/client/gogrpc/conductor/model/eventhandler.pb.go +++ b/client/gogrpc/conductor/model/eventhandler.pb.go @@ -43,15 +43,15 @@ func (x EventHandler_Action_Type) String() string { return proto.EnumName(EventHandler_Action_Type_name, int32(x)) } 
func (EventHandler_Action_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2, 0} + return fileDescriptor_eventhandler_9283dbc454fc79ea, []int{0, 2, 0} } type EventHandler struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Event string `protobuf:"bytes,2,opt,name=event" json:"event,omitempty"` - Condition string `protobuf:"bytes,3,opt,name=condition" json:"condition,omitempty"` - Actions []*EventHandler_Action `protobuf:"bytes,4,rep,name=actions" json:"actions,omitempty"` - Active bool `protobuf:"varint,5,opt,name=active" json:"active,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Event string `protobuf:"bytes,2,opt,name=event,proto3" json:"event,omitempty"` + Condition string `protobuf:"bytes,3,opt,name=condition,proto3" json:"condition,omitempty"` + Actions []*EventHandler_Action `protobuf:"bytes,4,rep,name=actions,proto3" json:"actions,omitempty"` + Active bool `protobuf:"varint,5,opt,name=active,proto3" json:"active,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -61,7 +61,7 @@ func (m *EventHandler) Reset() { *m = EventHandler{} } func (m *EventHandler) String() string { return proto.CompactTextString(m) } func (*EventHandler) ProtoMessage() {} func (*EventHandler) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0} + return fileDescriptor_eventhandler_9283dbc454fc79ea, []int{0} } func (m *EventHandler) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventHandler.Unmarshal(m, b) @@ -117,11 +117,11 @@ func (m *EventHandler) GetActive() bool { } type EventHandler_StartWorkflow struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` - CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - InputMessage *any.Any `protobuf:"bytes,5,opt,name=input_message,json=inputMessage" json:"input_message,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + InputMessage *any.Any `protobuf:"bytes,5,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -131,7 +131,7 @@ func (m *EventHandler_StartWorkflow) Reset() { *m = EventHandler_StartWo func (m *EventHandler_StartWorkflow) String() string { return proto.CompactTextString(m) } func (*EventHandler_StartWorkflow) ProtoMessage() {} func (*EventHandler_StartWorkflow) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 0} + return fileDescriptor_eventhandler_9283dbc454fc79ea, []int{0, 0} } func (m *EventHandler_StartWorkflow) XXX_Unmarshal(b []byte) error { return 
xxx_messageInfo_EventHandler_StartWorkflow.Unmarshal(m, b) @@ -187,10 +187,10 @@ func (m *EventHandler_StartWorkflow) GetInputMessage() *any.Any { } type EventHandler_TaskDetails struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName" json:"task_ref_name,omitempty"` - Output map[string]*_struct.Value `protobuf:"bytes,3,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - OutputMessage *any.Any `protobuf:"bytes,4,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + TaskRefName string `protobuf:"bytes,2,opt,name=task_ref_name,json=taskRefName,proto3" json:"task_ref_name,omitempty"` + Output map[string]*_struct.Value `protobuf:"bytes,3,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + OutputMessage *any.Any `protobuf:"bytes,4,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -200,7 +200,7 @@ func (m *EventHandler_TaskDetails) Reset() { *m = EventHandler_TaskDetai func (m *EventHandler_TaskDetails) String() string { return proto.CompactTextString(m) } func (*EventHandler_TaskDetails) ProtoMessage() {} func (*EventHandler_TaskDetails) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 1} + return fileDescriptor_eventhandler_9283dbc454fc79ea, []int{0, 1} } func (m *EventHandler_TaskDetails) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventHandler_TaskDetails.Unmarshal(m, b) @@ -249,11 +249,11 @@ func (m *EventHandler_TaskDetails) GetOutputMessage() *any.Any { } type EventHandler_Action struct { - Action EventHandler_Action_Type `protobuf:"varint,1,opt,name=action,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` - StartWorkflow *EventHandler_StartWorkflow `protobuf:"bytes,2,opt,name=start_workflow,json=startWorkflow" json:"start_workflow,omitempty"` - CompleteTask *EventHandler_TaskDetails `protobuf:"bytes,3,opt,name=complete_task,json=completeTask" json:"complete_task,omitempty"` - FailTask *EventHandler_TaskDetails `protobuf:"bytes,4,opt,name=fail_task,json=failTask" json:"fail_task,omitempty"` - ExpandInlineJson bool `protobuf:"varint,5,opt,name=expand_inline_json,json=expandInlineJson" json:"expand_inline_json,omitempty"` + Action EventHandler_Action_Type `protobuf:"varint,1,opt,name=action,proto3,enum=conductor.proto.EventHandler_Action_Type" json:"action,omitempty"` + StartWorkflow *EventHandler_StartWorkflow `protobuf:"bytes,2,opt,name=start_workflow,json=startWorkflow,proto3" json:"start_workflow,omitempty"` + CompleteTask *EventHandler_TaskDetails `protobuf:"bytes,3,opt,name=complete_task,json=completeTask,proto3" json:"complete_task,omitempty"` + FailTask *EventHandler_TaskDetails `protobuf:"bytes,4,opt,name=fail_task,json=failTask,proto3" json:"fail_task,omitempty"` + ExpandInlineJson bool `protobuf:"varint,5,opt,name=expand_inline_json,json=expandInlineJson,proto3" json:"expand_inline_json,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -263,7 +263,7 @@ func (m 
*EventHandler_Action) Reset() { *m = EventHandler_Action{} } func (m *EventHandler_Action) String() string { return proto.CompactTextString(m) } func (*EventHandler_Action) ProtoMessage() {} func (*EventHandler_Action) Descriptor() ([]byte, []int) { - return fileDescriptor_eventhandler_d75293086a3c9db8, []int{0, 2} + return fileDescriptor_eventhandler_9283dbc454fc79ea, []int{0, 2} } func (m *EventHandler_Action) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_EventHandler_Action.Unmarshal(m, b) @@ -329,10 +329,10 @@ func init() { } func init() { - proto.RegisterFile("model/eventhandler.proto", fileDescriptor_eventhandler_d75293086a3c9db8) + proto.RegisterFile("model/eventhandler.proto", fileDescriptor_eventhandler_9283dbc454fc79ea) } -var fileDescriptor_eventhandler_d75293086a3c9db8 = []byte{ +var fileDescriptor_eventhandler_9283dbc454fc79ea = []byte{ // 665 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x54, 0x6f, 0x4f, 0xd3, 0x40, 0x18, 0x77, 0x7f, 0x61, 0x4f, 0xe9, 0x9c, 0x17, 0x42, 0xea, 0x24, 0x91, 0x10, 0x4d, 0x30, 0x92, diff --git a/client/gogrpc/conductor/model/polldata.pb.go b/client/gogrpc/conductor/model/polldata.pb.go index 53207b224e..3b569f532b 100644 --- a/client/gogrpc/conductor/model/polldata.pb.go +++ b/client/gogrpc/conductor/model/polldata.pb.go @@ -19,10 +19,10 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type PollData struct { - QueueName string `protobuf:"bytes,1,opt,name=queue_name,json=queueName" json:"queue_name,omitempty"` - Domain string `protobuf:"bytes,2,opt,name=domain" json:"domain,omitempty"` - WorkerId string `protobuf:"bytes,3,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - LastPollTime int64 `protobuf:"varint,4,opt,name=last_poll_time,json=lastPollTime" json:"last_poll_time,omitempty"` + QueueName string `protobuf:"bytes,1,opt,name=queue_name,json=queueName,proto3" json:"queue_name,omitempty"` + Domain string `protobuf:"bytes,2,opt,name=domain,proto3" json:"domain,omitempty"` + WorkerId string `protobuf:"bytes,3,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + LastPollTime int64 `protobuf:"varint,4,opt,name=last_poll_time,json=lastPollTime,proto3" json:"last_poll_time,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -32,7 +32,7 @@ func (m *PollData) Reset() { *m = PollData{} } func (m *PollData) String() string { return proto.CompactTextString(m) } func (*PollData) ProtoMessage() {} func (*PollData) Descriptor() ([]byte, []int) { - return fileDescriptor_polldata_17cab9e308fb8d52, []int{0} + return fileDescriptor_polldata_c64f15389955536a, []int{0} } func (m *PollData) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_PollData.Unmarshal(m, b) @@ -84,9 +84,9 @@ func init() { proto.RegisterType((*PollData)(nil), "conductor.proto.PollData") } -func init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_17cab9e308fb8d52) } +func init() { proto.RegisterFile("model/polldata.proto", fileDescriptor_polldata_c64f15389955536a) } -var fileDescriptor_polldata_17cab9e308fb8d52 = []byte{ +var fileDescriptor_polldata_c64f15389955536a = []byte{ // 229 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x90, 0xc1, 0x4a, 0x03, 0x31, 0x10, 0x86, 0x59, 0x2b, 0xa5, 0x3b, 0x88, 0x42, 0x10, 0x59, 0x28, 0x42, 0x11, 0x0f, 0x3d, 0x6d, diff --git 
a/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go index 2652f2ab07..cf04669222 100644 --- a/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go +++ b/client/gogrpc/conductor/model/rerunworkflowrequest.pb.go @@ -20,11 +20,11 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type RerunWorkflowRequest struct { - ReRunFromWorkflowId string `protobuf:"bytes,1,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId" json:"re_run_from_workflow_id,omitempty"` - WorkflowInput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=workflow_input,json=workflowInput" json:"workflow_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - ReRunFromTaskId string `protobuf:"bytes,3,opt,name=re_run_from_task_id,json=reRunFromTaskId" json:"re_run_from_task_id,omitempty"` - TaskInput map[string]*_struct.Value `protobuf:"bytes,4,rep,name=task_input,json=taskInput" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - CorrelationId string `protobuf:"bytes,5,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` + ReRunFromWorkflowId string `protobuf:"bytes,1,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` + WorkflowInput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=workflow_input,json=workflowInput,proto3" json:"workflow_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ReRunFromTaskId string `protobuf:"bytes,3,opt,name=re_run_from_task_id,json=reRunFromTaskId,proto3" json:"re_run_from_task_id,omitempty"` + TaskInput map[string]*_struct.Value `protobuf:"bytes,4,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + CorrelationId string `protobuf:"bytes,5,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -34,7 +34,7 @@ func (m *RerunWorkflowRequest) Reset() { *m = RerunWorkflowRequest{} } func (m *RerunWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*RerunWorkflowRequest) ProtoMessage() {} func (*RerunWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8, []int{0} + return fileDescriptor_rerunworkflowrequest_ec6d727a6700f219, []int{0} } func (m *RerunWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_RerunWorkflowRequest.Unmarshal(m, b) @@ -96,10 +96,10 @@ func init() { } func init() { - proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8) + proto.RegisterFile("model/rerunworkflowrequest.proto", fileDescriptor_rerunworkflowrequest_ec6d727a6700f219) } -var fileDescriptor_rerunworkflowrequest_54d9ae665213e0b8 = []byte{ +var fileDescriptor_rerunworkflowrequest_ec6d727a6700f219 = []byte{ // 369 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0x4f, 0xef, 0xd2, 0x30, 0x18, 0xc7, 0x33, 0xf8, 0x61, 0x42, 0x09, 0x60, 0x8a, 0x41, 0x82, 0x1e, 0x16, 0x13, 0x13, 0x0e, diff --git a/client/gogrpc/conductor/model/skiptaskrequest.pb.go b/client/gogrpc/conductor/model/skiptaskrequest.pb.go index 
6a937fa148..42cd06bc1d 100644 --- a/client/gogrpc/conductor/model/skiptaskrequest.pb.go +++ b/client/gogrpc/conductor/model/skiptaskrequest.pb.go @@ -21,10 +21,10 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type SkipTaskRequest struct { - TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - TaskInputMessage *any.Any `protobuf:"bytes,3,opt,name=task_input_message,json=taskInputMessage" json:"task_input_message,omitempty"` - TaskOutputMessage *any.Any `protobuf:"bytes,4,opt,name=task_output_message,json=taskOutputMessage" json:"task_output_message,omitempty"` + TaskInput map[string]*_struct.Value `protobuf:"bytes,1,rep,name=task_input,json=taskInput,proto3" json:"task_input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskOutput map[string]*_struct.Value `protobuf:"bytes,2,rep,name=task_output,json=taskOutput,proto3" json:"task_output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskInputMessage *any.Any `protobuf:"bytes,3,opt,name=task_input_message,json=taskInputMessage,proto3" json:"task_input_message,omitempty"` + TaskOutputMessage *any.Any `protobuf:"bytes,4,opt,name=task_output_message,json=taskOutputMessage,proto3" json:"task_output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -34,7 +34,7 @@ func (m *SkipTaskRequest) Reset() { *m = SkipTaskRequest{} } func (m *SkipTaskRequest) String() string { return proto.CompactTextString(m) } func (*SkipTaskRequest) ProtoMessage() {} func (*SkipTaskRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_skiptaskrequest_fb745ec89a45d156, []int{0} + return fileDescriptor_skiptaskrequest_3fbc52032537d94c, []int{0} } func (m *SkipTaskRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_SkipTaskRequest.Unmarshal(m, b) @@ -89,10 +89,10 @@ func init() { } func init() { - proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_fb745ec89a45d156) + proto.RegisterFile("model/skiptaskrequest.proto", fileDescriptor_skiptaskrequest_3fbc52032537d94c) } -var fileDescriptor_skiptaskrequest_fb745ec89a45d156 = []byte{ +var fileDescriptor_skiptaskrequest_3fbc52032537d94c = []byte{ // 348 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x92, 0xbd, 0x4e, 0xc3, 0x30, 0x14, 0x85, 0x95, 0x06, 0x90, 0xea, 0x4a, 0xb4, 0x35, 0x08, 0x85, 0x94, 0xa1, 0x62, 0xea, 0x80, diff --git a/client/gogrpc/conductor/model/startworkflowrequest.pb.go b/client/gogrpc/conductor/model/startworkflowrequest.pb.go index dbc07d7d64..bd2ca8e26f 100644 --- a/client/gogrpc/conductor/model/startworkflowrequest.pb.go +++ b/client/gogrpc/conductor/model/startworkflowrequest.pb.go @@ -20,11 +20,12 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type StartWorkflowRequest struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` - CorrelationId string 
`protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - TaskToDomain map[string]string `protobuf:"bytes,5,rep,name=task_to_domain,json=taskToDomain" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + TaskToDomain map[string]string `protobuf:"bytes,5,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowDef *WorkflowDef `protobuf:"bytes,6,opt,name=workflow_def,json=workflowDef,proto3" json:"workflow_def,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -34,7 +35,7 @@ func (m *StartWorkflowRequest) Reset() { *m = StartWorkflowRequest{} } func (m *StartWorkflowRequest) String() string { return proto.CompactTextString(m) } func (*StartWorkflowRequest) ProtoMessage() {} func (*StartWorkflowRequest) Descriptor() ([]byte, []int) { - return fileDescriptor_startworkflowrequest_76bb252c87e811af, []int{0} + return fileDescriptor_startworkflowrequest_57b778443ff5f3ba, []int{0} } func (m *StartWorkflowRequest) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_StartWorkflowRequest.Unmarshal(m, b) @@ -89,6 +90,13 @@ func (m *StartWorkflowRequest) GetTaskToDomain() map[string]string { return nil } +func (m *StartWorkflowRequest) GetWorkflowDef() *WorkflowDef { + if m != nil { + return m.WorkflowDef + } + return nil +} + func init() { proto.RegisterType((*StartWorkflowRequest)(nil), "conductor.proto.StartWorkflowRequest") proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.StartWorkflowRequest.InputEntry") @@ -96,32 +104,34 @@ func init() { } func init() { - proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_76bb252c87e811af) -} - -var fileDescriptor_startworkflowrequest_76bb252c87e811af = []byte{ - // 360 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0xc1, 0x4b, 0xe3, 0x40, - 0x14, 0xc6, 0x49, 0xd3, 0xec, 0xd2, 0xe9, 0x6e, 0x77, 0x1d, 0x4a, 0x09, 0xd5, 0x43, 0x10, 0x84, - 0x1e, 0x64, 0x22, 0xf5, 0xa0, 0xf4, 0x22, 0x14, 0x15, 0x7a, 0x2b, 0xb1, 0x28, 0x08, 0x52, 0x92, - 0xc9, 0x34, 0x0e, 0x49, 0xe6, 0xb5, 0x93, 0x49, 0x6b, 0xff, 0x20, 0xff, 0x4f, 0xc9, 0x24, 0xd5, - 0x50, 0x7b, 0xf0, 0x36, 0xef, 0xcb, 0xfb, 0x7e, 0xef, 0xbd, 0x8f, 0x20, 0x27, 0x85, 0x90, 0x25, - 0x6e, 0xa6, 0x7c, 0xa9, 0x36, 0x20, 0xe3, 0x45, 0x02, 0x1b, 0xc9, 0x56, 0x39, 0xcb, 0x14, 0x59, - 0x4a, 0x50, 0x80, 0xff, 0x51, 0x10, 0x61, 0x4e, 0x15, 0xc8, 0x52, 0xe8, 0x9f, 0x44, 0x00, 0x51, - 0xc2, 0x5c, 0x5d, 0x05, 0xf9, 0xc2, 0xcd, 0x94, 0xcc, 0x69, 0xd5, 0x7e, 0xfa, 0x6e, 0xa2, 0xee, - 0x43, 0x41, 0x7b, 0xaa, 0x68, 0x5e, 0x49, 0xc3, 0x18, 0x35, 0x85, 0x9f, 0x32, 0xdb, 0x70, 0x8c, - 
0x41, 0xcb, 0xd3, 0x6f, 0x6c, 0xa3, 0xdf, 0x6b, 0x26, 0x33, 0x0e, 0xc2, 0x6e, 0x38, 0xc6, 0xc0, - 0xf2, 0x76, 0x25, 0x3e, 0x43, 0x1d, 0x0a, 0x52, 0xb2, 0xc4, 0x57, 0x1c, 0xc4, 0x9c, 0x87, 0xb6, - 0xa9, 0x7d, 0x7f, 0x6b, 0xea, 0x24, 0xc4, 0xf7, 0xc8, 0xe2, 0x62, 0x99, 0x2b, 0xbb, 0xe9, 0x98, - 0x83, 0xf6, 0xf0, 0x82, 0xec, 0x2d, 0x4b, 0x0e, 0xad, 0x42, 0x26, 0x85, 0xe5, 0x4e, 0x28, 0xb9, - 0xf5, 0x4a, 0x3b, 0x7e, 0x41, 0x1d, 0xe5, 0x67, 0xf1, 0x5c, 0xc1, 0x3c, 0x84, 0xd4, 0xe7, 0xc2, - 0xb6, 0x34, 0xf0, 0xea, 0x67, 0xc0, 0x99, 0x9f, 0xc5, 0x33, 0xb8, 0xd5, 0xce, 0x92, 0xfb, 0x47, - 0xd5, 0xa4, 0xfe, 0x14, 0xa1, 0xaf, 0x99, 0xf8, 0x3f, 0x32, 0x63, 0xb6, 0xad, 0x82, 0x28, 0x9e, - 0xf8, 0x1c, 0x59, 0x6b, 0x3f, 0xc9, 0x99, 0x4e, 0xa1, 0x3d, 0xec, 0x91, 0x32, 0x62, 0xb2, 0x8b, - 0x98, 0x3c, 0x16, 0x5f, 0xbd, 0xb2, 0x69, 0xd4, 0xb8, 0x36, 0xfa, 0x37, 0xe8, 0xe8, 0xdb, 0xd0, - 0x03, 0xe0, 0x6e, 0x1d, 0xdc, 0xaa, 0x01, 0xc6, 0x2b, 0x74, 0x4c, 0x21, 0x25, 0x82, 0xa9, 0x45, - 0xc2, 0xdf, 0xf6, 0xcf, 0x1c, 0xf7, 0x0e, 0xdd, 0x39, 0x0d, 0x9e, 0x47, 0x11, 0x57, 0xaf, 0x79, - 0x40, 0x28, 0xa4, 0x6e, 0xe5, 0x75, 0x3f, 0xbd, 0x2e, 0x4d, 0x38, 0x13, 0xca, 0x8d, 0x20, 0x92, - 0x4b, 0x5a, 0xd3, 0xf5, 0x4f, 0x16, 0xfc, 0xd2, 0xe8, 0xcb, 0x8f, 0x00, 0x00, 0x00, 0xff, 0xff, - 0x48, 0x61, 0x1b, 0x82, 0x74, 0x02, 0x00, 0x00, + proto.RegisterFile("model/startworkflowrequest.proto", fileDescriptor_startworkflowrequest_57b778443ff5f3ba) +} + +var fileDescriptor_startworkflowrequest_57b778443ff5f3ba = []byte{ + // 396 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x92, 0x51, 0xab, 0xd3, 0x30, + 0x14, 0x80, 0xe9, 0xed, 0xed, 0x95, 0x9b, 0x5e, 0xaf, 0x1a, 0x2e, 0xd7, 0x32, 0xf7, 0x50, 0x04, + 0xa1, 0x0f, 0x92, 0xca, 0x7c, 0x50, 0xf6, 0x32, 0x18, 0x53, 0xd8, 0xdb, 0xa8, 0x43, 0x41, 0x90, + 0xd2, 0xa6, 0x69, 0x0d, 0x6d, 0x73, 0xb6, 0x34, 0xdd, 0xdc, 0x1f, 0xf6, 0x77, 0x48, 0xd3, 0xd6, + 0x95, 0x6d, 0x0f, 0xf7, 0x2d, 0xe7, 0x24, 0xdf, 0x97, 0x93, 0x73, 0x82, 0xdc, 0x12, 0x12, 0x56, + 0xf8, 0x95, 0x8a, 0xa4, 0xda, 0x83, 0xcc, 0xd3, 0x02, 0xf6, 0x92, 0x6d, 0x6b, 0x56, 0x29, 0xb2, + 0x91, 0xa0, 0x00, 0xbf, 0xa0, 0x20, 0x92, 0x9a, 0x2a, 0x90, 0x6d, 0x62, 0xf4, 0xba, 0x45, 0xfa, + 0xd3, 0x09, 0x4b, 0xbb, 0x8d, 0x71, 0x06, 0x90, 0x15, 0xcc, 0xd7, 0x51, 0x5c, 0xa7, 0x7e, 0xa5, + 0x64, 0x4d, 0x3b, 0xcf, 0xdb, 0xbf, 0x26, 0x7a, 0xf8, 0xd6, 0x5c, 0xf3, 0xa3, 0x03, 0x83, 0xf6, + 0x1a, 0x8c, 0xd1, 0xb5, 0x88, 0x4a, 0xe6, 0x18, 0xae, 0xe1, 0xdd, 0x06, 0x7a, 0x8d, 0x1d, 0xf4, + 0x6c, 0xc7, 0x64, 0xc5, 0x41, 0x38, 0x57, 0xae, 0xe1, 0x59, 0x41, 0x1f, 0xe2, 0x77, 0xe8, 0x9e, + 0x82, 0x94, 0xac, 0x88, 0x14, 0x07, 0x11, 0xf2, 0xc4, 0x31, 0x35, 0xf7, 0x7c, 0x90, 0x5d, 0x26, + 0xf8, 0x2b, 0xb2, 0xb8, 0xd8, 0xd4, 0xca, 0xb9, 0x76, 0x4d, 0xcf, 0x9e, 0x7c, 0x20, 0x27, 0xaf, + 0x20, 0x97, 0x4a, 0x21, 0xcb, 0x06, 0xf9, 0x22, 0x94, 0x3c, 0x04, 0x2d, 0x8e, 0x7f, 0xa1, 0x7b, + 0x15, 0x55, 0x79, 0xa8, 0x20, 0x4c, 0xa0, 0x8c, 0xb8, 0x70, 0x2c, 0x2d, 0xfc, 0xf4, 0x34, 0xe1, + 0x3a, 0xaa, 0xf2, 0x35, 0x2c, 0x34, 0xd9, 0x7a, 0xef, 0xd4, 0x20, 0x85, 0x67, 0xe8, 0xae, 0xef, + 0x63, 0x98, 0xb0, 0xd4, 0xb9, 0x71, 0x0d, 0xcf, 0x9e, 0x8c, 0xcf, 0xe4, 0xbd, 0x77, 0xc1, 0xd2, + 0xc0, 0xde, 0x1f, 0x83, 0xd1, 0x0a, 0xa1, 0x63, 0xd1, 0xf8, 0x25, 0x32, 0x73, 0x76, 0xe8, 0x3a, + 0xd9, 0x2c, 0xf1, 0x7b, 0x64, 0xed, 0xa2, 0xa2, 0x66, 0xba, 0x8d, 0xf6, 0xe4, 0x91, 0xb4, 0x33, + 0x22, 0xfd, 0x8c, 0xc8, 0xf7, 0x66, 0x37, 0x68, 0x0f, 0x4d, 0xaf, 0x3e, 0x1b, 0xa3, 0x19, 0x7a, + 0x75, 0x56, 0xf5, 0x05, 0xf1, 0xc3, 
0x50, 0x7c, 0x3b, 0x10, 0xcc, 0xb7, 0xe8, 0x0d, 0x85, 0x92, + 0x08, 0xa6, 0xd2, 0x82, 0xff, 0x39, 0x7d, 0xca, 0xfc, 0xf1, 0x52, 0xa3, 0x56, 0xf1, 0xcf, 0x69, + 0xc6, 0xd5, 0xef, 0x3a, 0x26, 0x14, 0x4a, 0xbf, 0x63, 0xfd, 0xff, 0xac, 0x4f, 0x0b, 0xce, 0x84, + 0xf2, 0x33, 0xc8, 0xe4, 0x86, 0x0e, 0xf2, 0xfa, 0x2f, 0xc6, 0x37, 0x5a, 0xfd, 0xf1, 0x5f, 0x00, + 0x00, 0x00, 0xff, 0xff, 0x26, 0x3f, 0xa7, 0x2d, 0xce, 0x02, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/subworkflowparams.pb.go b/client/gogrpc/conductor/model/subworkflowparams.pb.go index cac96fa19f..ac4a65deb1 100644 --- a/client/gogrpc/conductor/model/subworkflowparams.pb.go +++ b/client/gogrpc/conductor/model/subworkflowparams.pb.go @@ -6,7 +6,6 @@ package model // import "github.com/netflix/conductor/client/gogrpc/conductor/mo import proto "github.com/golang/protobuf/proto" import fmt "fmt" import math "math" -import _struct "github.com/golang/protobuf/ptypes/struct" // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal @@ -20,18 +19,18 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type SubWorkflowParams struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Version *_struct.Value `protobuf:"bytes,2,opt,name=version" json:"version,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *SubWorkflowParams) Reset() { *m = SubWorkflowParams{} } func (m *SubWorkflowParams) String() string { return proto.CompactTextString(m) } func (*SubWorkflowParams) ProtoMessage() {} func (*SubWorkflowParams) Descriptor() ([]byte, []int) { - return fileDescriptor_subworkflowparams_182a77e44709d20f, []int{0} + return fileDescriptor_subworkflowparams_957362175bba13c8, []int{0} } func (m *SubWorkflowParams) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_SubWorkflowParams.Unmarshal(m, b) @@ -58,11 +57,11 @@ func (m *SubWorkflowParams) GetName() string { return "" } -func (m *SubWorkflowParams) GetVersion() *_struct.Value { +func (m *SubWorkflowParams) GetVersion() int32 { if m != nil { return m.Version } - return nil + return 0 } func init() { @@ -70,23 +69,21 @@ func init() { } func init() { - proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_182a77e44709d20f) + proto.RegisterFile("model/subworkflowparams.proto", fileDescriptor_subworkflowparams_957362175bba13c8) } -var fileDescriptor_subworkflowparams_182a77e44709d20f = []byte{ - // 217 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x64, 0x90, 0x4f, 0x4b, 0xc4, 0x30, - 0x10, 0xc5, 0xa9, 0x88, 0x62, 0x3c, 0x88, 0x11, 0xa4, 0xf8, 0x07, 0x8a, 0xa7, 0x9e, 0x12, 0xd1, - 0x9b, 0xc7, 0x7e, 0x82, 0x52, 0x41, 0xd1, 0x5b, 0x92, 0xa6, 0x31, 0x98, 0x64, 0x4a, 0xfe, 0x6c, - 0xf7, 0xe3, 0x2f, 0xa4, 0xed, 0xb2, 0xec, 0xde, 0x66, 0xde, 0xcc, 0xfb, 0xcd, 0xf0, 0xd0, 0xb3, - 0x85, 0x5e, 0x1a, 0x1a, 0x12, 0x9f, 0xc0, 0xff, 0x0f, 0x06, 0xa6, 0x91, 0x79, 0x66, 0x03, 0x19, - 0x3d, 0x44, 0xc0, 0x37, 0x02, 0x5c, 0x9f, 0x44, 0x04, 0x3f, 0x0b, 0x0f, 0x4f, 0x0a, 0x40, 0x19, - 0x49, 0x73, 0xc7, 0xd3, 0x40, 0x43, 0xf4, 0x49, 0xc4, 0x79, 0xfa, 0xf2, 0x83, 0x6e, 
0x3f, 0x13, - 0xff, 0x5e, 0x48, 0x6d, 0x26, 0x61, 0x8c, 0xce, 0x1d, 0xb3, 0xb2, 0x2c, 0xaa, 0xa2, 0xbe, 0xea, - 0x72, 0x8d, 0x5f, 0xd1, 0xe5, 0x46, 0xfa, 0xa0, 0xc1, 0x95, 0x67, 0x55, 0x51, 0x5f, 0xbf, 0xdd, - 0x93, 0x19, 0x4c, 0x56, 0x30, 0xf9, 0x62, 0x26, 0xc9, 0x6e, 0x5d, 0x6b, 0x1c, 0x7a, 0x14, 0x60, - 0x89, 0x93, 0x71, 0x30, 0x7a, 0x4b, 0x8e, 0xfe, 0x6a, 0xee, 0x4e, 0xee, 0xb6, 0xfc, 0xf7, 0x43, - 0xe9, 0xf8, 0x97, 0x38, 0x11, 0x60, 0xe9, 0x62, 0xa4, 0x7b, 0x23, 0x15, 0x46, 0x4b, 0x17, 0xa9, - 0x02, 0xe5, 0x47, 0x71, 0xa0, 0xe7, 0x44, 0xf8, 0x45, 0xe6, 0xbe, 0xef, 0x02, 0x00, 0x00, 0xff, - 0xff, 0xab, 0x91, 0x1f, 0xb2, 0x21, 0x01, 0x00, 0x00, +var fileDescriptor_subworkflowparams_957362175bba13c8 = []byte{ + // 183 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0x92, 0xcd, 0xcd, 0x4f, 0x49, + 0xcd, 0xd1, 0x2f, 0x2e, 0x4d, 0x2a, 0xcf, 0x2f, 0xca, 0x4e, 0xcb, 0xc9, 0x2f, 0x2f, 0x48, 0x2c, + 0x4a, 0xcc, 0x2d, 0xd6, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x4f, 0xce, 0xcf, 0x4b, 0x29, + 0x4d, 0x2e, 0xc9, 0x2f, 0x82, 0x08, 0x28, 0x39, 0x72, 0x09, 0x06, 0x97, 0x26, 0x85, 0x43, 0xd5, + 0x06, 0x80, 0xd5, 0x0a, 0x09, 0x71, 0xb1, 0xe4, 0x25, 0xe6, 0xa6, 0x4a, 0x30, 0x2a, 0x30, 0x6a, + 0x70, 0x06, 0x81, 0xd9, 0x42, 0x12, 0x5c, 0xec, 0x65, 0xa9, 0x45, 0xc5, 0x99, 0xf9, 0x79, 0x12, + 0x4c, 0x0a, 0x8c, 0x1a, 0xac, 0x41, 0x30, 0xae, 0x53, 0x1e, 0x97, 0x74, 0x72, 0x7e, 0xae, 0x5e, + 0x5e, 0x6a, 0x49, 0x5a, 0x4e, 0x66, 0x85, 0x1e, 0x9a, 0x0d, 0x4e, 0xc2, 0x18, 0xe6, 0x07, 0x24, + 0x45, 0x59, 0xa5, 0x67, 0x96, 0x64, 0x94, 0x26, 0xe9, 0x25, 0xe7, 0xe7, 0xea, 0x43, 0x35, 0xea, + 0xc3, 0x35, 0xea, 0x27, 0xe7, 0x64, 0xa6, 0xe6, 0x95, 0xe8, 0xa7, 0xe7, 0xa7, 0x17, 0x15, 0x24, + 0x23, 0x89, 0x83, 0xfd, 0x96, 0xc4, 0x06, 0x36, 0xd7, 0x18, 0x10, 0x00, 0x00, 0xff, 0xff, 0x52, + 0x9c, 0xc5, 0x01, 0xeb, 0x00, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/task.pb.go b/client/gogrpc/conductor/model/task.pb.go index 156bdab5d1..0c10b923ff 100644 --- a/client/gogrpc/conductor/model/task.pb.go +++ b/client/gogrpc/conductor/model/task.pb.go @@ -64,40 +64,41 @@ func (x Task_Status) String() string { return proto.EnumName(Task_Status_name, int32(x)) } func (Task_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_task_7843d3d2fd8c2dc8, []int{0, 0} + return fileDescriptor_task_755b02a04c3b7e73, []int{0, 0} } type Task struct { - TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - Status Task_Status `protobuf:"varint,2,opt,name=status,enum=conductor.proto.Task_Status" json:"status,omitempty"` - InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName" json:"reference_task_name,omitempty"` - RetryCount int32 `protobuf:"varint,5,opt,name=retry_count,json=retryCount" json:"retry_count,omitempty"` - Seq int32 `protobuf:"varint,6,opt,name=seq" json:"seq,omitempty"` - CorrelationId string `protobuf:"bytes,7,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - PollCount int32 `protobuf:"varint,8,opt,name=poll_count,json=pollCount" json:"poll_count,omitempty"` - TaskDefName string `protobuf:"bytes,9,opt,name=task_def_name,json=taskDefName" json:"task_def_name,omitempty"` - ScheduledTime int64 
`protobuf:"varint,10,opt,name=scheduled_time,json=scheduledTime" json:"scheduled_time,omitempty"` - StartTime int64 `protobuf:"varint,11,opt,name=start_time,json=startTime" json:"start_time,omitempty"` - EndTime int64 `protobuf:"varint,12,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - UpdateTime int64 `protobuf:"varint,13,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` - StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds" json:"start_delay_in_seconds,omitempty"` - RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId" json:"retried_task_id,omitempty"` - Retried bool `protobuf:"varint,16,opt,name=retried" json:"retried,omitempty"` - Executed bool `protobuf:"varint,17,opt,name=executed" json:"executed,omitempty"` - CallbackFromWorker bool `protobuf:"varint,18,opt,name=callback_from_worker,json=callbackFromWorker" json:"callback_from_worker,omitempty"` - ResponseTimeoutSeconds int32 `protobuf:"varint,19,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` - WorkflowInstanceId string `protobuf:"bytes,20,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` - WorkflowType string `protobuf:"bytes,21,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` - TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,23,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - CallbackAfterSeconds int64 `protobuf:"varint,24,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` - WorkerId string `protobuf:"bytes,25,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - OutputData map[string]*_struct.Value `protobuf:"bytes,26,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - WorkflowTask *WorkflowTask `protobuf:"bytes,27,opt,name=workflow_task,json=workflowTask" json:"workflow_task,omitempty"` - Domain string `protobuf:"bytes,28,opt,name=domain" json:"domain,omitempty"` - InputMessage *any.Any `protobuf:"bytes,29,opt,name=input_message,json=inputMessage" json:"input_message,omitempty"` - OutputMessage *any.Any `protobuf:"bytes,30,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` + TaskType string `protobuf:"bytes,1,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + Status Task_Status `protobuf:"varint,2,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` + InputData map[string]*_struct.Value `protobuf:"bytes,3,rep,name=input_data,json=inputData,proto3" json:"input_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + ReferenceTaskName string `protobuf:"bytes,4,opt,name=reference_task_name,json=referenceTaskName,proto3" json:"reference_task_name,omitempty"` + RetryCount int32 `protobuf:"varint,5,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + Seq int32 `protobuf:"varint,6,opt,name=seq,proto3" json:"seq,omitempty"` + CorrelationId string `protobuf:"bytes,7,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + PollCount int32 `protobuf:"varint,8,opt,name=poll_count,json=pollCount,proto3" json:"poll_count,omitempty"` + 
TaskDefName string `protobuf:"bytes,9,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` + ScheduledTime int64 `protobuf:"varint,10,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` + StartTime int64 `protobuf:"varint,11,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + EndTime int64 `protobuf:"varint,12,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + UpdateTime int64 `protobuf:"varint,13,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + StartDelayInSeconds int32 `protobuf:"varint,14,opt,name=start_delay_in_seconds,json=startDelayInSeconds,proto3" json:"start_delay_in_seconds,omitempty"` + RetriedTaskId string `protobuf:"bytes,15,opt,name=retried_task_id,json=retriedTaskId,proto3" json:"retried_task_id,omitempty"` + Retried bool `protobuf:"varint,16,opt,name=retried,proto3" json:"retried,omitempty"` + Executed bool `protobuf:"varint,17,opt,name=executed,proto3" json:"executed,omitempty"` + CallbackFromWorker bool `protobuf:"varint,18,opt,name=callback_from_worker,json=callbackFromWorker,proto3" json:"callback_from_worker,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,19,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` + WorkflowInstanceId string `protobuf:"bytes,20,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` + WorkflowType string `protobuf:"bytes,21,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + TaskId string `protobuf:"bytes,22,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,23,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,24,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,25,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,26,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowTask *WorkflowTask `protobuf:"bytes,27,opt,name=workflow_task,json=workflowTask,proto3" json:"workflow_task,omitempty"` + Domain string `protobuf:"bytes,28,opt,name=domain,proto3" json:"domain,omitempty"` + InputMessage *any.Any `protobuf:"bytes,29,opt,name=input_message,json=inputMessage,proto3" json:"input_message,omitempty"` + OutputMessage *any.Any `protobuf:"bytes,30,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` + RateLimitPerSecond int32 `protobuf:"varint,31,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -107,7 +108,7 @@ func (m *Task) Reset() { *m = Task{} } func (m *Task) String() string { return proto.CompactTextString(m) } func (*Task) ProtoMessage() {} func (*Task) Descriptor() ([]byte, []int) { - return fileDescriptor_task_7843d3d2fd8c2dc8, []int{0} + return fileDescriptor_task_755b02a04c3b7e73, []int{0} } func (m *Task) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Task.Unmarshal(m, b) @@ -337,6 +338,13 @@ func (m *Task) 
GetOutputMessage() *any.Any { return nil } +func (m *Task) GetRateLimitPerSecond() int32 { + if m != nil { + return m.RateLimitPerSecond + } + return 0 +} + func init() { proto.RegisterType((*Task)(nil), "conductor.proto.Task") proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Task.InputDataEntry") @@ -344,70 +352,71 @@ func init() { proto.RegisterEnum("conductor.proto.Task_Status", Task_Status_name, Task_Status_value) } -func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_7843d3d2fd8c2dc8) } - -var fileDescriptor_task_7843d3d2fd8c2dc8 = []byte{ - // 980 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x6f, 0xdb, 0x36, - 0x14, 0x9d, 0xf3, 0xe1, 0x8f, 0xeb, 0xd8, 0x56, 0x99, 0xc4, 0x61, 0x9c, 0xa4, 0x35, 0xb2, 0x65, - 0xf0, 0xc3, 0x60, 0x17, 0x69, 0x31, 0x74, 0xdd, 0x93, 0x63, 0x2b, 0xab, 0xb0, 0x24, 0x0e, 0x64, - 0x67, 0xc1, 0xf6, 0x22, 0x30, 0x12, 0xed, 0x0a, 0x96, 0x48, 0x8f, 0xa2, 0xd6, 0xfa, 0xc7, 0xed, - 0x3f, 0xed, 0x27, 0x0c, 0x24, 0x25, 0x35, 0x4b, 0x8b, 0x3d, 0xf5, 0x8d, 0x3c, 0xe7, 0xdc, 0xe3, - 0x7b, 0xaf, 0x78, 0xaf, 0xc1, 0x8a, 0x79, 0x40, 0xa3, 0x81, 0x24, 0xc9, 0xb2, 0xbf, 0x12, 0x5c, - 0x72, 0xd4, 0xf2, 0x39, 0x0b, 0x52, 0x5f, 0x72, 0x61, 0x80, 0x0e, 0x36, 0x92, 0x0f, 0x5c, 0x2c, - 0xe7, 0x11, 0xff, 0xf0, 0x49, 0xda, 0x39, 0x5e, 0x70, 0xbe, 0x88, 0xe8, 0x40, 0xdf, 0x1e, 0xd2, - 0xf9, 0x20, 0x91, 0x22, 0xf5, 0x65, 0xc6, 0x1e, 0x3e, 0x65, 0x09, 0x5b, 0x1b, 0xea, 0xf4, 0x9f, - 0x1d, 0xd8, 0x9a, 0x91, 0x64, 0x89, 0x8e, 0xa0, 0xa6, 0xfc, 0x3c, 0xb9, 0x5e, 0x51, 0x5c, 0xea, - 0x96, 0x7a, 0x35, 0xb7, 0xaa, 0x80, 0xd9, 0x7a, 0x45, 0xd1, 0x6b, 0x28, 0x27, 0x92, 0xc8, 0x34, - 0xc1, 0x1b, 0xdd, 0x52, 0xaf, 0x79, 0x7e, 0xdc, 0x7f, 0x92, 0x5a, 0x5f, 0x79, 0xf4, 0xa7, 0x5a, - 0xe3, 0x66, 0x5a, 0x34, 0x02, 0x08, 0xd9, 0x2a, 0x95, 0x5e, 0x40, 0x24, 0xc1, 0x9b, 0xdd, 0xcd, - 0x5e, 0xfd, 0xfc, 0xbb, 0x2f, 0x47, 0x3a, 0x4a, 0x37, 0x26, 0x92, 0xd8, 0x4c, 0x8a, 0xb5, 0x5b, - 0x0b, 0xf3, 0x3b, 0xea, 0xc3, 0xae, 0xa0, 0x73, 0x2a, 0x28, 0xf3, 0xa9, 0xa7, 0x33, 0x64, 0x24, - 0xa6, 0x78, 0x4b, 0x67, 0xf8, 0xac, 0xa0, 0x94, 0xcb, 0x0d, 0x89, 0x29, 0x7a, 0x01, 0x75, 0x41, - 0xa5, 0x58, 0x7b, 0x3e, 0x4f, 0x99, 0xc4, 0xdb, 0xdd, 0x52, 0x6f, 0xdb, 0x05, 0x0d, 0x8d, 0x14, - 0x82, 0x2c, 0xd8, 0x4c, 0xe8, 0x9f, 0xb8, 0xac, 0x09, 0x75, 0x44, 0x67, 0xd0, 0xf4, 0xb9, 0x10, - 0x34, 0x22, 0x32, 0xe4, 0xcc, 0x0b, 0x03, 0x5c, 0xd1, 0xee, 0x8d, 0x47, 0xa8, 0x13, 0xa0, 0x13, - 0x80, 0x15, 0x8f, 0xa2, 0xcc, 0xb8, 0xaa, 0xe3, 0x6b, 0x0a, 0x31, 0xbe, 0xa7, 0xd0, 0xd0, 0xe9, - 0x05, 0x74, 0x6e, 0x52, 0xac, 0x69, 0x93, 0xba, 0x02, 0xc7, 0x74, 0xae, 0x93, 0x3b, 0x83, 0x66, - 0xe2, 0xbf, 0xa7, 0x41, 0x1a, 0xd1, 0xc0, 0x93, 0x61, 0x4c, 0x31, 0x74, 0x4b, 0xbd, 0x4d, 0xb7, - 0x51, 0xa0, 0xb3, 0x30, 0xa6, 0xea, 0x97, 0x12, 0x49, 0x84, 0x34, 0x92, 0xba, 0x96, 0xd4, 0x34, - 0xa2, 0xe9, 0x43, 0xa8, 0x52, 0x96, 0xc5, 0xef, 0x68, 0xb2, 0x42, 0x99, 0x89, 0x7c, 0x01, 0xf5, - 0x74, 0x15, 0x10, 0x49, 0x0d, 0xdb, 0xd0, 0x2c, 0x18, 0x48, 0x0b, 0x5e, 0x41, 0xdb, 0x58, 0x07, - 0x34, 0x22, 0x6b, 0x2f, 0x64, 0x5e, 0x42, 0xd5, 0x17, 0x49, 0x70, 0x53, 0x17, 0xb4, 0xab, 0xd9, - 0xb1, 0x22, 0x1d, 0x36, 0x35, 0x14, 0xfa, 0x1e, 0x5a, 0xaa, 0x81, 0xa1, 0x4a, 0x5a, 0x95, 0x18, - 0x06, 0xb8, 0x65, 0x3a, 0x94, 0xc1, 0xaa, 0xfb, 0x4e, 0x80, 0x30, 0x54, 0x32, 0x00, 0x5b, 0xdd, - 0x52, 0xaf, 0xea, 0xe6, 0x57, 0xd4, 0x81, 0x2a, 0xfd, 0x48, 0xfd, 0x54, 0xd2, 0x00, 0x3f, 0xd3, - 0x54, 0x71, 0x47, 0x2f, 0x61, 0xcf, 0x27, 0x51, 0xf4, 0x40, 0xfc, 
0xa5, 0x37, 0x17, 0x3c, 0xf6, - 0xd4, 0xfb, 0xa6, 0x02, 0x23, 0xad, 0x43, 0x39, 0x77, 0x29, 0x78, 0x7c, 0xaf, 0x19, 0xf4, 0x06, - 0xb0, 0xa0, 0xc9, 0x8a, 0xb3, 0xc4, 0xd4, 0xc9, 0x53, 0x59, 0x94, 0xb1, 0xab, 0xcb, 0x68, 0xe7, - 0xfc, 0xcc, 0xd0, 0x79, 0x25, 0x2f, 0x61, 0x2f, 0x9f, 0x1e, 0x2f, 0x64, 0x89, 0x24, 0xea, 0x55, - 0x85, 0x01, 0xde, 0xd3, 0xe5, 0xa0, 0x9c, 0x73, 0x32, 0xca, 0x09, 0xd0, 0xb7, 0xd0, 0x28, 0x22, - 0xf4, 0x6c, 0xec, 0x6b, 0xe9, 0x4e, 0x0e, 0xea, 0xf9, 0x38, 0x80, 0x4a, 0xde, 0x98, 0xb6, 0xa6, - 0xcb, 0xd2, 0x74, 0xe4, 0x47, 0x38, 0x10, 0x94, 0x24, 0x9c, 0x79, 0x73, 0x2e, 0xbc, 0x90, 0xf9, - 0x3c, 0x5e, 0x45, 0x54, 0x3d, 0x28, 0x7c, 0xa0, 0x85, 0xfb, 0x86, 0xbe, 0xe4, 0xc2, 0x79, 0x44, - 0xa2, 0xd7, 0xd0, 0x2e, 0x7a, 0x42, 0xe6, 0x92, 0x8a, 0xa2, 0x3e, 0xac, 0x3f, 0x69, 0xd1, 0xb1, - 0xa1, 0x22, 0xf3, 0xea, 0x8e, 0xa0, 0x66, 0x7a, 0xa7, 0x12, 0x39, 0x34, 0x33, 0x6c, 0x00, 0x27, - 0x40, 0x97, 0x50, 0xe7, 0xa9, 0x2c, 0xc6, 0xb1, 0xa3, 0xc7, 0xf1, 0xec, 0xcb, 0xe3, 0x38, 0xd1, - 0xc2, 0x4f, 0xf3, 0x08, 0xbc, 0x00, 0xd0, 0xc5, 0xe3, 0x86, 0x90, 0x64, 0x89, 0x8f, 0xba, 0xa5, - 0x5e, 0xfd, 0xfc, 0xe4, 0x33, 0xa7, 0xfb, 0xbc, 0x43, 0x24, 0x59, 0x3e, 0xea, 0x97, 0x5a, 0x36, - 0x6d, 0x28, 0x07, 0x3c, 0x26, 0x21, 0xc3, 0xc7, 0xa6, 0x5d, 0xe6, 0x86, 0x7e, 0x82, 0x86, 0xd9, - 0x18, 0x31, 0x4d, 0x12, 0xb2, 0xa0, 0xf8, 0x44, 0x7b, 0xef, 0xf5, 0xcd, 0x02, 0xeb, 0xe7, 0x0b, - 0xac, 0x3f, 0x64, 0x6b, 0x77, 0x47, 0x4b, 0xaf, 0x8d, 0x12, 0xfd, 0x0c, 0xcd, 0xac, 0xbc, 0x3c, - 0xf6, 0xf9, 0xff, 0xc4, 0x36, 0x8c, 0x36, 0x0b, 0xee, 0xcc, 0xa0, 0xf9, 0xdf, 0x0d, 0xa4, 0xb6, - 0xc4, 0x92, 0xae, 0xb3, 0x45, 0xa8, 0x8e, 0xe8, 0x07, 0xd8, 0xfe, 0x8b, 0x44, 0x29, 0xd5, 0x2b, - 0xb0, 0x7e, 0xde, 0xfe, 0xcc, 0xf7, 0x37, 0xc5, 0xba, 0x46, 0xf4, 0x76, 0xe3, 0x4d, 0xa9, 0x73, - 0x07, 0xad, 0x27, 0x8d, 0xfc, 0x1a, 0xb6, 0xa7, 0x7f, 0x97, 0xa0, 0x6c, 0x36, 0x2d, 0x6a, 0x41, - 0xdd, 0xb9, 0xf1, 0x6e, 0xdd, 0xc9, 0x2f, 0xae, 0x3d, 0x9d, 0x5a, 0xdf, 0xa0, 0x1d, 0xa8, 0x8e, - 0x86, 0x37, 0x23, 0xfb, 0xca, 0x1e, 0x5b, 0x25, 0x04, 0x50, 0xbe, 0x1c, 0x3a, 0xea, 0xbc, 0x81, - 0x9e, 0x43, 0xc7, 0x9c, 0xbd, 0x7b, 0x67, 0xf6, 0xce, 0x9b, 0xd9, 0xee, 0xb5, 0x73, 0x33, 0xbc, - 0xf2, 0x6c, 0xd7, 0x9d, 0xb8, 0xd6, 0x26, 0x6a, 0x40, 0x6d, 0x34, 0xb9, 0xbe, 0xbd, 0xb2, 0x67, - 0xf6, 0xd8, 0xda, 0x42, 0x87, 0xb0, 0x5f, 0x5c, 0x4d, 0x84, 0x16, 0x4e, 0xad, 0x6d, 0xa5, 0x9c, - 0x8e, 0xde, 0xd9, 0xe3, 0x3b, 0x65, 0x5c, 0x56, 0xd7, 0x99, 0x73, 0x6d, 0x8f, 0xbd, 0xc9, 0xdd, - 0xcc, 0xaa, 0xa0, 0x5d, 0x68, 0xb9, 0xf6, 0x70, 0xfc, 0xbb, 0x77, 0x39, 0x71, 0x3d, 0xd7, 0x76, - 0xef, 0x6e, 0xac, 0x2a, 0xaa, 0x43, 0x65, 0xfa, 0xab, 0x73, 0x7b, 0x6b, 0x8f, 0xad, 0xda, 0x05, - 0x81, 0x23, 0x9f, 0xc7, 0x7d, 0x46, 0xe5, 0x3c, 0x0a, 0x3f, 0x3e, 0x7d, 0x36, 0x17, 0x65, 0xf5, - 0x42, 0x6e, 0x1f, 0xfe, 0x78, 0xbb, 0x08, 0xe5, 0xfb, 0xf4, 0xa1, 0xef, 0xf3, 0x78, 0x90, 0x69, - 0x07, 0x85, 0x76, 0xe0, 0x47, 0x21, 0x65, 0x72, 0xb0, 0xe0, 0x0b, 0xb1, 0xf2, 0x1f, 0xe1, 0xfa, - 0x1f, 0xf2, 0xa1, 0xac, 0xad, 0x5e, 0xfd, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x54, 0xf9, 0x20, 0xc1, - 0x54, 0x07, 0x00, 0x00, +func init() { proto.RegisterFile("model/task.proto", fileDescriptor_task_755b02a04c3b7e73) } + +var fileDescriptor_task_755b02a04c3b7e73 = []byte{ + // 1004 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x55, 0x5d, 0x73, 0xda, 0x46, + 0x14, 0x2d, 0xb1, 0xcd, 0xc7, 0xc5, 0x80, 0xb2, 0xb6, 0xf1, 0x1a, 0xdb, 0x31, 0xe3, 0xd6, 0x1d, + 0x1e, 0x3a, 0x90, 0x3a, 0x99, 0x4e, 0x9a, 0x3e, 0x61, 0x90, 0x1b, 
0x4d, 0x6d, 0xc3, 0x08, 0x5c, + 0x4f, 0xfb, 0xb2, 0xb3, 0x96, 0x16, 0xa2, 0x41, 0xd2, 0xd2, 0xd5, 0xaa, 0x09, 0xbf, 0xa7, 0xbf, + 0xa3, 0xff, 0xad, 0xb3, 0xbb, 0x48, 0xa1, 0x4e, 0xa6, 0x4f, 0x7d, 0xdb, 0x3d, 0xe7, 0xdc, 0xcb, + 0xbd, 0x47, 0x7b, 0x2f, 0x60, 0x45, 0xdc, 0x67, 0x61, 0x4f, 0xd2, 0x64, 0xd1, 0x5d, 0x0a, 0x2e, + 0x39, 0x6a, 0x78, 0x3c, 0xf6, 0x53, 0x4f, 0x72, 0x61, 0x80, 0x16, 0x36, 0x92, 0x0f, 0x5c, 0x2c, + 0x66, 0x21, 0xff, 0xf0, 0x49, 0xda, 0x3a, 0x99, 0x73, 0x3e, 0x0f, 0x59, 0x4f, 0xdf, 0x1e, 0xd3, + 0x59, 0x2f, 0x91, 0x22, 0xf5, 0xe4, 0x9a, 0x3d, 0x7a, 0xca, 0xd2, 0x78, 0x65, 0xa8, 0xf3, 0xbf, + 0x6a, 0xb0, 0x3d, 0xa5, 0xc9, 0x02, 0x1d, 0x43, 0x45, 0xe5, 0x23, 0x72, 0xb5, 0x64, 0xb8, 0xd0, + 0x2e, 0x74, 0x2a, 0x6e, 0x59, 0x01, 0xd3, 0xd5, 0x92, 0xa1, 0xd7, 0x50, 0x4c, 0x24, 0x95, 0x69, + 0x82, 0x9f, 0xb5, 0x0b, 0x9d, 0xfa, 0xe5, 0x49, 0xf7, 0x49, 0x69, 0x5d, 0x95, 0xa3, 0x3b, 0xd1, + 0x1a, 0x77, 0xad, 0x45, 0x03, 0x80, 0x20, 0x5e, 0xa6, 0x92, 0xf8, 0x54, 0x52, 0xbc, 0xd5, 0xde, + 0xea, 0x54, 0x2f, 0xbf, 0xf9, 0x72, 0xa4, 0xa3, 0x74, 0x43, 0x2a, 0xa9, 0x1d, 0x4b, 0xb1, 0x72, + 0x2b, 0x41, 0x76, 0x47, 0x5d, 0xd8, 0x13, 0x6c, 0xc6, 0x04, 0x8b, 0x3d, 0x46, 0x74, 0x85, 0x31, + 0x8d, 0x18, 0xde, 0xd6, 0x15, 0x3e, 0xcf, 0x29, 0x95, 0xe5, 0x8e, 0x46, 0x0c, 0x9d, 0x41, 0x55, + 0x30, 0x29, 0x56, 0xc4, 0xe3, 0x69, 0x2c, 0xf1, 0x4e, 0xbb, 0xd0, 0xd9, 0x71, 0x41, 0x43, 0x03, + 0x85, 0x20, 0x0b, 0xb6, 0x12, 0xf6, 0x07, 0x2e, 0x6a, 0x42, 0x1d, 0xd1, 0x05, 0xd4, 0x3d, 0x2e, + 0x04, 0x0b, 0xa9, 0x0c, 0x78, 0x4c, 0x02, 0x1f, 0x97, 0x74, 0xf6, 0xda, 0x06, 0xea, 0xf8, 0xe8, + 0x14, 0x60, 0xc9, 0xc3, 0x70, 0x9d, 0xb8, 0xac, 0xe3, 0x2b, 0x0a, 0x31, 0x79, 0xcf, 0xa1, 0xa6, + 0xcb, 0xf3, 0xd9, 0xcc, 0x94, 0x58, 0xd1, 0x49, 0xaa, 0x0a, 0x1c, 0xb2, 0x99, 0x2e, 0xee, 0x02, + 0xea, 0x89, 0xf7, 0x9e, 0xf9, 0x69, 0xc8, 0x7c, 0x22, 0x83, 0x88, 0x61, 0x68, 0x17, 0x3a, 0x5b, + 0x6e, 0x2d, 0x47, 0xa7, 0x41, 0xc4, 0xd4, 0x2f, 0x25, 0x92, 0x0a, 0x69, 0x24, 0x55, 0x2d, 0xa9, + 0x68, 0x44, 0xd3, 0x47, 0x50, 0x66, 0xf1, 0x3a, 0x7e, 0x57, 0x93, 0x25, 0x16, 0x9b, 0xc8, 0x33, + 0xa8, 0xa6, 0x4b, 0x9f, 0x4a, 0x66, 0xd8, 0x9a, 0x66, 0xc1, 0x40, 0x5a, 0xf0, 0x0a, 0x9a, 0x26, + 0xb5, 0xcf, 0x42, 0xba, 0x22, 0x41, 0x4c, 0x12, 0xa6, 0xbe, 0x48, 0x82, 0xeb, 0xba, 0xa1, 0x3d, + 0xcd, 0x0e, 0x15, 0xe9, 0xc4, 0x13, 0x43, 0xa1, 0x6f, 0xa1, 0xa1, 0x0c, 0x0c, 0x54, 0xd1, 0xaa, + 0xc5, 0xc0, 0xc7, 0x0d, 0xe3, 0xd0, 0x1a, 0x56, 0xee, 0x3b, 0x3e, 0xc2, 0x50, 0x5a, 0x03, 0xd8, + 0x6a, 0x17, 0x3a, 0x65, 0x37, 0xbb, 0xa2, 0x16, 0x94, 0xd9, 0x47, 0xe6, 0xa5, 0x92, 0xf9, 0xf8, + 0xb9, 0xa6, 0xf2, 0x3b, 0x7a, 0x09, 0xfb, 0x1e, 0x0d, 0xc3, 0x47, 0xea, 0x2d, 0xc8, 0x4c, 0xf0, + 0x88, 0xa8, 0xf7, 0xcd, 0x04, 0x46, 0x5a, 0x87, 0x32, 0xee, 0x5a, 0xf0, 0xe8, 0x41, 0x33, 0xe8, + 0x0d, 0x60, 0xc1, 0x92, 0x25, 0x8f, 0x13, 0xd3, 0x27, 0x4f, 0x65, 0xde, 0xc6, 0x9e, 0x6e, 0xa3, + 0x99, 0xf1, 0x53, 0x43, 0x67, 0x9d, 0xbc, 0x84, 0xfd, 0x6c, 0x7a, 0x48, 0x10, 0x27, 0x92, 0xaa, + 0x57, 0x15, 0xf8, 0x78, 0x5f, 0xb7, 0x83, 0x32, 0xce, 0x59, 0x53, 0x8e, 0x8f, 0xbe, 0x86, 0x5a, + 0x1e, 0xa1, 0x67, 0xe3, 0x40, 0x4b, 0x77, 0x33, 0x50, 0xcf, 0xc7, 0x21, 0x94, 0x32, 0x63, 0x9a, + 0x9a, 0x2e, 0x4a, 0xe3, 0xc8, 0x0f, 0x70, 0x28, 0x18, 0x4d, 0x78, 0x4c, 0x66, 0x5c, 0x90, 0x20, + 0xf6, 0x78, 0xb4, 0x0c, 0x99, 0x7a, 0x50, 0xf8, 0x50, 0x0b, 0x0f, 0x0c, 0x7d, 0xcd, 0x85, 0xb3, + 0x41, 0xa2, 0xd7, 0xd0, 0xcc, 0x3d, 0xa1, 0x33, 0xc9, 0x44, 0xde, 0x1f, 0xd6, 0x9f, 0x34, 0x77, + 0xac, 0xaf, 0xc8, 0xac, 0xbb, 0x63, 0xa8, 0x18, 0xef, 0x54, 0x21, 0x47, 0x66, 0x86, 0x0d, 
0xe0, + 0xf8, 0xe8, 0x1a, 0xaa, 0x3c, 0x95, 0xf9, 0x38, 0xb6, 0xf4, 0x38, 0x5e, 0x7c, 0x79, 0x1c, 0x47, + 0x5a, 0xf8, 0x69, 0x1e, 0x81, 0xe7, 0x00, 0xba, 0xda, 0x34, 0x84, 0x26, 0x0b, 0x7c, 0xdc, 0x2e, + 0x74, 0xaa, 0x97, 0xa7, 0x9f, 0x65, 0x7a, 0xc8, 0x1c, 0xa2, 0xc9, 0x62, 0xc3, 0x2f, 0xb5, 0x6c, + 0x9a, 0x50, 0xf4, 0x79, 0x44, 0x83, 0x18, 0x9f, 0x18, 0xbb, 0xcc, 0x0d, 0xfd, 0x08, 0x35, 0xb3, + 0x31, 0x22, 0x96, 0x24, 0x74, 0xce, 0xf0, 0xa9, 0xce, 0xbd, 0xdf, 0x35, 0x0b, 0xac, 0x9b, 0x2d, + 0xb0, 0x6e, 0x3f, 0x5e, 0xb9, 0xbb, 0x5a, 0x7a, 0x6b, 0x94, 0xe8, 0x27, 0xa8, 0xaf, 0xdb, 0xcb, + 0x62, 0x5f, 0xfc, 0x47, 0x6c, 0xcd, 0x68, 0xb3, 0xe0, 0xef, 0xe1, 0x40, 0xa8, 0xa1, 0x09, 0x83, + 0x28, 0x90, 0x64, 0x99, 0xdb, 0x8d, 0xcf, 0xf4, 0x6b, 0x42, 0x8a, 0xbc, 0x51, 0xdc, 0x38, 0x33, + 0xbb, 0x35, 0x85, 0xfa, 0xbf, 0x97, 0x96, 0x5a, 0x2c, 0x0b, 0xb6, 0x5a, 0xef, 0x4e, 0x75, 0x44, + 0xdf, 0xc1, 0xce, 0x9f, 0x34, 0x4c, 0x99, 0xde, 0x9a, 0xd5, 0xcb, 0xe6, 0x67, 0xa5, 0xfc, 0xaa, + 0x58, 0xd7, 0x88, 0xde, 0x3e, 0x7b, 0x53, 0x68, 0xdd, 0x43, 0xe3, 0x89, 0xf7, 0xff, 0x47, 0xda, + 0xf3, 0xbf, 0x0b, 0x50, 0x34, 0xcb, 0x19, 0x35, 0xa0, 0xea, 0xdc, 0x91, 0xb1, 0x3b, 0xfa, 0xd9, + 0xb5, 0x27, 0x13, 0xeb, 0x2b, 0xb4, 0x0b, 0xe5, 0x41, 0xff, 0x6e, 0x60, 0xdf, 0xd8, 0x43, 0xab, + 0x80, 0x00, 0x8a, 0xd7, 0x7d, 0x47, 0x9d, 0x9f, 0xa1, 0x17, 0xd0, 0x32, 0x67, 0xf2, 0xe0, 0x4c, + 0xdf, 0x91, 0xa9, 0xed, 0xde, 0x3a, 0x77, 0xfd, 0x1b, 0x62, 0xbb, 0xee, 0xc8, 0xb5, 0xb6, 0x50, + 0x0d, 0x2a, 0x83, 0xd1, 0xed, 0xf8, 0xc6, 0x9e, 0xda, 0x43, 0x6b, 0x1b, 0x1d, 0xc1, 0x41, 0x7e, + 0x35, 0x11, 0x5a, 0x38, 0xb1, 0x76, 0x94, 0x72, 0x32, 0x78, 0x67, 0x0f, 0xef, 0x55, 0xe2, 0xa2, + 0xba, 0x4e, 0x9d, 0x5b, 0x7b, 0x48, 0x46, 0xf7, 0x53, 0xab, 0x84, 0xf6, 0xa0, 0xe1, 0xda, 0xfd, + 0xe1, 0x6f, 0xe4, 0x7a, 0xe4, 0x12, 0xd7, 0x76, 0xef, 0xef, 0xac, 0x32, 0xaa, 0x42, 0x69, 0xf2, + 0x8b, 0x33, 0x1e, 0xdb, 0x43, 0xab, 0x72, 0x45, 0xe1, 0xd8, 0xe3, 0x51, 0x37, 0x66, 0x72, 0x16, + 0x06, 0x1f, 0x9f, 0xbe, 0xb4, 0xab, 0xa2, 0x7a, 0x54, 0xe3, 0xc7, 0xdf, 0xdf, 0xce, 0x03, 0xf9, + 0x3e, 0x7d, 0xec, 0x7a, 0x3c, 0xea, 0xad, 0xb5, 0xbd, 0x5c, 0xdb, 0xf3, 0xc2, 0x80, 0xc5, 0xb2, + 0x37, 0xe7, 0x73, 0xb1, 0xf4, 0x36, 0x70, 0xfd, 0xa7, 0xfa, 0x58, 0xd4, 0xa9, 0x5e, 0xfd, 0x13, + 0x00, 0x00, 0xff, 0xff, 0xe9, 0xb6, 0x2c, 0x87, 0x87, 0x07, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/taskdef.pb.go b/client/gogrpc/conductor/model/taskdef.pb.go index 8b07c0eb1e..8113caeefb 100644 --- a/client/gogrpc/conductor/model/taskdef.pb.go +++ b/client/gogrpc/conductor/model/taskdef.pb.go @@ -39,7 +39,7 @@ func (x TaskDef_RetryLogic) String() string { return proto.EnumName(TaskDef_RetryLogic_name, int32(x)) } func (TaskDef_RetryLogic) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_34514f3248f44dc2, []int{0, 0} + return fileDescriptor_taskdef_3520607b0d645224, []int{0, 0} } type TaskDef_TimeoutPolicy int32 @@ -65,22 +65,23 @@ func (x TaskDef_TimeoutPolicy) String() string { return proto.EnumName(TaskDef_TimeoutPolicy_name, int32(x)) } func (TaskDef_TimeoutPolicy) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskdef_34514f3248f44dc2, []int{0, 1} + return fileDescriptor_taskdef_3520607b0d645224, []int{0, 1} } type TaskDef struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` - RetryCount int32 `protobuf:"varint,3,opt,name=retry_count,json=retryCount" json:"retry_count,omitempty"` - TimeoutSeconds int64 
`protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds" json:"timeout_seconds,omitempty"` - InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys" json:"input_keys,omitempty"` - OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys" json:"output_keys,omitempty"` - TimeoutPolicy TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,enum=conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` - RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,enum=conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` - RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds" json:"retry_delay_seconds,omitempty"` - ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds" json:"response_timeout_seconds,omitempty"` - ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit" json:"concurrent_exec_limit,omitempty"` - InputTemplate map[string]*_struct.Value `protobuf:"bytes,12,rep,name=input_template,json=inputTemplate" json:"input_template,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + RetryCount int32 `protobuf:"varint,3,opt,name=retry_count,json=retryCount,proto3" json:"retry_count,omitempty"` + TimeoutSeconds int64 `protobuf:"varint,4,opt,name=timeout_seconds,json=timeoutSeconds,proto3" json:"timeout_seconds,omitempty"` + InputKeys []string `protobuf:"bytes,5,rep,name=input_keys,json=inputKeys,proto3" json:"input_keys,omitempty"` + OutputKeys []string `protobuf:"bytes,6,rep,name=output_keys,json=outputKeys,proto3" json:"output_keys,omitempty"` + TimeoutPolicy TaskDef_TimeoutPolicy `protobuf:"varint,7,opt,name=timeout_policy,json=timeoutPolicy,proto3,enum=conductor.proto.TaskDef_TimeoutPolicy" json:"timeout_policy,omitempty"` + RetryLogic TaskDef_RetryLogic `protobuf:"varint,8,opt,name=retry_logic,json=retryLogic,proto3,enum=conductor.proto.TaskDef_RetryLogic" json:"retry_logic,omitempty"` + RetryDelaySeconds int32 `protobuf:"varint,9,opt,name=retry_delay_seconds,json=retryDelaySeconds,proto3" json:"retry_delay_seconds,omitempty"` + ResponseTimeoutSeconds int32 `protobuf:"varint,10,opt,name=response_timeout_seconds,json=responseTimeoutSeconds,proto3" json:"response_timeout_seconds,omitempty"` + ConcurrentExecLimit int32 `protobuf:"varint,11,opt,name=concurrent_exec_limit,json=concurrentExecLimit,proto3" json:"concurrent_exec_limit,omitempty"` + InputTemplate map[string]*_struct.Value `protobuf:"bytes,12,rep,name=input_template,json=inputTemplate,proto3" json:"input_template,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + RateLimitPerSecond int32 `protobuf:"varint,13,opt,name=rate_limit_per_second,json=rateLimitPerSecond,proto3" json:"rate_limit_per_second,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -90,7 +91,7 @@ func (m *TaskDef) Reset() { *m = TaskDef{} } func (m *TaskDef) String() string { return proto.CompactTextString(m) } func (*TaskDef) ProtoMessage() {} func (*TaskDef) Descriptor() ([]byte, []int) { - return fileDescriptor_taskdef_34514f3248f44dc2, []int{0} + return 
fileDescriptor_taskdef_3520607b0d645224, []int{0} } func (m *TaskDef) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskDef.Unmarshal(m, b) @@ -194,6 +195,13 @@ func (m *TaskDef) GetInputTemplate() map[string]*_struct.Value { return nil } +func (m *TaskDef) GetRateLimitPerSecond() int32 { + if m != nil { + return m.RateLimitPerSecond + } + return 0 +} + func init() { proto.RegisterType((*TaskDef)(nil), "conductor.proto.TaskDef") proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.TaskDef.InputTemplateEntry") @@ -201,44 +209,46 @@ func init() { proto.RegisterEnum("conductor.proto.TaskDef_TimeoutPolicy", TaskDef_TimeoutPolicy_name, TaskDef_TimeoutPolicy_value) } -func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_34514f3248f44dc2) } - -var fileDescriptor_taskdef_34514f3248f44dc2 = []byte{ - // 568 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x93, 0x61, 0x4f, 0x9b, 0x5e, - 0x14, 0xc6, 0xc5, 0x5a, 0xfd, 0xf7, 0xf4, 0x6f, 0xed, 0x6e, 0x33, 0x47, 0xdc, 0x96, 0x11, 0x97, - 0x6c, 0x24, 0x5b, 0x60, 0xe9, 0xde, 0x18, 0xf7, 0x4a, 0x2d, 0x4d, 0x1a, 0xab, 0x35, 0x77, 0x6c, - 0xd3, 0xbd, 0x21, 0xf4, 0x72, 0xca, 0x6e, 0x0a, 0x5c, 0x02, 0x97, 0x45, 0x3e, 0xca, 0xbe, 0xed, - 0xc2, 0x85, 0x6a, 0x75, 0xf1, 0xdd, 0xbd, 0xcf, 0xf3, 0x3b, 0x07, 0xce, 0xc3, 0x01, 0x06, 0xb1, - 0x08, 0x30, 0xb2, 0xa5, 0x9f, 0x2f, 0x03, 0x5c, 0x58, 0x69, 0x26, 0xa4, 0x20, 0x7b, 0x4c, 0x24, - 0x41, 0xc1, 0xa4, 0xc8, 0x6a, 0xe1, 0xe0, 0x55, 0x28, 0x44, 0x18, 0xa1, 0xad, 0x6e, 0xf3, 0x62, - 0x61, 0xe7, 0x32, 0x2b, 0x98, 0xac, 0xdd, 0xc3, 0x3f, 0xdb, 0xb0, 0xe3, 0xfa, 0xf9, 0x72, 0x84, - 0x0b, 0x42, 0x60, 0x2b, 0xf1, 0x63, 0xd4, 0x35, 0x43, 0x33, 0x3b, 0x54, 0x9d, 0x89, 0x01, 0xdd, - 0x00, 0x73, 0x96, 0xf1, 0x54, 0x72, 0x91, 0xe8, 0x9b, 0xca, 0x5a, 0x97, 0xc8, 0x1b, 0xe8, 0x66, - 0x28, 0xb3, 0xd2, 0x63, 0xa2, 0x48, 0xa4, 0xde, 0x32, 0x34, 0xb3, 0x4d, 0x41, 0x49, 0x67, 0x95, - 0x42, 0xde, 0xc3, 0x9e, 0xe4, 0x31, 0x8a, 0x42, 0x7a, 0x39, 0x56, 0x6f, 0x97, 0xeb, 0x5b, 0x86, - 0x66, 0xb6, 0x68, 0xaf, 0x91, 0xbf, 0xd6, 0x2a, 0x79, 0x0d, 0xc0, 0x93, 0xb4, 0x90, 0xde, 0x12, - 0xcb, 0x5c, 0x6f, 0x1b, 0x2d, 0xb3, 0x43, 0x3b, 0x4a, 0x39, 0xc7, 0x32, 0xaf, 0x1e, 0x24, 0x0a, - 0x79, 0xe7, 0x6f, 0x2b, 0x1f, 0x6a, 0x49, 0x01, 0x17, 0xb0, 0xea, 0xe8, 0xa5, 0x22, 0xe2, 0xac, - 0xd4, 0x77, 0x0c, 0xcd, 0xec, 0x0d, 0xdf, 0x59, 0x8f, 0x32, 0xb1, 0x9a, 0x89, 0x2d, 0xb7, 0xc6, - 0xaf, 0x14, 0x4d, 0x77, 0xe5, 0xfa, 0x95, 0x8c, 0x56, 0x83, 0x45, 0x22, 0xe4, 0x4c, 0xff, 0x4f, - 0xf5, 0x7a, 0xfb, 0x64, 0x2f, 0x5a, 0xb1, 0xd3, 0x0a, 0x6d, 0xa6, 0x57, 0x67, 0x62, 0xc1, 0xa0, - 0xee, 0x12, 0x60, 0xe4, 0x97, 0x77, 0x09, 0x74, 0x54, 0x4c, 0xcf, 0x94, 0x35, 0xaa, 0x9c, 0x55, - 0x08, 0x47, 0xa0, 0x67, 0x98, 0xa7, 0x22, 0xc9, 0xd1, 0x7b, 0x1c, 0x1b, 0xa8, 0xa2, 0xfd, 0x95, - 0xef, 0x3e, 0x8c, 0x6f, 0x08, 0xcf, 0x99, 0x48, 0x58, 0x91, 0x65, 0x98, 0x48, 0x0f, 0x6f, 0x91, - 0x79, 0x11, 0x8f, 0xb9, 0xd4, 0xbb, 0xaa, 0x6c, 0x70, 0x6f, 0x3a, 0xb7, 0xc8, 0xa6, 0x95, 0x45, - 0x28, 0xf4, 0xea, 0xc8, 0x25, 0xc6, 0x69, 0xe4, 0x4b, 0xd4, 0xff, 0x37, 0x5a, 0x66, 0x77, 0xf8, - 0xe1, 0xc9, 0x31, 0x27, 0x15, 0xee, 0x36, 0xb4, 0x93, 0xc8, 0xac, 0xa4, 0xbb, 0x7c, 0x5d, 0x3b, - 0xb8, 0x06, 0xf2, 0x2f, 0x44, 0xfa, 0xd0, 0x5a, 0x62, 0xd9, 0xec, 0x56, 0x75, 0x24, 0x1f, 0xa1, - 0xfd, 0xdb, 0x8f, 0x0a, 0x54, 0x4b, 0xd5, 0x1d, 0xee, 0x5b, 0xf5, 0xa2, 0x5a, 0xab, 0x45, 0xb5, - 0xbe, 0x57, 0x2e, 0xad, 0xa1, 0xe3, 0xcd, 0x23, 0xed, 0xf0, 0x13, 0xc0, 0x7d, 0xca, 0xa4, 0x03, - 
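The regenerated taskdef bindings above follow the same pattern as task.pb.go: every field tag gains a proto3 marker, the fileDescriptor variable is renamed with a fresh content hash, and TaskDef picks up a new field, rate_limit_per_second (tag 13), with a matching nil-safe getter. A minimal sketch of how a client could populate and read the new field; the import path and the model package name are assumptions inferred from the file locations in this patch, not confirmed by it:

package main

import (
	"fmt"

	// Assumed import path, inferred from client/gogrpc/conductor/model in this patch.
	"github.com/netflix/conductor/client/gogrpc/conductor/model"
)

func main() {
	def := &model.TaskDef{
		Name:               "encode_video", // hypothetical task name
		RetryCount:         3,
		TimeoutSeconds:     600,
		RateLimitPerSecond: 10, // field added by this regeneration
	}
	// Generated getters are nil-safe, so they are preferred over raw field access.
	fmt.Println(def.GetName(), def.GetRateLimitPerSecond())
}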
0xed, 0xf1, 0xe4, 0xda, 0x19, 0xf5, 0x37, 0xc8, 0x0b, 0x18, 0x38, 0xd7, 0x57, 0xb3, 0x4b, 0xe7, - 0xd2, 0x9d, 0x9c, 0x4c, 0xbd, 0xd3, 0x93, 0xb3, 0xf3, 0xd9, 0x78, 0xdc, 0xd7, 0x0e, 0xbf, 0xc0, - 0xee, 0x83, 0x6f, 0x5c, 0x15, 0x51, 0xc7, 0xa5, 0x37, 0xfd, 0x0d, 0xb2, 0x07, 0x5d, 0x77, 0x72, - 0xe1, 0x78, 0xb3, 0x6f, 0xae, 0xf7, 0x63, 0xdc, 0xd7, 0x48, 0x0f, 0xe0, 0x64, 0xea, 0x50, 0xd7, - 0x9b, 0x5d, 0x4e, 0x6f, 0xfa, 0x9b, 0xa7, 0x01, 0xbc, 0x64, 0x22, 0xb6, 0x12, 0x94, 0x8b, 0x88, - 0xdf, 0x3e, 0x4e, 0xe4, 0xb4, 0xd3, 0x44, 0x72, 0x35, 0xff, 0x79, 0x1c, 0x72, 0xf9, 0xab, 0x98, - 0x5b, 0x4c, 0xc4, 0x76, 0x83, 0xdb, 0x77, 0xb8, 0xcd, 0x22, 0x8e, 0x89, 0xb4, 0x43, 0x11, 0x66, - 0x29, 0x5b, 0xd3, 0xd5, 0xef, 0x3b, 0xdf, 0x56, 0xdd, 0x3e, 0xff, 0x0d, 0x00, 0x00, 0xff, 0xff, - 0xb1, 0xb3, 0xea, 0x13, 0xce, 0x03, 0x00, 0x00, +func init() { proto.RegisterFile("model/taskdef.proto", fileDescriptor_taskdef_3520607b0d645224) } + +var fileDescriptor_taskdef_3520607b0d645224 = []byte{ + // 593 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x53, 0x51, 0x6f, 0xd3, 0x4c, + 0x10, 0xac, 0x9b, 0xa6, 0xfd, 0xb2, 0xf9, 0x92, 0x86, 0x8b, 0x5a, 0xac, 0x02, 0xc2, 0x2a, 0x12, + 0x44, 0x02, 0xd9, 0x10, 0x5e, 0xaa, 0xf2, 0xd4, 0x36, 0x8e, 0x14, 0x35, 0x6d, 0x22, 0x63, 0xa0, + 0xe5, 0xc5, 0x72, 0x2e, 0x1b, 0x63, 0xc5, 0xf6, 0x59, 0xe7, 0x33, 0xaa, 0xff, 0x23, 0x3f, 0x0a, + 0xdd, 0xd9, 0x69, 0xd3, 0xa0, 0xbe, 0xdd, 0xcd, 0xcc, 0xcd, 0xee, 0x8e, 0xd7, 0xd0, 0x8d, 0xd9, + 0x1c, 0x23, 0x4b, 0xf8, 0xd9, 0x72, 0x8e, 0x0b, 0x33, 0xe5, 0x4c, 0x30, 0xb2, 0x4f, 0x59, 0x32, + 0xcf, 0xa9, 0x60, 0xbc, 0x04, 0x8e, 0x5e, 0x06, 0x8c, 0x05, 0x11, 0x5a, 0xea, 0x36, 0xcb, 0x17, + 0x56, 0x26, 0x78, 0x4e, 0x45, 0xc9, 0x1e, 0xff, 0xd9, 0x85, 0x3d, 0xd7, 0xcf, 0x96, 0x03, 0x5c, + 0x10, 0x02, 0x3b, 0x89, 0x1f, 0xa3, 0xae, 0x19, 0x5a, 0xaf, 0xe1, 0xa8, 0x33, 0x31, 0xa0, 0x39, + 0xc7, 0x8c, 0xf2, 0x30, 0x15, 0x21, 0x4b, 0xf4, 0x6d, 0x45, 0xad, 0x43, 0xe4, 0x35, 0x34, 0x39, + 0x0a, 0x5e, 0x78, 0x94, 0xe5, 0x89, 0xd0, 0x6b, 0x86, 0xd6, 0xab, 0x3b, 0xa0, 0xa0, 0x0b, 0x89, + 0x90, 0x77, 0xb0, 0x2f, 0xc2, 0x18, 0x59, 0x2e, 0xbc, 0x0c, 0x65, 0x77, 0x99, 0xbe, 0x63, 0x68, + 0xbd, 0x9a, 0xd3, 0xae, 0xe0, 0xaf, 0x25, 0x4a, 0x5e, 0x01, 0x84, 0x49, 0x9a, 0x0b, 0x6f, 0x89, + 0x45, 0xa6, 0xd7, 0x8d, 0x5a, 0xaf, 0xe1, 0x34, 0x14, 0x72, 0x89, 0x45, 0x26, 0x0b, 0xb1, 0x5c, + 0xdc, 0xf3, 0xbb, 0x8a, 0x87, 0x12, 0x52, 0x82, 0x2b, 0x58, 0x39, 0x7a, 0x29, 0x8b, 0x42, 0x5a, + 0xe8, 0x7b, 0x86, 0xd6, 0x6b, 0xf7, 0xdf, 0x9a, 0x1b, 0x99, 0x98, 0xd5, 0xc4, 0xa6, 0x5b, 0xca, + 0xa7, 0x4a, 0xed, 0xb4, 0xc4, 0xfa, 0x95, 0x0c, 0x56, 0x83, 0x45, 0x2c, 0x08, 0xa9, 0xfe, 0x9f, + 0xf2, 0x7a, 0xf3, 0xa4, 0x97, 0x23, 0xb5, 0x63, 0x29, 0xad, 0xa6, 0x57, 0x67, 0x62, 0x42, 0xb7, + 0x74, 0x99, 0x63, 0xe4, 0x17, 0xf7, 0x09, 0x34, 0x54, 0x4c, 0xcf, 0x14, 0x35, 0x90, 0xcc, 0x2a, + 0x84, 0x13, 0xd0, 0x39, 0x66, 0x29, 0x4b, 0x32, 0xf4, 0x36, 0x63, 0x03, 0xf5, 0xe8, 0x70, 0xc5, + 0xbb, 0x8f, 0xe3, 0xeb, 0xc3, 0x01, 0x65, 0x09, 0xcd, 0x39, 0xc7, 0x44, 0x78, 0x78, 0x87, 0xd4, + 0x8b, 0xc2, 0x38, 0x14, 0x7a, 0x53, 0x3d, 0xeb, 0x3e, 0x90, 0xf6, 0x1d, 0xd2, 0xb1, 0xa4, 0x88, + 0x03, 0xed, 0x32, 0x72, 0x81, 0x71, 0x1a, 0xf9, 0x02, 0xf5, 0xff, 0x8d, 0x5a, 0xaf, 0xd9, 0x7f, + 0xff, 0xe4, 0x98, 0x23, 0x29, 0x77, 0x2b, 0xb5, 0x9d, 0x08, 0x5e, 0x38, 0xad, 0x70, 0x1d, 0x23, + 0x9f, 0xe0, 0x80, 0xfb, 0x02, 0xcb, 0xe2, 0x5e, 0x8a, 0xbc, 0xea, 0x5f, 0x6f, 0xa9, 0x3e, 0x88, + 0x24, 0x55, 0xf5, 0x29, 0xf2, 0xb2, 0xf7, 0xa3, 0x1b, 0x20, 0xff, 
0xfa, 0x92, 0x0e, 0xd4, 0x96, + 0x58, 0x54, 0xeb, 0x28, 0x8f, 0xe4, 0x03, 0xd4, 0x7f, 0xfb, 0x51, 0x8e, 0x6a, 0x0f, 0x9b, 0xfd, + 0x43, 0xb3, 0xdc, 0x6d, 0x73, 0xb5, 0xdb, 0xe6, 0x77, 0xc9, 0x3a, 0xa5, 0xe8, 0x74, 0xfb, 0x44, + 0x3b, 0xfe, 0x08, 0xf0, 0xf0, 0x61, 0x48, 0x03, 0xea, 0xc3, 0xd1, 0x8d, 0x3d, 0xe8, 0x6c, 0x91, + 0xe7, 0xd0, 0xb5, 0x6f, 0xa6, 0x93, 0x6b, 0xfb, 0xda, 0x1d, 0x9d, 0x8d, 0xbd, 0xf3, 0xb3, 0x8b, + 0xcb, 0xc9, 0x70, 0xd8, 0xd1, 0x8e, 0xbf, 0x40, 0xeb, 0xd1, 0x5a, 0xc8, 0x47, 0x8e, 0xed, 0x3a, + 0xb7, 0x9d, 0x2d, 0xb2, 0x0f, 0x4d, 0x77, 0x74, 0x65, 0x7b, 0x93, 0x6f, 0xae, 0xf7, 0x63, 0xd8, + 0xd1, 0x48, 0x1b, 0xe0, 0x6c, 0x6c, 0x3b, 0xae, 0x37, 0xb9, 0x1e, 0xdf, 0x76, 0xb6, 0xcf, 0xe7, + 0xf0, 0x82, 0xb2, 0xd8, 0x4c, 0x50, 0x2c, 0xa2, 0xf0, 0x6e, 0x33, 0xc4, 0xf3, 0x46, 0x95, 0xe2, + 0x74, 0xf6, 0xf3, 0x34, 0x08, 0xc5, 0xaf, 0x7c, 0x66, 0x52, 0x16, 0x5b, 0x95, 0xdc, 0xba, 0x97, + 0x5b, 0x34, 0x0a, 0x31, 0x11, 0x56, 0xc0, 0x02, 0x9e, 0xd2, 0x35, 0x5c, 0xfd, 0xf1, 0xb3, 0x5d, + 0xe5, 0xf6, 0xf9, 0x6f, 0x00, 0x00, 0x00, 0xff, 0xff, 0xc2, 0xd8, 0x2b, 0x35, 0x01, 0x04, 0x00, + 0x00, } diff --git a/client/gogrpc/conductor/model/taskexeclog.pb.go b/client/gogrpc/conductor/model/taskexeclog.pb.go index d62a8be72b..6c65dd502e 100644 --- a/client/gogrpc/conductor/model/taskexeclog.pb.go +++ b/client/gogrpc/conductor/model/taskexeclog.pb.go @@ -19,9 +19,9 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type TaskExecLog struct { - Log string `protobuf:"bytes,1,opt,name=log" json:"log,omitempty"` - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - CreatedTime int64 `protobuf:"varint,3,opt,name=created_time,json=createdTime" json:"created_time,omitempty"` + Log string `protobuf:"bytes,1,opt,name=log,proto3" json:"log,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + CreatedTime int64 `protobuf:"varint,3,opt,name=created_time,json=createdTime,proto3" json:"created_time,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -31,7 +31,7 @@ func (m *TaskExecLog) Reset() { *m = TaskExecLog{} } func (m *TaskExecLog) String() string { return proto.CompactTextString(m) } func (*TaskExecLog) ProtoMessage() {} func (*TaskExecLog) Descriptor() ([]byte, []int) { - return fileDescriptor_taskexeclog_e9c8274b44d54689, []int{0} + return fileDescriptor_taskexeclog_31ce5708c84ca255, []int{0} } func (m *TaskExecLog) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskExecLog.Unmarshal(m, b) @@ -77,10 +77,10 @@ func init() { } func init() { - proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_e9c8274b44d54689) + proto.RegisterFile("model/taskexeclog.proto", fileDescriptor_taskexeclog_31ce5708c84ca255) } -var fileDescriptor_taskexeclog_e9c8274b44d54689 = []byte{ +var fileDescriptor_taskexeclog_31ce5708c84ca255 = []byte{ // 205 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x8f, 0x31, 0x4b, 0xc4, 0x40, 0x10, 0x85, 0x89, 0x81, 0x13, 0xf7, 0x14, 0x65, 0x9b, 0x0b, 0xd8, 0x9c, 0x56, 0x57, 0xed, 0x16, diff --git a/client/gogrpc/conductor/model/taskresult.pb.go b/client/gogrpc/conductor/model/taskresult.pb.go index 25846d54fa..8e90a79e56 100644 --- a/client/gogrpc/conductor/model/taskresult.pb.go +++ b/client/gogrpc/conductor/model/taskresult.pb.go @@ -49,18 +49,18 @@ func (x TaskResult_Status) String() string 
{ return proto.EnumName(TaskResult_Status_name, int32(x)) } func (TaskResult_Status) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0, 0} + return fileDescriptor_taskresult_50ab9135b69785bc, []int{0, 0} } type TaskResult struct { - WorkflowInstanceId string `protobuf:"bytes,1,opt,name=workflow_instance_id,json=workflowInstanceId" json:"workflow_instance_id,omitempty"` - TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId" json:"task_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds" json:"callback_after_seconds,omitempty"` - WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId" json:"worker_id,omitempty"` - Status TaskResult_Status `protobuf:"varint,6,opt,name=status,enum=conductor.proto.TaskResult_Status" json:"status,omitempty"` - OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - OutputMessage *any.Any `protobuf:"bytes,8,opt,name=output_message,json=outputMessage" json:"output_message,omitempty"` + WorkflowInstanceId string `protobuf:"bytes,1,opt,name=workflow_instance_id,json=workflowInstanceId,proto3" json:"workflow_instance_id,omitempty"` + TaskId string `protobuf:"bytes,2,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,3,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + CallbackAfterSeconds int64 `protobuf:"varint,4,opt,name=callback_after_seconds,json=callbackAfterSeconds,proto3" json:"callback_after_seconds,omitempty"` + WorkerId string `protobuf:"bytes,5,opt,name=worker_id,json=workerId,proto3" json:"worker_id,omitempty"` + Status TaskResult_Status `protobuf:"varint,6,opt,name=status,proto3,enum=conductor.proto.TaskResult_Status" json:"status,omitempty"` + OutputData map[string]*_struct.Value `protobuf:"bytes,7,rep,name=output_data,json=outputData,proto3" json:"output_data,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + OutputMessage *any.Any `protobuf:"bytes,8,opt,name=output_message,json=outputMessage,proto3" json:"output_message,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -70,7 +70,7 @@ func (m *TaskResult) Reset() { *m = TaskResult{} } func (m *TaskResult) String() string { return proto.CompactTextString(m) } func (*TaskResult) ProtoMessage() {} func (*TaskResult) Descriptor() ([]byte, []int) { - return fileDescriptor_taskresult_ccaec941f8ac2f31, []int{0} + return fileDescriptor_taskresult_50ab9135b69785bc, []int{0} } func (m *TaskResult) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskResult.Unmarshal(m, b) @@ -152,9 +152,9 @@ func init() { proto.RegisterEnum("conductor.proto.TaskResult_Status", TaskResult_Status_name, TaskResult_Status_value) } -func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_ccaec941f8ac2f31) } +func init() { proto.RegisterFile("model/taskresult.proto", fileDescriptor_taskresult_50ab9135b69785bc) } -var fileDescriptor_taskresult_ccaec941f8ac2f31 = []byte{ +var fileDescriptor_taskresult_50ab9135b69785bc = []byte{ // 517 
bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x7c, 0x93, 0xdf, 0x6e, 0xda, 0x30, 0x14, 0xc6, 0x17, 0xa0, 0x69, 0x39, 0xac, 0x05, 0x59, 0x8c, 0x66, 0x74, 0x9a, 0x10, 0x57, 0x48, diff --git a/client/gogrpc/conductor/model/tasksummary.pb.go b/client/gogrpc/conductor/model/tasksummary.pb.go index abdeaab07e..94078ec7bf 100644 --- a/client/gogrpc/conductor/model/tasksummary.pb.go +++ b/client/gogrpc/conductor/model/tasksummary.pb.go @@ -19,22 +19,22 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type TaskSummary struct { - WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - WorkflowType string `protobuf:"bytes,2,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` - CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - ScheduledTime string `protobuf:"bytes,4,opt,name=scheduled_time,json=scheduledTime" json:"scheduled_time,omitempty"` - StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` - UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` - EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - Status Task_Status `protobuf:"varint,8,opt,name=status,enum=conductor.proto.Task_Status" json:"status,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - ExecutionTime int64 `protobuf:"varint,10,opt,name=execution_time,json=executionTime" json:"execution_time,omitempty"` - QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime" json:"queue_wait_time,omitempty"` - TaskDefName string `protobuf:"bytes,12,opt,name=task_def_name,json=taskDefName" json:"task_def_name,omitempty"` - TaskType string `protobuf:"bytes,13,opt,name=task_type,json=taskType" json:"task_type,omitempty"` - Input string `protobuf:"bytes,14,opt,name=input" json:"input,omitempty"` - Output string `protobuf:"bytes,15,opt,name=output" json:"output,omitempty"` - TaskId string `protobuf:"bytes,16,opt,name=task_id,json=taskId" json:"task_id,omitempty"` + WorkflowId string `protobuf:"bytes,1,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + WorkflowType string `protobuf:"bytes,2,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + CorrelationId string `protobuf:"bytes,3,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + ScheduledTime string `protobuf:"bytes,4,opt,name=scheduled_time,json=scheduledTime,proto3" json:"scheduled_time,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Status Task_Status `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Task_Status" json:"status,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,9,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 `protobuf:"varint,10,opt,name=execution_time,json=executionTime,proto3" 
json:"execution_time,omitempty"` + QueueWaitTime int64 `protobuf:"varint,11,opt,name=queue_wait_time,json=queueWaitTime,proto3" json:"queue_wait_time,omitempty"` + TaskDefName string `protobuf:"bytes,12,opt,name=task_def_name,json=taskDefName,proto3" json:"task_def_name,omitempty"` + TaskType string `protobuf:"bytes,13,opt,name=task_type,json=taskType,proto3" json:"task_type,omitempty"` + Input string `protobuf:"bytes,14,opt,name=input,proto3" json:"input,omitempty"` + Output string `protobuf:"bytes,15,opt,name=output,proto3" json:"output,omitempty"` + TaskId string `protobuf:"bytes,16,opt,name=task_id,json=taskId,proto3" json:"task_id,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -44,7 +44,7 @@ func (m *TaskSummary) Reset() { *m = TaskSummary{} } func (m *TaskSummary) String() string { return proto.CompactTextString(m) } func (*TaskSummary) ProtoMessage() {} func (*TaskSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_tasksummary_ab439d130c50da04, []int{0} + return fileDescriptor_tasksummary_ccb082d5e959585d, []int{0} } func (m *TaskSummary) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_TaskSummary.Unmarshal(m, b) @@ -181,10 +181,10 @@ func init() { } func init() { - proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ab439d130c50da04) + proto.RegisterFile("model/tasksummary.proto", fileDescriptor_tasksummary_ccb082d5e959585d) } -var fileDescriptor_tasksummary_ab439d130c50da04 = []byte{ +var fileDescriptor_tasksummary_ccb082d5e959585d = []byte{ // 446 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0xcf, 0x8b, 0x13, 0x31, 0x14, 0xc7, 0xa9, 0xbb, 0xdb, 0x1f, 0xaf, 0x3b, 0xed, 0x32, 0xa8, 0x1d, 0x5d, 0x65, 0xcb, 0x8a, diff --git a/client/gogrpc/conductor/model/workflow.pb.go b/client/gogrpc/conductor/model/workflow.pb.go index 9588282808..f0dfc42f9d 100644 --- a/client/gogrpc/conductor/model/workflow.pb.go +++ b/client/gogrpc/conductor/model/workflow.pb.go @@ -51,27 +51,28 @@ func (x Workflow_WorkflowStatus) String() string { return proto.EnumName(Workflow_WorkflowStatus_name, int32(x)) } func (Workflow_WorkflowStatus) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflow_ccfaf06783966538, []int{0, 0} + return fileDescriptor_workflow_b47c95a464502efa, []int{0, 0} } type Workflow struct { - Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` - EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId" json:"parent_workflow_id,omitempty"` - ParentWorkflowTaskId string `protobuf:"bytes,5,opt,name=parent_workflow_task_id,json=parentWorkflowTaskId" json:"parent_workflow_task_id,omitempty"` - Tasks []*Task `protobuf:"bytes,6,rep,name=tasks" json:"tasks,omitempty"` - Input map[string]*_struct.Value `protobuf:"bytes,8,rep,name=input" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Output map[string]*_struct.Value `protobuf:"bytes,9,rep,name=output" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - WorkflowType string `protobuf:"bytes,10,opt,name=workflow_type,json=workflowType" 
json:"workflow_type,omitempty"` - Version int32 `protobuf:"varint,11,opt,name=version" json:"version,omitempty"` - CorrelationId string `protobuf:"bytes,12,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - ReRunFromWorkflowId string `protobuf:"bytes,13,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId" json:"re_run_from_workflow_id,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,14,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - SchemaVersion int32 `protobuf:"varint,15,opt,name=schema_version,json=schemaVersion" json:"schema_version,omitempty"` - Event string `protobuf:"bytes,16,opt,name=event" json:"event,omitempty"` - TaskToDomain map[string]string `protobuf:"bytes,17,rep,name=task_to_domain,json=taskToDomain" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - FailedReferenceTaskNames []string `protobuf:"bytes,18,rep,name=failed_reference_task_names,json=failedReferenceTaskNames" json:"failed_reference_task_names,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,1,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + EndTime int64 `protobuf:"varint,2,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + ParentWorkflowId string `protobuf:"bytes,4,opt,name=parent_workflow_id,json=parentWorkflowId,proto3" json:"parent_workflow_id,omitempty"` + ParentWorkflowTaskId string `protobuf:"bytes,5,opt,name=parent_workflow_task_id,json=parentWorkflowTaskId,proto3" json:"parent_workflow_task_id,omitempty"` + Tasks []*Task `protobuf:"bytes,6,rep,name=tasks,proto3" json:"tasks,omitempty"` + Input map[string]*_struct.Value `protobuf:"bytes,8,rep,name=input,proto3" json:"input,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Output map[string]*_struct.Value `protobuf:"bytes,9,rep,name=output,proto3" json:"output,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + WorkflowType string `protobuf:"bytes,10,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,11,opt,name=version,proto3" json:"version,omitempty"` + CorrelationId string `protobuf:"bytes,12,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + ReRunFromWorkflowId string `protobuf:"bytes,13,opt,name=re_run_from_workflow_id,json=reRunFromWorkflowId,proto3" json:"re_run_from_workflow_id,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,14,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + SchemaVersion int32 `protobuf:"varint,15,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + Event string `protobuf:"bytes,16,opt,name=event,proto3" json:"event,omitempty"` + TaskToDomain map[string]string `protobuf:"bytes,17,rep,name=task_to_domain,json=taskToDomain,proto3" json:"task_to_domain,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + FailedReferenceTaskNames []string `protobuf:"bytes,18,rep,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` + WorkflowDefinition *WorkflowDef 
`protobuf:"bytes,19,opt,name=workflow_definition,json=workflowDefinition,proto3" json:"workflow_definition,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -81,7 +82,7 @@ func (m *Workflow) Reset() { *m = Workflow{} } func (m *Workflow) String() string { return proto.CompactTextString(m) } func (*Workflow) ProtoMessage() {} func (*Workflow) Descriptor() ([]byte, []int) { - return fileDescriptor_workflow_ccfaf06783966538, []int{0} + return fileDescriptor_workflow_b47c95a464502efa, []int{0} } func (m *Workflow) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_Workflow.Unmarshal(m, b) @@ -220,6 +221,13 @@ func (m *Workflow) GetFailedReferenceTaskNames() []string { return nil } +func (m *Workflow) GetWorkflowDefinition() *WorkflowDef { + if m != nil { + return m.WorkflowDefinition + } + return nil +} + func init() { proto.RegisterType((*Workflow)(nil), "conductor.proto.Workflow") proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.Workflow.InputEntry") @@ -228,51 +236,54 @@ func init() { proto.RegisterEnum("conductor.proto.Workflow_WorkflowStatus", Workflow_WorkflowStatus_name, Workflow_WorkflowStatus_value) } -func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_ccfaf06783966538) } - -var fileDescriptor_workflow_ccfaf06783966538 = []byte{ - // 688 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0xdf, 0x4f, 0xdb, 0x48, - 0x10, 0xc7, 0x2f, 0x84, 0x04, 0x32, 0x21, 0xc1, 0xec, 0x85, 0xc3, 0x07, 0x27, 0x5d, 0x44, 0x8b, - 0x14, 0x09, 0xe4, 0x48, 0xf4, 0x87, 0x2a, 0x24, 0xd4, 0x42, 0x63, 0x2a, 0x4b, 0x10, 0x82, 0x31, - 0x20, 0xf5, 0xc5, 0x72, 0xec, 0x75, 0xb0, 0x62, 0xef, 0x5a, 0xeb, 0x35, 0x34, 0xcf, 0xfd, 0xc7, - 0xab, 0xdd, 0x8d, 0xc1, 0x40, 0x79, 0xeb, 0x9b, 0x77, 0xe6, 0xf3, 0xfd, 0x6a, 0x76, 0x66, 0xc7, - 0xd0, 0x49, 0x68, 0x80, 0xe3, 0xfe, 0x3d, 0x65, 0xd3, 0x30, 0xa6, 0xf7, 0x46, 0xca, 0x28, 0xa7, - 0x68, 0xd5, 0xa7, 0x24, 0xc8, 0x7d, 0x4e, 0x99, 0x0a, 0x6c, 0x6a, 0x0a, 0xe3, 0x5e, 0x36, 0x9d, - 0x47, 0xfe, 0x9b, 0x50, 0x3a, 0x89, 0x71, 0x5f, 0x9e, 0xc6, 0x79, 0xd8, 0xcf, 0x38, 0xcb, 0x7d, - 0xae, 0xb2, 0xdb, 0x3f, 0x1b, 0xb0, 0x7c, 0x33, 0xf7, 0x44, 0x5f, 0xa0, 0x9e, 0x71, 0x8f, 0xe7, - 0x99, 0x5e, 0xe9, 0x56, 0x7a, 0xed, 0xfd, 0x9e, 0xf1, 0xcc, 0xde, 0x28, 0xd0, 0x87, 0x8f, 0x4b, - 0xc9, 0xdb, 0x73, 0x1d, 0xfa, 0x17, 0x96, 0x31, 0x09, 0x5c, 0x1e, 0x25, 0x58, 0x5f, 0xe8, 0x56, - 0x7a, 0x55, 0x7b, 0x09, 0x93, 0xc0, 0x89, 0x12, 0x8c, 0xfe, 0x87, 0x66, 0x51, 0xbc, 0x1b, 0x05, - 0x7a, 0xb5, 0x5b, 0xe9, 0x35, 0x6c, 0x28, 0x42, 0x56, 0x80, 0xf6, 0x00, 0xa5, 0x1e, 0xc3, 0x84, - 0xbb, 0x65, 0x6e, 0x51, 0x72, 0x9a, 0xca, 0xdc, 0x3c, 0xd2, 0x1f, 0x60, 0xe3, 0x39, 0x2d, 0x2e, - 0x2d, 0x24, 0x35, 0x29, 0xe9, 0x3c, 0x95, 0x38, 0x5e, 0x36, 0xb5, 0x02, 0xb4, 0x0b, 0x35, 0x81, - 0x65, 0x7a, 0xbd, 0x5b, 0xed, 0x35, 0xf7, 0xd7, 0x5f, 0xdc, 0x50, 0x70, 0xb6, 0x62, 0xd0, 0x01, - 0xd4, 0x22, 0x92, 0xe6, 0x5c, 0x5f, 0x96, 0xf0, 0xdb, 0xd7, 0xdb, 0x61, 0x09, 0xcc, 0x24, 0x9c, - 0xcd, 0x6c, 0x25, 0x41, 0x87, 0x50, 0xa7, 0x39, 0x17, 0xe2, 0x86, 0x14, 0xef, 0xbc, 0x2e, 0x3e, - 0x97, 0x9c, 0x52, 0xcf, 0x45, 0xe8, 0x0d, 0xb4, 0x1e, 0xef, 0x35, 0x4b, 0xb1, 0x0e, 0xf2, 0x52, - 0x2b, 0x45, 0xd0, 0x99, 0xa5, 0x18, 0xe9, 0xb0, 0x74, 0x87, 0x59, 0x16, 0x51, 0xa2, 0x37, 0xbb, - 0x95, 0x5e, 0xcd, 0x2e, 0x8e, 0x68, 0x07, 0xda, 0x3e, 0x65, 0x0c, 0xc7, 0x1e, 0x8f, 0x28, 0x11, - 0x4d, 0x59, 0x91, 0xfa, 0x56, 0x29, 0x6a, 0x05, 0xe8, 0x3d, 
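Workflow now carries its own definition inline: field 19, workflow_definition, is typed as the WorkflowDef message from workflowdef.pb.go and comes with the nil-safe GetWorkflowDefinition accessor added above. A hedged sketch of consuming it, under the same assumed import path as the earlier example; the fallback to the legacy workflow_type string is a plausible calling pattern, not something this patch prescribes:

package conductorexample

import (
	"fmt"

	"github.com/netflix/conductor/client/gogrpc/conductor/model" // assumed path
)

// describe prefers the embedded definition and falls back to the older
// workflow_type string when no definition was attached.
func describe(wf *model.Workflow) string {
	if def := wf.GetWorkflowDefinition(); def != nil { // nil-safe generated getter
		return fmt.Sprintf("%s v%d, %d tasks", def.GetName(), def.GetVersion(), len(def.GetTasks()))
	}
	return wf.GetWorkflowType()
}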
0x6c, 0x30, 0xec, 0xb2, 0x9c, 0xb8, - 0x21, 0xa3, 0xc9, 0x93, 0xbe, 0xb7, 0x24, 0xff, 0x37, 0xc3, 0x76, 0x4e, 0x4e, 0x18, 0x4d, 0x4a, - 0xad, 0xff, 0x28, 0x54, 0x5e, 0x46, 0x89, 0x1b, 0x52, 0xe6, 0x46, 0xc4, 0xa7, 0x49, 0x1a, 0x63, - 0x61, 0xa9, 0xb7, 0xa5, 0x6a, 0x5d, 0xa5, 0x4f, 0x28, 0xb3, 0x4a, 0x49, 0x51, 0x54, 0xe6, 0xdf, - 0xe2, 0xc4, 0x73, 0x8b, 0xaa, 0x57, 0x65, 0xd5, 0x2d, 0x15, 0xbd, 0x9e, 0xd7, 0xde, 0x81, 0x1a, - 0xbe, 0xc3, 0x84, 0xeb, 0x9a, 0x34, 0x53, 0x07, 0x74, 0x01, 0x6d, 0x39, 0x5f, 0x4e, 0xdd, 0x80, - 0x26, 0x5e, 0x44, 0xf4, 0x35, 0xd9, 0xd7, 0xdd, 0xd7, 0xfb, 0x2a, 0x46, 0xe9, 0xd0, 0x81, 0xa4, - 0x55, 0x77, 0x57, 0x78, 0x29, 0x84, 0x0e, 0x61, 0x2b, 0xf4, 0xa2, 0x18, 0x07, 0x2e, 0xc3, 0x21, - 0x66, 0x98, 0xf8, 0x58, 0xbd, 0x21, 0xe2, 0x25, 0x38, 0xd3, 0x51, 0xb7, 0xda, 0x6b, 0xd8, 0xba, - 0x42, 0xec, 0x82, 0x10, 0xa6, 0x43, 0x91, 0xdf, 0x1c, 0x01, 0x3c, 0x8e, 0x1d, 0x69, 0x50, 0x9d, - 0xe2, 0x99, 0x5c, 0x9c, 0x86, 0x2d, 0x3e, 0xd1, 0x1e, 0xd4, 0xee, 0xbc, 0x38, 0x57, 0x8b, 0xd0, - 0xdc, 0xff, 0xc7, 0x50, 0x8b, 0x68, 0x14, 0x8b, 0x68, 0x5c, 0x8b, 0xac, 0xad, 0xa0, 0x83, 0x85, - 0x4f, 0x95, 0xcd, 0x0b, 0x68, 0x96, 0xde, 0xc2, 0x1f, 0xb1, 0xfc, 0x0c, 0x6b, 0x2f, 0xda, 0xf0, - 0x1b, 0xe3, 0x4e, 0xd9, 0xb8, 0x51, 0x32, 0xd8, 0xf6, 0xa1, 0xfd, 0x74, 0xd7, 0x51, 0x13, 0x96, - 0xec, 0xab, 0xe1, 0xd0, 0x1a, 0x7e, 0xd3, 0xfe, 0x42, 0x2d, 0x68, 0x7c, 0x3d, 0x3f, 0x1b, 0x9d, - 0x9a, 0x8e, 0x39, 0xd0, 0x2a, 0x08, 0xa0, 0x7e, 0x72, 0x64, 0x9d, 0x9a, 0x03, 0x6d, 0x41, 0xa4, - 0x1c, 0xeb, 0xcc, 0x1c, 0xb8, 0xe7, 0x57, 0x8e, 0x56, 0x45, 0x6d, 0x00, 0xc7, 0xb4, 0xcf, 0xac, - 0xe1, 0x91, 0x40, 0x17, 0x05, 0x3a, 0x3a, 0xba, 0xba, 0x34, 0x07, 0x5a, 0xed, 0x18, 0xc3, 0x96, - 0x4f, 0x13, 0x83, 0x60, 0x1e, 0xc6, 0xd1, 0x8f, 0xe7, 0x13, 0x3d, 0x86, 0xa2, 0x82, 0xd1, 0xf8, - 0xfb, 0xc1, 0x24, 0xe2, 0xb7, 0xf9, 0xd8, 0xf0, 0x69, 0xd2, 0x9f, 0xf3, 0xfd, 0x07, 0xbe, 0xef, - 0xc7, 0x11, 0x26, 0xbc, 0x3f, 0xa1, 0x13, 0x96, 0xfa, 0xa5, 0xb8, 0xfc, 0x2b, 0x8e, 0xeb, 0xd2, - 0xee, 0xdd, 0xaf, 0x00, 0x00, 0x00, 0xff, 0xff, 0x94, 0x27, 0xe7, 0x4f, 0x4c, 0x05, 0x00, 0x00, +func init() { proto.RegisterFile("model/workflow.proto", fileDescriptor_workflow_b47c95a464502efa) } + +var fileDescriptor_workflow_b47c95a464502efa = []byte{ + // 727 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xac, 0x94, 0x5f, 0x4f, 0xe3, 0x46, + 0x14, 0xc5, 0x6b, 0xb2, 0x09, 0xe4, 0x86, 0x64, 0xbd, 0x43, 0xb6, 0xb8, 0xec, 0x4a, 0x8d, 0xb6, + 0x5d, 0xc9, 0xd2, 0x22, 0x47, 0x4a, 0xff, 0xa8, 0x42, 0x42, 0x2d, 0x34, 0xa1, 0xb2, 0x44, 0x42, + 0x30, 0x06, 0xa4, 0xbe, 0x58, 0x8e, 0x3d, 0x0e, 0x56, 0xec, 0x99, 0x68, 0x3c, 0x86, 0xe6, 0x6b, + 0xf6, 0x13, 0x55, 0x33, 0x63, 0x13, 0x07, 0x9a, 0xb7, 0x7d, 0xcb, 0xdc, 0xfb, 0x3b, 0x27, 0x77, + 0xce, 0xcc, 0x18, 0xba, 0x29, 0x0d, 0x71, 0xd2, 0x7f, 0xa2, 0x6c, 0x11, 0x25, 0xf4, 0xc9, 0x5a, + 0x32, 0xca, 0x29, 0x7a, 0x1b, 0x50, 0x12, 0xe6, 0x01, 0xa7, 0x4c, 0x15, 0x8e, 0x0e, 0x37, 0xb1, + 0x10, 0x47, 0x45, 0x43, 0x57, 0x0d, 0xee, 0x67, 0x8b, 0xa2, 0xf2, 0x71, 0x4e, 0xe9, 0x3c, 0xc1, + 0x7d, 0xb9, 0x9a, 0xe5, 0x51, 0x3f, 0xe3, 0x2c, 0x0f, 0xb8, 0xea, 0x7e, 0xfa, 0xb7, 0x09, 0x7b, + 0xf7, 0x85, 0x0b, 0xfa, 0x03, 0x1a, 0x19, 0xf7, 0x79, 0x9e, 0x19, 0x5a, 0x4f, 0x33, 0x3b, 0x03, + 0xd3, 0x7a, 0xf1, 0xbf, 0x56, 0x89, 0x3e, 0xff, 0xb8, 0x91, 0xbc, 0x53, 0xe8, 0xd0, 0x77, 0xb0, + 0x87, 0x49, 0xe8, 0xf1, 0x38, 0xc5, 0xc6, 0x4e, 0x4f, 0x33, 0x6b, 0xce, 0x2e, 0x26, 0xa1, 0x1b, + 0xa7, 0x18, 0x7d, 0x0f, 0xad, 0x72, 0x5c, 0x2f, 0x0e, 0x8d, 0x5a, 0x4f, 
0x33, 0x9b, 0x0e, 0x94, + 0x25, 0x3b, 0x44, 0xc7, 0x80, 0x96, 0x3e, 0xc3, 0x84, 0x7b, 0x55, 0xee, 0x8d, 0xe4, 0x74, 0xd5, + 0xb9, 0x5f, 0xd3, 0xbf, 0xc0, 0xe1, 0x4b, 0x5a, 0x6c, 0x5a, 0x48, 0xea, 0x52, 0xd2, 0xdd, 0x94, + 0xb8, 0x7e, 0xb6, 0xb0, 0x43, 0xf4, 0x05, 0xea, 0x02, 0xcb, 0x8c, 0x46, 0xaf, 0x66, 0xb6, 0x06, + 0xef, 0x5f, 0xed, 0x50, 0x70, 0x8e, 0x62, 0xd0, 0x09, 0xd4, 0x63, 0xb2, 0xcc, 0xb9, 0xb1, 0x27, + 0xe1, 0x1f, 0xb7, 0xc7, 0x61, 0x0b, 0x6c, 0x44, 0x38, 0x5b, 0x39, 0x4a, 0x82, 0x4e, 0xa1, 0x41, + 0x73, 0x2e, 0xc4, 0x4d, 0x29, 0xfe, 0xbc, 0x5d, 0x7c, 0x25, 0x39, 0xa5, 0x2e, 0x44, 0xe8, 0x07, + 0x68, 0xaf, 0xf7, 0xb5, 0x5a, 0x62, 0x03, 0xe4, 0xa6, 0xf6, 0xcb, 0xa2, 0xbb, 0x5a, 0x62, 0x64, + 0xc0, 0xee, 0x23, 0x66, 0x59, 0x4c, 0x89, 0xd1, 0xea, 0x69, 0x66, 0xdd, 0x29, 0x97, 0xe8, 0x33, + 0x74, 0x02, 0xca, 0x18, 0x4e, 0x7c, 0x1e, 0x53, 0x22, 0x42, 0xd9, 0x97, 0xfa, 0x76, 0xa5, 0x6a, + 0x87, 0xe8, 0x67, 0x38, 0x64, 0xd8, 0x63, 0x39, 0xf1, 0x22, 0x46, 0xd3, 0x8d, 0xdc, 0xdb, 0x92, + 0x3f, 0x60, 0xd8, 0xc9, 0xc9, 0x05, 0xa3, 0x69, 0x25, 0xfa, 0x5f, 0x85, 0xca, 0xcf, 0x28, 0xf1, + 0x22, 0xca, 0xbc, 0x98, 0x04, 0x34, 0x5d, 0x26, 0x58, 0x58, 0x1a, 0x1d, 0xa9, 0x7a, 0xaf, 0xda, + 0x17, 0x94, 0xd9, 0x95, 0xa6, 0x18, 0x2a, 0x0b, 0x1e, 0x70, 0xea, 0x7b, 0xe5, 0xd4, 0x6f, 0xe5, + 0xd4, 0x6d, 0x55, 0xbd, 0x2b, 0x66, 0xef, 0x42, 0x1d, 0x3f, 0x62, 0xc2, 0x0d, 0x5d, 0x9a, 0xa9, + 0x05, 0xba, 0x86, 0x8e, 0x3c, 0x5f, 0x4e, 0xbd, 0x90, 0xa6, 0x7e, 0x4c, 0x8c, 0x77, 0x32, 0xd7, + 0x2f, 0xdb, 0x73, 0x15, 0x47, 0xe9, 0xd2, 0xa1, 0xa4, 0x55, 0xba, 0xfb, 0xbc, 0x52, 0x42, 0xa7, + 0xf0, 0x21, 0xf2, 0xe3, 0x04, 0x87, 0x1e, 0xc3, 0x11, 0x66, 0x98, 0x04, 0x58, 0xdd, 0x21, 0xe2, + 0xa7, 0x38, 0x33, 0x50, 0xaf, 0x66, 0x36, 0x1d, 0x43, 0x21, 0x4e, 0x49, 0x08, 0xd3, 0x89, 0xe8, + 0xa3, 0x31, 0x1c, 0x3c, 0x07, 0x16, 0xe2, 0x28, 0x26, 0xb1, 0x8c, 0xe0, 0xa0, 0xa7, 0x99, 0xad, + 0xc1, 0xc7, 0xad, 0x63, 0x0d, 0x71, 0xe4, 0xa0, 0xa7, 0xf5, 0xa2, 0xd0, 0x1d, 0x4d, 0x01, 0xd6, + 0xb7, 0x08, 0xe9, 0x50, 0x5b, 0xe0, 0x95, 0x7c, 0x87, 0x4d, 0x47, 0xfc, 0x44, 0xc7, 0x50, 0x7f, + 0xf4, 0x93, 0x5c, 0xbd, 0xab, 0xd6, 0xe0, 0x5b, 0x4b, 0xbd, 0x6b, 0xab, 0x7c, 0xd7, 0xd6, 0x9d, + 0xe8, 0x3a, 0x0a, 0x3a, 0xd9, 0xf9, 0x4d, 0x3b, 0xba, 0x86, 0x56, 0xe5, 0x6a, 0x7d, 0x15, 0xcb, + 0xdf, 0xe1, 0xdd, 0xab, 0x54, 0xff, 0xc7, 0xb8, 0x5b, 0x35, 0x6e, 0x56, 0x0c, 0x3e, 0x05, 0xd0, + 0xd9, 0xfc, 0x74, 0xa0, 0x16, 0xec, 0x3a, 0xb7, 0x93, 0x89, 0x3d, 0xf9, 0x4b, 0xff, 0x06, 0xb5, + 0xa1, 0xf9, 0xe7, 0xd5, 0x78, 0x7a, 0x39, 0x72, 0x47, 0x43, 0x5d, 0x43, 0x00, 0x8d, 0x8b, 0x33, + 0xfb, 0x72, 0x34, 0xd4, 0x77, 0x44, 0xcb, 0xb5, 0xc7, 0xa3, 0xa1, 0x77, 0x75, 0xeb, 0xea, 0x35, + 0xd4, 0x01, 0x70, 0x47, 0xce, 0xd8, 0x9e, 0x9c, 0x09, 0xf4, 0x8d, 0x40, 0xa7, 0x67, 0xb7, 0x37, + 0xa3, 0xa1, 0x5e, 0x3f, 0xc7, 0xf0, 0x21, 0xa0, 0xa9, 0x45, 0x30, 0x8f, 0x92, 0xf8, 0x9f, 0x97, + 0x27, 0x71, 0x0e, 0xe5, 0x04, 0xd3, 0xd9, 0xdf, 0x27, 0xf3, 0x98, 0x3f, 0xe4, 0x33, 0x2b, 0xa0, + 0x69, 0xbf, 0xe0, 0xfb, 0xcf, 0x7c, 0x3f, 0x48, 0x62, 0x4c, 0x78, 0x7f, 0x4e, 0xe7, 0x6c, 0x19, + 0x54, 0xea, 0xf2, 0x23, 0x3b, 0x6b, 0x48, 0xbb, 0x9f, 0xfe, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1f, + 0xe2, 0x8d, 0x7f, 0xb4, 0x05, 0x00, 0x00, } diff --git a/client/gogrpc/conductor/model/workflowdef.pb.go b/client/gogrpc/conductor/model/workflowdef.pb.go index 1be3a0cac5..00cd04c69b 100644 --- a/client/gogrpc/conductor/model/workflowdef.pb.go +++ b/client/gogrpc/conductor/model/workflowdef.pb.go @@ -20,15 +20,15 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade 
the proto package type WorkflowDef struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - Description string `protobuf:"bytes,2,opt,name=description" json:"description,omitempty"` - Version int32 `protobuf:"varint,3,opt,name=version" json:"version,omitempty"` - Tasks []*WorkflowTask `protobuf:"bytes,4,rep,name=tasks" json:"tasks,omitempty"` - InputParameters []string `protobuf:"bytes,5,rep,name=input_parameters,json=inputParameters" json:"input_parameters,omitempty"` - OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow" json:"failure_workflow,omitempty"` - SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion" json:"schema_version,omitempty"` - Restartable bool `protobuf:"varint,9,opt,name=restartable" json:"restartable,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Description string `protobuf:"bytes,2,opt,name=description,proto3" json:"description,omitempty"` + Version int32 `protobuf:"varint,3,opt,name=version,proto3" json:"version,omitempty"` + Tasks []*WorkflowTask `protobuf:"bytes,4,rep,name=tasks,proto3" json:"tasks,omitempty"` + InputParameters []string `protobuf:"bytes,5,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty"` + OutputParameters map[string]*_struct.Value `protobuf:"bytes,6,rep,name=output_parameters,json=outputParameters,proto3" json:"output_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + FailureWorkflow string `protobuf:"bytes,7,opt,name=failure_workflow,json=failureWorkflow,proto3" json:"failure_workflow,omitempty"` + SchemaVersion int32 `protobuf:"varint,8,opt,name=schema_version,json=schemaVersion,proto3" json:"schema_version,omitempty"` + Restartable bool `protobuf:"varint,9,opt,name=restartable,proto3" json:"restartable,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -38,7 +38,7 @@ func (m *WorkflowDef) Reset() { *m = WorkflowDef{} } func (m *WorkflowDef) String() string { return proto.CompactTextString(m) } func (*WorkflowDef) ProtoMessage() {} func (*WorkflowDef) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowdef_3a04d4bf8b36be23, []int{0} + return fileDescriptor_workflowdef_7fb3769e2566471c, []int{0} } func (m *WorkflowDef) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowDef.Unmarshal(m, b) @@ -127,10 +127,10 @@ func init() { } func init() { - proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_3a04d4bf8b36be23) + proto.RegisterFile("model/workflowdef.proto", fileDescriptor_workflowdef_7fb3769e2566471c) } -var fileDescriptor_workflowdef_3a04d4bf8b36be23 = []byte{ +var fileDescriptor_workflowdef_7fb3769e2566471c = []byte{ // 404 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x92, 0xdf, 0x6b, 0xd5, 0x30, 0x14, 0xc7, 0xe9, 0xba, 0x6e, 0xbb, 0x29, 0xf3, 0x5e, 0x03, 0x6a, 0x98, 0x0a, 0x45, 0x10, 0x2a, diff --git a/client/gogrpc/conductor/model/workflowsummary.pb.go b/client/gogrpc/conductor/model/workflowsummary.pb.go index 8d1a794f5c..63847d00b8 100644 --- a/client/gogrpc/conductor/model/workflowsummary.pb.go +++ 
b/client/gogrpc/conductor/model/workflowsummary.pb.go @@ -19,20 +19,20 @@ var _ = math.Inf const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package type WorkflowSummary struct { - WorkflowType string `protobuf:"bytes,1,opt,name=workflow_type,json=workflowType" json:"workflow_type,omitempty"` - Version int32 `protobuf:"varint,2,opt,name=version" json:"version,omitempty"` - WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId" json:"workflow_id,omitempty"` - CorrelationId string `protobuf:"bytes,4,opt,name=correlation_id,json=correlationId" json:"correlation_id,omitempty"` - StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime" json:"start_time,omitempty"` - UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime" json:"update_time,omitempty"` - EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime" json:"end_time,omitempty"` - Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` - Input string `protobuf:"bytes,9,opt,name=input" json:"input,omitempty"` - Output string `protobuf:"bytes,10,opt,name=output" json:"output,omitempty"` - ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion" json:"reason_for_incompletion,omitempty"` - ExecutionTime int64 `protobuf:"varint,12,opt,name=execution_time,json=executionTime" json:"execution_time,omitempty"` - Event string `protobuf:"bytes,13,opt,name=event" json:"event,omitempty"` - FailedReferenceTaskNames string `protobuf:"bytes,14,opt,name=failed_reference_task_names,json=failedReferenceTaskNames" json:"failed_reference_task_names,omitempty"` + WorkflowType string `protobuf:"bytes,1,opt,name=workflow_type,json=workflowType,proto3" json:"workflow_type,omitempty"` + Version int32 `protobuf:"varint,2,opt,name=version,proto3" json:"version,omitempty"` + WorkflowId string `protobuf:"bytes,3,opt,name=workflow_id,json=workflowId,proto3" json:"workflow_id,omitempty"` + CorrelationId string `protobuf:"bytes,4,opt,name=correlation_id,json=correlationId,proto3" json:"correlation_id,omitempty"` + StartTime string `protobuf:"bytes,5,opt,name=start_time,json=startTime,proto3" json:"start_time,omitempty"` + UpdateTime string `protobuf:"bytes,6,opt,name=update_time,json=updateTime,proto3" json:"update_time,omitempty"` + EndTime string `protobuf:"bytes,7,opt,name=end_time,json=endTime,proto3" json:"end_time,omitempty"` + Status Workflow_WorkflowStatus `protobuf:"varint,8,opt,name=status,proto3,enum=conductor.proto.Workflow_WorkflowStatus" json:"status,omitempty"` + Input string `protobuf:"bytes,9,opt,name=input,proto3" json:"input,omitempty"` + Output string `protobuf:"bytes,10,opt,name=output,proto3" json:"output,omitempty"` + ReasonForIncompletion string `protobuf:"bytes,11,opt,name=reason_for_incompletion,json=reasonForIncompletion,proto3" json:"reason_for_incompletion,omitempty"` + ExecutionTime int64 `protobuf:"varint,12,opt,name=execution_time,json=executionTime,proto3" json:"execution_time,omitempty"` + Event string `protobuf:"bytes,13,opt,name=event,proto3" json:"event,omitempty"` + FailedReferenceTaskNames string `protobuf:"bytes,14,opt,name=failed_reference_task_names,json=failedReferenceTaskNames,proto3" json:"failed_reference_task_names,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -42,7 +42,7 @@ func (m *WorkflowSummary) Reset() { *m = WorkflowSummary{} } func 
(m *WorkflowSummary) String() string { return proto.CompactTextString(m) } func (*WorkflowSummary) ProtoMessage() {} func (*WorkflowSummary) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowsummary_3f8ed40c0bd9261f, []int{0} + return fileDescriptor_workflowsummary_4b94875bbd67cbd7, []int{0} } func (m *WorkflowSummary) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowSummary.Unmarshal(m, b) @@ -165,10 +165,10 @@ func init() { } func init() { - proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_3f8ed40c0bd9261f) + proto.RegisterFile("model/workflowsummary.proto", fileDescriptor_workflowsummary_4b94875bbd67cbd7) } -var fileDescriptor_workflowsummary_3f8ed40c0bd9261f = []byte{ +var fileDescriptor_workflowsummary_4b94875bbd67cbd7 = []byte{ // 428 bytes of a gzipped FileDescriptorProto 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x5c, 0x92, 0x51, 0x8b, 0xd3, 0x40, 0x10, 0xc7, 0x89, 0x77, 0x6d, 0xaf, 0x7b, 0xd7, 0x1e, 0x2e, 0x55, 0x57, 0x8b, 0x58, 0x14, 0x21, diff --git a/client/gogrpc/conductor/model/workflowtask.pb.go b/client/gogrpc/conductor/model/workflowtask.pb.go index 9176ece14b..54ed68c273 100644 --- a/client/gogrpc/conductor/model/workflowtask.pb.go +++ b/client/gogrpc/conductor/model/workflowtask.pb.go @@ -19,72 +19,27 @@ var _ = math.Inf // proto package needs to be updated. const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package -type WorkflowTask_Type int32 - -const ( - WorkflowTask_SIMPLE WorkflowTask_Type = 0 - WorkflowTask_DYNAMIC WorkflowTask_Type = 1 - WorkflowTask_FORK_JOIN WorkflowTask_Type = 2 - WorkflowTask_FORK_JOIN_DYNAMIC WorkflowTask_Type = 3 - WorkflowTask_DECISION WorkflowTask_Type = 4 - WorkflowTask_JOIN WorkflowTask_Type = 5 - WorkflowTask_SUB_WORKFLOW WorkflowTask_Type = 6 - WorkflowTask_EVENT WorkflowTask_Type = 7 - WorkflowTask_WAIT WorkflowTask_Type = 8 - WorkflowTask_USER_DEFINED WorkflowTask_Type = 9 -) - -var WorkflowTask_Type_name = map[int32]string{ - 0: "SIMPLE", - 1: "DYNAMIC", - 2: "FORK_JOIN", - 3: "FORK_JOIN_DYNAMIC", - 4: "DECISION", - 5: "JOIN", - 6: "SUB_WORKFLOW", - 7: "EVENT", - 8: "WAIT", - 9: "USER_DEFINED", -} -var WorkflowTask_Type_value = map[string]int32{ - "SIMPLE": 0, - "DYNAMIC": 1, - "FORK_JOIN": 2, - "FORK_JOIN_DYNAMIC": 3, - "DECISION": 4, - "JOIN": 5, - "SUB_WORKFLOW": 6, - "EVENT": 7, - "WAIT": 8, - "USER_DEFINED": 9, -} - -func (x WorkflowTask_Type) String() string { - return proto.EnumName(WorkflowTask_Type_name, int32(x)) -} -func (WorkflowTask_Type) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0, 0} -} - type WorkflowTask struct { - Name string `protobuf:"bytes,1,opt,name=name" json:"name,omitempty"` - TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName" json:"task_reference_name,omitempty"` - Description string `protobuf:"bytes,3,opt,name=description" json:"description,omitempty"` - InputParameters map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input_parameters,json=inputParameters" json:"input_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - Type string `protobuf:"bytes,5,opt,name=type" json:"type,omitempty"` - DynamicTaskNameParam string `protobuf:"bytes,6,opt,name=dynamic_task_name_param,json=dynamicTaskNameParam" json:"dynamic_task_name_param,omitempty"` - CaseValueParam string `protobuf:"bytes,7,opt,name=case_value_param,json=caseValueParam" json:"case_value_param,omitempty"` - 
CaseExpression string `protobuf:"bytes,8,opt,name=case_expression,json=caseExpression" json:"case_expression,omitempty"` - DecisionCases map[string]*WorkflowTask_WorkflowTaskList `protobuf:"bytes,9,rep,name=decision_cases,json=decisionCases" json:"decision_cases,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - DynamicForkTasksParam string `protobuf:"bytes,10,opt,name=dynamic_fork_tasks_param,json=dynamicForkTasksParam" json:"dynamic_fork_tasks_param,omitempty"` - DynamicForkTasksInputParamName string `protobuf:"bytes,11,opt,name=dynamic_fork_tasks_input_param_name,json=dynamicForkTasksInputParamName" json:"dynamic_fork_tasks_input_param_name,omitempty"` - DefaultCase []*WorkflowTask `protobuf:"bytes,12,rep,name=default_case,json=defaultCase" json:"default_case,omitempty"` - ForkTasks []*WorkflowTask_WorkflowTaskList `protobuf:"bytes,13,rep,name=fork_tasks,json=forkTasks" json:"fork_tasks,omitempty"` - StartDelay int32 `protobuf:"varint,14,opt,name=start_delay,json=startDelay" json:"start_delay,omitempty"` - SubWorkflowParam *SubWorkflowParams `protobuf:"bytes,15,opt,name=sub_workflow_param,json=subWorkflowParam" json:"sub_workflow_param,omitempty"` - JoinOn []string `protobuf:"bytes,16,rep,name=join_on,json=joinOn" json:"join_on,omitempty"` - Sink string `protobuf:"bytes,17,opt,name=sink" json:"sink,omitempty"` - Optional bool `protobuf:"varint,18,opt,name=optional" json:"optional,omitempty"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + TaskReferenceName string `protobuf:"bytes,2,opt,name=task_reference_name,json=taskReferenceName,proto3" json:"task_reference_name,omitempty"` + Description string `protobuf:"bytes,3,opt,name=description,proto3" json:"description,omitempty"` + InputParameters map[string]*_struct.Value `protobuf:"bytes,4,rep,name=input_parameters,json=inputParameters,proto3" json:"input_parameters,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Type string `protobuf:"bytes,5,opt,name=type,proto3" json:"type,omitempty"` + DynamicTaskNameParam string `protobuf:"bytes,6,opt,name=dynamic_task_name_param,json=dynamicTaskNameParam,proto3" json:"dynamic_task_name_param,omitempty"` + CaseValueParam string `protobuf:"bytes,7,opt,name=case_value_param,json=caseValueParam,proto3" json:"case_value_param,omitempty"` + CaseExpression string `protobuf:"bytes,8,opt,name=case_expression,json=caseExpression,proto3" json:"case_expression,omitempty"` + DecisionCases map[string]*WorkflowTask_WorkflowTaskList `protobuf:"bytes,9,rep,name=decision_cases,json=decisionCases,proto3" json:"decision_cases,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + DynamicForkTasksParam string `protobuf:"bytes,10,opt,name=dynamic_fork_tasks_param,json=dynamicForkTasksParam,proto3" json:"dynamic_fork_tasks_param,omitempty"` + DynamicForkTasksInputParamName string `protobuf:"bytes,11,opt,name=dynamic_fork_tasks_input_param_name,json=dynamicForkTasksInputParamName,proto3" json:"dynamic_fork_tasks_input_param_name,omitempty"` + DefaultCase []*WorkflowTask `protobuf:"bytes,12,rep,name=default_case,json=defaultCase,proto3" json:"default_case,omitempty"` + ForkTasks []*WorkflowTask_WorkflowTaskList `protobuf:"bytes,13,rep,name=fork_tasks,json=forkTasks,proto3" json:"fork_tasks,omitempty"` + StartDelay int32 `protobuf:"varint,14,opt,name=start_delay,json=startDelay,proto3" json:"start_delay,omitempty"` + SubWorkflowParam *SubWorkflowParams 
`protobuf:"bytes,15,opt,name=sub_workflow_param,json=subWorkflowParam,proto3" json:"sub_workflow_param,omitempty"` + JoinOn []string `protobuf:"bytes,16,rep,name=join_on,json=joinOn,proto3" json:"join_on,omitempty"` + Sink string `protobuf:"bytes,17,opt,name=sink,proto3" json:"sink,omitempty"` + Optional bool `protobuf:"varint,18,opt,name=optional,proto3" json:"optional,omitempty"` + TaskDefinition *TaskDef `protobuf:"bytes,19,opt,name=task_definition,json=taskDefinition,proto3" json:"task_definition,omitempty"` + RateLimited bool `protobuf:"varint,20,opt,name=rate_limited,json=rateLimited,proto3" json:"rate_limited,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -94,7 +49,7 @@ func (m *WorkflowTask) Reset() { *m = WorkflowTask{} } func (m *WorkflowTask) String() string { return proto.CompactTextString(m) } func (*WorkflowTask) ProtoMessage() {} func (*WorkflowTask) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0} + return fileDescriptor_workflowtask_5d520fab7400c6cf, []int{0} } func (m *WorkflowTask) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowTask.Unmarshal(m, b) @@ -240,8 +195,22 @@ func (m *WorkflowTask) GetOptional() bool { return false } +func (m *WorkflowTask) GetTaskDefinition() *TaskDef { + if m != nil { + return m.TaskDefinition + } + return nil +} + +func (m *WorkflowTask) GetRateLimited() bool { + if m != nil { + return m.RateLimited + } + return false +} + type WorkflowTask_WorkflowTaskList struct { - Tasks []*WorkflowTask `protobuf:"bytes,1,rep,name=tasks" json:"tasks,omitempty"` + Tasks []*WorkflowTask `protobuf:"bytes,1,rep,name=tasks,proto3" json:"tasks,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -251,7 +220,7 @@ func (m *WorkflowTask_WorkflowTaskList) Reset() { *m = WorkflowTask_Work func (m *WorkflowTask_WorkflowTaskList) String() string { return proto.CompactTextString(m) } func (*WorkflowTask_WorkflowTaskList) ProtoMessage() {} func (*WorkflowTask_WorkflowTaskList) Descriptor() ([]byte, []int) { - return fileDescriptor_workflowtask_9c377873af38ad2e, []int{0, 0} + return fileDescriptor_workflowtask_5d520fab7400c6cf, []int{0, 0} } func (m *WorkflowTask_WorkflowTaskList) XXX_Unmarshal(b []byte) error { return xxx_messageInfo_WorkflowTask_WorkflowTaskList.Unmarshal(m, b) @@ -283,62 +252,57 @@ func init() { proto.RegisterMapType((map[string]*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.DecisionCasesEntry") proto.RegisterMapType((map[string]*_struct.Value)(nil), "conductor.proto.WorkflowTask.InputParametersEntry") proto.RegisterType((*WorkflowTask_WorkflowTaskList)(nil), "conductor.proto.WorkflowTask.WorkflowTaskList") - proto.RegisterEnum("conductor.proto.WorkflowTask_Type", WorkflowTask_Type_name, WorkflowTask_Type_value) } func init() { - proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_9c377873af38ad2e) -} - -var fileDescriptor_workflowtask_9c377873af38ad2e = []byte{ - // 771 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x5d, 0x6f, 0xda, 0x48, - 0x14, 0x5d, 0x87, 0xef, 0x0b, 0x01, 0x67, 0x36, 0xd9, 0x58, 0xec, 0x66, 0x17, 0x65, 0x1f, 0x96, - 0x87, 0x95, 0xa9, 0x88, 0xaa, 0x56, 0x79, 0x6a, 0x12, 0x4c, 0xe5, 0x26, 0x01, 0x64, 0x48, 0x50, - 0x23, 0x55, 0x96, 0xb1, 0x07, 0xea, 0x62, 0x3c, 0x96, 0x67, 0xdc, 0x84, 0x3f, 0xd2, 0x7f, 0xd5, - 0xff, 
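Two behavioural notes on the workflowtask changes above: the generated WorkflowTask_Type enum and its name/value maps are deleted outright, leaving the type field (tag 5) as a plain string, so callers that matched on the enum constants presumably switch to string comparison; and WorkflowTask gains an inlined task_definition (tag 19, a TaskDef) plus a rate_limited flag (tag 20), both with generated getters. A sketch of reading the embedded definition; package name and import path are assumptions as before:

package conductorexample

import "github.com/netflix/conductor/client/gogrpc/conductor/model" // assumed path

// effectiveTimeout reads the timeout from the task's inlined definition
// (field 19 in this patch) and reports 0 when none was attached.
func effectiveTimeout(wt *model.WorkflowTask) int64 {
	if def := wt.GetTaskDefinition(); def != nil {
		return def.GetTimeoutSeconds()
	}
	return 0
}

// With the Type enum gone, string checks such as wt.GetType() == "FORK_JOIN"
// stand in for comparisons against the removed WorkflowTask_FORK_JOIN constant.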
0x54, 0xcd, 0xd8, 0x06, 0x42, 0xa2, 0xa8, 0x7d, 0x9b, 0x39, 0xf7, 0x9c, 0x3b, 0xf7, 0xcc, - 0xdc, 0x3b, 0xa0, 0x2c, 0x88, 0x83, 0xbd, 0xd6, 0x3d, 0x09, 0xe7, 0x53, 0x8f, 0xdc, 0x33, 0x8b, - 0xce, 0xd5, 0x20, 0x24, 0x8c, 0xa0, 0x9a, 0x4d, 0x7c, 0x27, 0xb2, 0x19, 0x09, 0x63, 0xa0, 0x7e, - 0x14, 0x53, 0x69, 0x34, 0x49, 0xd9, 0x81, 0x15, 0x5a, 0x0b, 0x9a, 0x84, 0xff, 0x9a, 0x11, 0x32, - 0xf3, 0x70, 0x4b, 0xec, 0x26, 0xd1, 0xb4, 0x45, 0x59, 0x18, 0xd9, 0x2c, 0x8e, 0x1e, 0x7f, 0x07, - 0xa8, 0x8c, 0x13, 0xd9, 0xc8, 0xa2, 0x73, 0x84, 0x20, 0xeb, 0x5b, 0x0b, 0xac, 0x48, 0x0d, 0xa9, - 0x59, 0x32, 0xc4, 0x1a, 0xa9, 0xf0, 0x3b, 0x2f, 0xc0, 0x0c, 0xf1, 0x14, 0x87, 0xd8, 0xb7, 0xb1, - 0x29, 0x28, 0x3b, 0x82, 0xb2, 0xc7, 0x43, 0x46, 0x1a, 0xe9, 0x71, 0x7e, 0x03, 0xca, 0x0e, 0xa6, - 0x76, 0xe8, 0x06, 0xcc, 0x25, 0xbe, 0x92, 0x11, 0xbc, 0x4d, 0x08, 0x7d, 0x02, 0xd9, 0xf5, 0x83, - 0x88, 0x99, 0xa2, 0x54, 0xcc, 0x70, 0x48, 0x95, 0x6c, 0x23, 0xd3, 0x2c, 0xb7, 0xdb, 0xea, 0x96, - 0x3f, 0x75, 0xb3, 0x3c, 0x55, 0xe7, 0xaa, 0xc1, 0x4a, 0xa4, 0xf9, 0x2c, 0x5c, 0x1a, 0x35, 0xf7, - 0x31, 0xca, 0x4d, 0xb0, 0x65, 0x80, 0x95, 0x5c, 0x6c, 0x82, 0xaf, 0xd1, 0x6b, 0x38, 0x74, 0x96, - 0xbe, 0xb5, 0x70, 0x6d, 0x53, 0x98, 0xe1, 0x16, 0xe2, 0xe3, 0x95, 0xbc, 0xa0, 0xed, 0x27, 0x61, - 0x7e, 0x0e, 0xb7, 0x21, 0xf2, 0xa1, 0x26, 0xc8, 0xb6, 0x45, 0xb1, 0xf9, 0xd5, 0xf2, 0xa2, 0x94, - 0x5f, 0x10, 0xfc, 0x2a, 0xc7, 0x6f, 0x39, 0x1c, 0x33, 0xff, 0x83, 0x9a, 0x60, 0xe2, 0x87, 0x20, - 0xc4, 0x94, 0x72, 0xe7, 0xc5, 0x35, 0x51, 0x5b, 0xa1, 0x68, 0x0c, 0x55, 0x07, 0xdb, 0x2e, 0x5f, - 0x9b, 0x3c, 0x44, 0x95, 0x92, 0xb0, 0xfe, 0xea, 0x65, 0xeb, 0x9d, 0x44, 0x73, 0xc1, 0x25, 0xb1, - 0xf1, 0x5d, 0x67, 0x13, 0x43, 0x6f, 0x40, 0x49, 0x2d, 0x4e, 0x49, 0x38, 0x17, 0x3e, 0x69, 0x52, - 0x33, 0x88, 0x52, 0x0e, 0x92, 0x78, 0x97, 0x84, 0x73, 0x9e, 0x94, 0xc6, 0xa5, 0x5f, 0xc2, 0xbf, - 0xcf, 0x08, 0x37, 0x5e, 0x28, 0x7e, 0xf0, 0xb2, 0xc8, 0xf1, 0xf7, 0x76, 0x8e, 0xf5, 0x9b, 0x88, - 0xd7, 0x7f, 0x07, 0x15, 0x07, 0x4f, 0xad, 0xc8, 0x63, 0xc2, 0x9d, 0x52, 0x11, 0xe6, 0x8e, 0x5e, - 0x34, 0xc7, 0xbb, 0x43, 0x48, 0xb8, 0x11, 0x74, 0x0d, 0xb0, 0x2e, 0x43, 0xd9, 0x15, 0x7a, 0xf5, - 0xe5, 0xcb, 0xd9, 0xdc, 0x5c, 0xb9, 0x94, 0x19, 0xa5, 0x69, 0x5a, 0x1e, 0xfa, 0x07, 0xca, 0x94, - 0x59, 0x21, 0x33, 0x1d, 0xec, 0x59, 0x4b, 0xa5, 0xda, 0x90, 0x9a, 0x39, 0x03, 0x04, 0xd4, 0xe1, - 0x08, 0x1a, 0x00, 0xa2, 0xd1, 0xc4, 0x4c, 0xc7, 0x27, 0xb9, 0xb1, 0x5a, 0x43, 0x6a, 0x96, 0xdb, - 0xc7, 0x4f, 0xce, 0x1d, 0x46, 0x93, 0xf4, 0x34, 0x61, 0x9a, 0x1a, 0x32, 0xdd, 0x82, 0xd0, 0x21, - 0x14, 0xbe, 0x10, 0xd7, 0x37, 0x89, 0xaf, 0xc8, 0x8d, 0x4c, 0xb3, 0x64, 0xe4, 0xf9, 0xb6, 0xef, - 0xf3, 0xce, 0xa4, 0xae, 0x3f, 0x57, 0xf6, 0xe2, 0xce, 0xe4, 0x6b, 0x54, 0x87, 0x22, 0x11, 0x63, - 0x61, 0x79, 0x0a, 0x6a, 0x48, 0xcd, 0xa2, 0xb1, 0xda, 0xd7, 0xdf, 0x83, 0xbc, 0x6d, 0x0d, 0x9d, - 0x40, 0x2e, 0xbe, 0x19, 0xe9, 0x67, 0x6e, 0x36, 0xe6, 0xd6, 0xef, 0x60, 0xff, 0xb9, 0xd9, 0x41, - 0x32, 0x64, 0xe6, 0x78, 0x99, 0x8c, 0x3b, 0x5f, 0xa2, 0xff, 0x21, 0x27, 0x9a, 0x5d, 0xcc, 0x77, - 0xb9, 0xfd, 0x87, 0x1a, 0x7f, 0x20, 0x6a, 0xfa, 0x81, 0xa8, 0xa2, 0xe7, 0x8d, 0x98, 0x74, 0xba, - 0xf3, 0x56, 0xaa, 0x07, 0x80, 0x9e, 0x36, 0xe7, 0x33, 0x99, 0x3b, 0x8f, 0x33, 0xff, 0xea, 0x93, - 0xae, 0x4f, 0x3c, 0xfe, 0x26, 0x41, 0x76, 0xc4, 0xa7, 0x1a, 0x20, 0x3f, 0xd4, 0xaf, 0x07, 0x57, - 0x9a, 0xfc, 0x1b, 0x2a, 0x43, 0xa1, 0xf3, 0xb1, 0x77, 0x76, 0xad, 0x5f, 0xc8, 0x12, 0xda, 0x85, - 0x52, 0xb7, 0x6f, 0x5c, 0x9a, 0x1f, 0xfa, 0x7a, 0x4f, 0xde, 0x41, 0x07, 0xb0, 0xb7, 0xda, 0x9a, - 0x29, 0x2b, 0x83, 0x2a, 0x50, 
0xec, 0x68, 0x17, 0xfa, 0x50, 0xef, 0xf7, 0xe4, 0x2c, 0x2a, 0x42, - 0x56, 0xd0, 0x73, 0x48, 0x86, 0xca, 0xf0, 0xe6, 0xdc, 0x1c, 0xf7, 0x8d, 0xcb, 0xee, 0x55, 0x7f, - 0x2c, 0xe7, 0x51, 0x09, 0x72, 0xda, 0xad, 0xd6, 0x1b, 0xc9, 0x05, 0x4e, 0x1b, 0x9f, 0xe9, 0x23, - 0xb9, 0xc8, 0x69, 0x37, 0x43, 0xcd, 0x30, 0x3b, 0x5a, 0x57, 0xef, 0x69, 0x1d, 0xb9, 0x74, 0xee, - 0xc2, 0x9f, 0x36, 0x59, 0xa8, 0x3e, 0x66, 0x53, 0xcf, 0x7d, 0xd8, 0x36, 0x78, 0x5e, 0xdd, 0x34, - 0x35, 0x98, 0xdc, 0x9d, 0xce, 0x5c, 0xf6, 0x39, 0x9a, 0xa8, 0x36, 0x59, 0xb4, 0x12, 0x4d, 0x6b, - 0xa5, 0x69, 0xd9, 0x9e, 0x8b, 0x7d, 0xd6, 0x9a, 0x91, 0x59, 0x18, 0xd8, 0x1b, 0xb8, 0xf8, 0xf0, - 0x27, 0x79, 0x91, 0xf2, 0xe4, 0x47, 0x00, 0x00, 0x00, 0xff, 0xff, 0x28, 0xc2, 0xb6, 0x19, 0x2b, - 0x06, 0x00, 0x00, + proto.RegisterFile("model/workflowtask.proto", fileDescriptor_workflowtask_5d520fab7400c6cf) +} + +var fileDescriptor_workflowtask_5d520fab7400c6cf = []byte{ + // 708 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0x51, 0x6f, 0xd3, 0x3a, + 0x14, 0x56, 0xd7, 0x75, 0x5b, 0x4f, 0xbb, 0xb6, 0xf3, 0x76, 0xef, 0xac, 0xde, 0xbb, 0x7b, 0xcb, + 0x78, 0xa0, 0x0f, 0x28, 0x45, 0x9d, 0x10, 0x68, 0x4f, 0x30, 0x3a, 0x10, 0x62, 0xc0, 0x14, 0x10, + 0x93, 0x26, 0xa1, 0x28, 0x4d, 0x9c, 0x62, 0x9a, 0xc6, 0x91, 0xed, 0xb0, 0xf5, 0xaf, 0xf0, 0x6b, + 0x91, 0x8f, 0x93, 0x36, 0xeb, 0xa6, 0x09, 0xde, 0xec, 0xef, 0x7c, 0xdf, 0x39, 0xe7, 0xb3, 0x7d, + 0x0c, 0x74, 0x26, 0x42, 0x16, 0x0f, 0xae, 0x84, 0x9c, 0x46, 0xb1, 0xb8, 0xd2, 0xbe, 0x9a, 0x3a, + 0xa9, 0x14, 0x5a, 0x90, 0x76, 0x20, 0x92, 0x30, 0x0b, 0xb4, 0x90, 0x16, 0xe8, 0xee, 0x5a, 0xaa, + 0xa1, 0x84, 0x2c, 0xca, 0xc1, 0x03, 0x0b, 0xaa, 0x6c, 0x5c, 0xa4, 0x48, 0x7d, 0xe9, 0xcf, 0x54, + 0x1e, 0xfe, 0x77, 0x22, 0xc4, 0x24, 0x66, 0x03, 0xdc, 0x8d, 0xb3, 0x68, 0xa0, 0xb4, 0xcc, 0x02, + 0x6d, 0xa3, 0x87, 0x3f, 0x01, 0x9a, 0x17, 0xb9, 0xec, 0xb3, 0xaf, 0xa6, 0x84, 0xc0, 0x7a, 0xe2, + 0xcf, 0x18, 0xad, 0xf4, 0x2a, 0xfd, 0xba, 0x8b, 0x6b, 0xe2, 0xc0, 0xae, 0x29, 0xe9, 0x49, 0x16, + 0x31, 0xc9, 0x92, 0x80, 0x79, 0x48, 0x59, 0x43, 0xca, 0x8e, 0x09, 0xb9, 0x45, 0xe4, 0x83, 0xe1, + 0xf7, 0xa0, 0x11, 0x32, 0x15, 0x48, 0x9e, 0x6a, 0x2e, 0x12, 0x5a, 0x45, 0x5e, 0x19, 0x22, 0x5f, + 0xa1, 0xc3, 0x93, 0x34, 0xd3, 0x1e, 0xb6, 0xca, 0x34, 0x93, 0x8a, 0xae, 0xf7, 0xaa, 0xfd, 0xc6, + 0x70, 0xe8, 0xac, 0x98, 0x76, 0xca, 0xed, 0x39, 0x6f, 0x8d, 0xea, 0x7c, 0x21, 0x3a, 0x4d, 0xb4, + 0x9c, 0xbb, 0x6d, 0x7e, 0x13, 0x35, 0x26, 0xf4, 0x3c, 0x65, 0xb4, 0x66, 0x4d, 0x98, 0x35, 0x79, + 0x0a, 0xfb, 0xe1, 0x3c, 0xf1, 0x67, 0x3c, 0xf0, 0xd0, 0x8c, 0xb1, 0x60, 0xcb, 0xd3, 0x0d, 0xa4, + 0xed, 0xe5, 0x61, 0x53, 0xc7, 0xd8, 0xc0, 0x7c, 0xa4, 0x0f, 0x9d, 0xc0, 0x57, 0xcc, 0xfb, 0xe1, + 0xc7, 0x59, 0xc1, 0xdf, 0x44, 0x7e, 0xcb, 0xe0, 0x5f, 0x0c, 0x6c, 0x99, 0x8f, 0xa0, 0x8d, 0x4c, + 0x76, 0x9d, 0x4a, 0xa6, 0x94, 0x71, 0xbe, 0xb5, 0x24, 0x9e, 0x2e, 0x50, 0x72, 0x01, 0xad, 0x90, + 0x05, 0xdc, 0xac, 0x3d, 0x13, 0x52, 0xb4, 0x8e, 0xd6, 0x9f, 0xdc, 0x6f, 0x7d, 0x94, 0x6b, 0x5e, + 0x19, 0x89, 0x35, 0xbe, 0x1d, 0x96, 0x31, 0xf2, 0x0c, 0x68, 0x61, 0x31, 0x12, 0x72, 0x8a, 0x3e, + 0x55, 0xde, 0x33, 0x60, 0x2b, 0x7f, 0xe5, 0xf1, 0xd7, 0x42, 0x4e, 0x4d, 0x52, 0x65, 0x5b, 0x7f, + 0x07, 0x0f, 0xef, 0x10, 0x96, 0x6e, 0xc8, 0x5e, 0x78, 0x03, 0x73, 0xfc, 0xb7, 0x9a, 0x63, 0x79, + 0x27, 0x78, 0xfb, 0x2f, 0xa0, 0x19, 0xb2, 0xc8, 0xcf, 0x62, 0x8d, 0xee, 0x68, 0x13, 0xcd, 0x1d, + 0xdc, 0x6b, 0xce, 0xbc, 0x0e, 0x94, 0x18, 0x23, 0xe4, 0x3d, 0xc0, 0xb2, 0x0d, 0xba, 0x8d, 0x7a, + 0xe7, 0xfe, 0xc3, 0x29, 
0x6f, 0xce, 0xb8, 0xd2, 0x6e, 0x3d, 0x2a, 0xda, 0x23, 0xff, 0x43, 0x43, + 0x69, 0x5f, 0x6a, 0x2f, 0x64, 0xb1, 0x3f, 0xa7, 0xad, 0x5e, 0xa5, 0x5f, 0x73, 0x01, 0xa1, 0x91, + 0x41, 0xc8, 0x39, 0x10, 0x95, 0x8d, 0xbd, 0x62, 0x7c, 0xf2, 0x13, 0x6b, 0xf7, 0x2a, 0xfd, 0xc6, + 0xf0, 0xf0, 0x56, 0xdd, 0x4f, 0xd9, 0xb8, 0xa8, 0x86, 0xa6, 0x95, 0xdb, 0x51, 0x2b, 0x10, 0xd9, + 0x87, 0xcd, 0xef, 0x82, 0x27, 0x9e, 0x48, 0x68, 0xa7, 0x57, 0xed, 0xd7, 0xdd, 0x0d, 0xb3, 0xfd, + 0x98, 0x98, 0x97, 0xa9, 0x78, 0x32, 0xa5, 0x3b, 0xf6, 0x65, 0x9a, 0x35, 0xe9, 0xc2, 0x96, 0xc0, + 0xb1, 0xf0, 0x63, 0x4a, 0x7a, 0x95, 0xfe, 0x96, 0xbb, 0xd8, 0x93, 0x97, 0xd0, 0xc6, 0xd7, 0x1a, + 0xb2, 0x88, 0x27, 0x1c, 0xc7, 0x69, 0x17, 0xfb, 0xa2, 0xb7, 0xfa, 0x32, 0x66, 0x47, 0x2c, 0x72, + 0x5b, 0xda, 0x2e, 0x72, 0x3e, 0x79, 0x00, 0x4d, 0xe9, 0x6b, 0xe6, 0xc5, 0x7c, 0xc6, 0x35, 0x0b, + 0xe9, 0x1e, 0x96, 0x68, 0x18, 0xec, 0xcc, 0x42, 0xdd, 0x37, 0xd0, 0x59, 0x3d, 0x40, 0x72, 0x04, + 0x35, 0x7b, 0xfe, 0x95, 0xdf, 0xb9, 0x3f, 0xcb, 0xed, 0x5e, 0xc2, 0xde, 0x5d, 0x13, 0x4a, 0x3a, + 0x50, 0x9d, 0xb2, 0x79, 0xfe, 0xa9, 0x98, 0x25, 0x79, 0x0c, 0x35, 0x1c, 0x29, 0xfc, 0x45, 0x1a, + 0xc3, 0xbf, 0x1d, 0xfb, 0x4d, 0x39, 0xc5, 0x37, 0xe5, 0xe0, 0x64, 0xb9, 0x96, 0x74, 0xbc, 0xf6, + 0xbc, 0xd2, 0x4d, 0x81, 0xdc, 0x1e, 0x81, 0x3b, 0x32, 0x8f, 0x6e, 0x66, 0xfe, 0xd3, 0x87, 0xb3, + 0xac, 0x78, 0xc2, 0xe1, 0x9f, 0x40, 0xcc, 0x9c, 0x84, 0xe9, 0x28, 0xe6, 0xd7, 0xab, 0x79, 0x4e, + 0x5a, 0x65, 0xed, 0xf9, 0xf8, 0xf2, 0x78, 0xc2, 0xf5, 0xb7, 0x6c, 0xec, 0x04, 0x62, 0x36, 0xc8, + 0x35, 0x83, 0x85, 0x66, 0x10, 0xc4, 0x9c, 0x25, 0x7a, 0x30, 0x11, 0x13, 0x99, 0x06, 0x25, 0x1c, + 0x7f, 0xef, 0xf1, 0x06, 0xa6, 0x3c, 0xfa, 0x15, 0x00, 0x00, 0xff, 0xff, 0xa9, 0x25, 0x52, 0x75, + 0x0d, 0x06, 0x00, 0x00, } From 0318f5bec1eae30a6133cf0ab597c290890cd8d6 Mon Sep 17 00:00:00 2001 From: Anoop Panicker Date: Tue, 2 Oct 2018 18:06:28 -0700 Subject: [PATCH 163/163] update nebula wrapper, fix tests, refactor --- build.gradle | 5 +- .../conductor/client/http/WorkflowClient.java | 2 +- .../conductor/common/run/Workflow.java | 22 +- contribs/build.gradle | 4 +- .../core/execution/DeciderService.java | 2 +- .../core/execution/ParametersUtils.java | 4 +- .../core/execution/WorkflowExecutor.java | 129 +-- .../core/metadata/MetadataMapperService.java | 22 +- .../conductor/service/ExecutionService.java | 8 +- .../conductor/service/WorkflowService.java | 13 +- .../metadata/MetadataMapperServiceTest.java | 23 +- jersey/build.gradle | 2 +- .../integration/AbstractEndToEndTest.java | 28 +- .../AbstractWorkflowServiceTest.java | 827 ++++++++++-------- .../tests/integration/End2EndTests.java | 16 +- .../WorkflowLegacyMigrationTest.java | 4 +- .../integration/WorkflowServiceTest.java | 1 - .../conductor/tests/utils/TestModule.java | 3 +- 18 files changed, 604 insertions(+), 511 deletions(-) rename core/src/test/java/com/netflix/conductor/{ => core}/metadata/MetadataMapperServiceTest.java (89%) diff --git a/build.gradle b/build.gradle index 4473de01a8..60f9adf280 100644 --- a/build.gradle +++ b/build.gradle @@ -4,12 +4,12 @@ buildscript { } dependencies { - classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' + classpath 'com.netflix.nebula:gradle-extra-configurations-plugin:4.0.1' classpath 'org.apache.ant:ant:1.9.7' } } plugins { - id 'nebula.netflixoss' version '5.1.1' + id 'nebula.netflixoss' version '6.0.3' } // Establish version and status @@ -22,7 +22,6 @@ apply from: "$rootDir/versionsOfDependencies.gradle" subprojects { apply plugin: 'nebula.netflixoss' - 
apply plugin: 'nebula.provided-base' apply plugin: 'java' apply plugin: 'idea' apply plugin: 'eclipse' diff --git a/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java b/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java index 096d901c91..83cb51bf55 100644 --- a/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java +++ b/client/src/main/java/com/netflix/conductor/client/http/WorkflowClient.java @@ -247,7 +247,7 @@ public List getWorkflows(String name, String correlationId, boolean in */ private void populateWorkflowOutput(Workflow workflow) { if (StringUtils.isNotBlank(workflow.getExternalOutputPayloadStoragePath())) { - WorkflowTaskMetrics.incrementExternalPayloadUsedCount(workflow.getWorkflowType(), ExternalPayloadStorage.Operation.READ.name(), ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.name()); + WorkflowTaskMetrics.incrementExternalPayloadUsedCount(workflow.getWorkflowName(), ExternalPayloadStorage.Operation.READ.name(), ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT.name()); workflow.setOutput(downloadFromExternalStorage(ExternalPayloadStorage.PayloadType.WORKFLOW_OUTPUT, workflow.getExternalOutputPayloadStoragePath())); } } diff --git a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java index 242b4e42b2..08b032c939 100644 --- a/common/src/main/java/com/netflix/conductor/common/run/Workflow.java +++ b/common/src/main/java/com/netflix/conductor/common/run/Workflow.java @@ -251,20 +251,23 @@ public String getWorkflowType() { * * @param workflowType Workflow type */ + @Deprecated public void setWorkflowType(String workflowType) { this.workflowType = workflowType; } - /** * @return the version */ + @Deprecated public int getVersion() { return version; } + /** * @param version the version to set */ + @Deprecated public void setVersion(int version) { this.version = version; } @@ -291,6 +294,7 @@ public void setReasonForIncompletion(String reasonForIncompletion) { public String getParentWorkflowId() { return parentWorkflowId; } + /** * @param parentWorkflowId the parentWorkflowId to set */ @@ -304,12 +308,14 @@ public void setParentWorkflowId(String parentWorkflowId) { public String getParentWorkflowTaskId() { return parentWorkflowTaskId; } + /** * @param parentWorkflowTaskId the parentWorkflowTaskId to set */ public void setParentWorkflowTaskId(String parentWorkflowTaskId) { this.parentWorkflowTaskId = parentWorkflowTaskId; } + /** * @return the schemaVersion Version of the schema for the workflow definition */ @@ -318,6 +324,7 @@ public int getSchemaVersion() { getWorkflowDefinition().getSchemaVersion() : schemaVersion; } + /** * @param schemaVersion the schemaVersion to set */ @@ -446,12 +453,9 @@ public Workflow copy() { copy.setParentWorkflowId(parentWorkflowId); copy.setParentWorkflowTaskId(parentWorkflowTaskId); copy.setReRunFromWorkflowId(reRunFromWorkflowId); - copy.setWorkflowType(workflowType); - copy.setVersion(version); copy.setCorrelationId(correlationId); copy.setEvent(event); copy.setReasonForIncompletion(reasonForIncompletion); - copy.setSchemaVersion(schemaVersion); copy.setWorkflowDefinition(workflowDefinition); copy.setTasks(tasks.stream() @@ -462,7 +466,7 @@ public Workflow copy() { @Override public String toString() { - return workflowDefinition.getName() + "." + workflowDefinition.getVersion() + "/" + workflowId + "." + status; + return getWorkflowName() + "." 
+ getWorkflowVersion() + "/" + workflowId + "." + status; } @Override @@ -471,7 +475,7 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Workflow workflow = (Workflow) o; return getEndTime() == workflow.getEndTime() && - getVersion() == workflow.getVersion() && + getWorkflowVersion() == workflow.getWorkflowVersion() && getSchemaVersion() == workflow.getSchemaVersion() && getStatus() == workflow.getStatus() && Objects.equals(getWorkflowId(), workflow.getWorkflowId()) && @@ -480,7 +484,7 @@ public boolean equals(Object o) { Objects.equals(getTasks(), workflow.getTasks()) && Objects.equals(getInput(), workflow.getInput()) && Objects.equals(getOutput(), workflow.getOutput()) && - Objects.equals(getWorkflowType(), workflow.getWorkflowType()) && + Objects.equals(getWorkflowName(), workflow.getWorkflowName()) && Objects.equals(getCorrelationId(), workflow.getCorrelationId()) && Objects.equals(getReRunFromWorkflowId(), workflow.getReRunFromWorkflowId()) && Objects.equals(getReasonForIncompletion(), workflow.getReasonForIncompletion()) && @@ -503,8 +507,8 @@ public int hashCode() { getTasks(), getInput(), getOutput(), - getWorkflowType(), - getVersion(), + getWorkflowName(), + getWorkflowVersion(), getCorrelationId(), getReRunFromWorkflowId(), getReasonForIncompletion(), diff --git a/contribs/build.gradle b/contribs/build.gradle index ee6019c4da..d2e767de9f 100644 --- a/contribs/build.gradle +++ b/contribs/build.gradle @@ -13,8 +13,8 @@ dependencies { compile "io.nats:java-nats-streaming:${revNatsStreaming}" - provided "javax.ws.rs:jsr311-api:${revJsr311Api}" - provided "io.swagger:swagger-jaxrs:${revSwagger}" + compileOnly "javax.ws.rs:jsr311-api:${revJsr311Api}" + compile "io.swagger:swagger-jaxrs:${revSwagger}" testCompile "org.eclipse.jetty:jetty-server:${revJetteyServer}" testCompile "org.eclipse.jetty:jetty-servlet:${revJettyServlet}" diff --git a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java index 7afd0e0514..d7879dce27 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/DeciderService.java @@ -415,7 +415,7 @@ Workflow populateWorkflowAndTaskData(Workflow workflow) { if (StringUtils.isNotBlank(workflow.getExternalInputPayloadStoragePath())) { // download the workflow input from external storage here and plug it into the workflow Map workflowInputParams = externalPayloadStorageUtils.downloadPayload(workflow.getExternalInputPayloadStoragePath()); - Monitors.recordExternalPayloadStorageUsage(workflow.getWorkflowType(), ExternalPayloadStorage.Operation.READ.toString(), ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.toString()); + Monitors.recordExternalPayloadStorageUsage(workflow.getWorkflowName(), ExternalPayloadStorage.Operation.READ.toString(), ExternalPayloadStorage.PayloadType.WORKFLOW_INPUT.toString()); workflowInstance.setInput(workflowInputParams); workflowInstance.setExternalInputPayloadStoragePath(null); } diff --git a/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java b/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java index 7326f501e6..7db08828a4 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/ParametersUtils.java @@ -84,8 +84,8 @@ public Map getTaskInputV2(Map input, Workflow 
wo workflowParams.put("workflowId", workflow.getWorkflowId()); workflowParams.put("parentWorkflowId", workflow.getParentWorkflowId()); workflowParams.put("parentWorkflowTaskId", workflow.getParentWorkflowTaskId()); - workflowParams.put("workflowType", workflow.getWorkflowType()); - workflowParams.put("version", workflow.getVersion()); + workflowParams.put("workflowType", workflow.getWorkflowName()); + workflowParams.put("version", workflow.getWorkflowVersion()); workflowParams.put("correlationId", workflow.getCorrelationId()); workflowParams.put("reasonForIncompletion", workflow.getReasonForIncompletion()); workflowParams.put("schemaVersion", workflow.getSchemaVersion()); diff --git a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java index 8fb20f6edd..a5320eb3fb 100644 --- a/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java +++ b/core/src/main/java/com/netflix/conductor/core/execution/WorkflowExecutor.java @@ -51,12 +51,9 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; -import java.util.Set; import java.util.function.Predicate; import java.util.stream.Collectors; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Preconditions.checkNotNull; import static com.netflix.conductor.common.metadata.tasks.Task.Status.CANCELED; import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; @@ -71,7 +68,6 @@ import static java.util.Comparator.comparingInt; import static java.util.stream.Collectors.groupingBy; import static java.util.stream.Collectors.maxBy; -import static java.util.stream.Collectors.toSet; /** * @author Viren Workflow services provider interface @@ -263,7 +259,7 @@ public String startWorkflow( String event, Map taskToDomain ) { - metadataMapperService.populateTaskDefinitions(workflowDefinition); + workflowDefinition = metadataMapperService.populateTaskDefinitions(workflowDefinition); // perform validations validateWorkflow(workflowDefinition, workflowInput, externalInputPayloadStoragePath); @@ -276,8 +272,6 @@ public String startWorkflow( wf.setWorkflowId(workflowId); wf.setCorrelationId(correlationId); wf.setWorkflowDefinition(workflowDefinition); - wf.setWorkflowType(workflowDefinition.getName()); - wf.setVersion(workflowDefinition.getVersion()); wf.setInput(workflowInput); wf.setExternalInputPayloadStoragePath(externalInputPayloadStoragePath); wf.setStatus(WorkflowStatus.RUNNING); @@ -305,37 +299,20 @@ public String startWorkflow( * @throws ApplicationException if the validation fails */ private void validateWorkflow(WorkflowDef workflowDef, Map workflowInput, String externalStoragePath) { - String workflowName = workflowDef.getName(); - if (workflowName == null) { - workflowName = ""; - } try { - //because everything else is a system defined task - Set missingTaskDefs = workflowDef.collectTasks().stream() - .filter(task -> task.getType().equals(TaskType.SIMPLE.name())) - .map(WorkflowTask::getName) - .filter(task -> metadataDAO.getTaskDef(task) == null) - .collect(toSet()); - - if (!missingTaskDefs.isEmpty()) { - logger.error("Cannot find the task definitions for the following tasks used in workflow: {}", missingTaskDefs); - throw new ApplicationException(INVALID_INPUT, "Cannot find the task definitions for the following tasks used in workflow: " + missingTaskDefs); - 
} - //Check if the input to the workflow is not null if (workflowInput == null && StringUtils.isBlank(externalStoragePath)) { - logger.error("The input for the workflow '{}' cannot be NULL", workflowName); + logger.error("The input for the workflow '{}' cannot be NULL", workflowDef.getName()); throw new ApplicationException(INVALID_INPUT, "NULL input passed when starting workflow"); } } catch (Exception e) { - Monitors.recordWorkflowStartError(workflowName, WorkflowContext.get().getClientApp()); + Monitors.recordWorkflowStartError(workflowDef.getName(), WorkflowContext.get().getClientApp()); throw e; } } /** - * * @param workflowId * @return * @throws ApplicationException @@ -370,9 +347,13 @@ public String rerun(RerunWorkflowRequest request) { } /** - * - * @param workflowId - * @throws ApplicationException + * @param workflowId the id of the workflow to be restarted + * @throws ApplicationException in the following cases: + *
+ * <ul>
+ * <li>Workflow is not in a terminal state</li>
+ * <li>Workflow definition is not found</li>
+ * <li>Workflow is deemed non-restartable as per workflow definition</li>
+ * </ul>
    */ public void rewind(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); @@ -381,8 +362,11 @@ public void rewind(String workflowId) { } WorkflowDef workflowDef = Optional.ofNullable(workflow.getWorkflowDefinition()) - .orElse(metadataDAO.get(workflow.getWorkflowType(), workflow.getVersion()).get()); - if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non completed workflows when the configuration is set to false + .orElse(metadataDAO.get(workflow.getWorkflowName(), workflow.getWorkflowVersion()) + .orElseThrow(() -> new ApplicationException(NOT_FOUND, String.format("Unable to find definition for %s", workflowId))) + ); + + if (!workflowDef.isRestartable() && workflow.getStatus().equals(WorkflowStatus.COMPLETED)) { // Can only restart non-completed workflows when the configuration is set to false throw new ApplicationException(CONFLICT, String.format("WorkflowId: %s is an instance of WorkflowDef: %s and version: %d and is non restartable", workflowId, workflowDef.getName(), workflowDef.getVersion())); } @@ -403,6 +387,7 @@ public void rewind(String workflowId) { * Gets the last instance of each failed task and reschedule each * Gets all cancelled tasks and schedule all of them except JOIN (join should change status to INPROGRESS) * Switch workflow back to RUNNING status and aall decider. + * * @param workflowId */ public void retry(String workflowId) { @@ -417,7 +402,8 @@ public void retry(String workflowId) { List failedTasks = getFailedTasksToRetry(workflow); List cancelledTasks = workflow.getTasks().stream() - .filter(x->CANCELED.equals(x.getStatus())).collect(Collectors.toList()); + .filter(t -> CANCELED.equals(t.getStatus())) + .collect(Collectors.toList()); if (failedTasks.isEmpty()) { throw new ApplicationException(CONFLICT, @@ -450,6 +436,7 @@ public void retry(String workflowId) { /** * Get all failed and cancelled tasks. * for failed tasks - get one for each task reference name(latest failed using seq id) + * * @param workflow * @return list of latest failed tasks, one for each task reference reference type. */ @@ -458,11 +445,15 @@ List getFailedTasksToRetry(Workflow workflow) { return workflow.getTasks().stream() .filter(x -> FAILED.equals(x.getStatus())) .collect(groupingBy(Task::getReferenceTaskName, maxBy(comparingInt(Task::getSeq)))) - .values().stream().filter(Optional::isPresent).map(Optional::get).collect(Collectors.toList()); + .values().stream() + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); } /** * Reschedule a task + * * @param task failed or cancelled task * @return new instance of a task with "SCHEDULED" status */ @@ -486,10 +477,9 @@ public Task getPendingTaskByWorkflow(String taskReferenceName, String workflowId .findFirst() // There can only be one task by a given reference name running at a time. .orElse(null); } - + /** - * * @param wf * @throws ApplicationException */ @@ -509,7 +499,13 @@ void completeWorkflow(Workflow wf) { throw new ApplicationException(CONFLICT, msg); } + // FIXME Backwards compatibility for legacy workflows already running. + // This code will be removed in a future version. 
+ if (workflow.getWorkflowDefinition() == null) { + workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); + } deciderService.updateWorkflowOutput(wf, null); + workflow.setStatus(WorkflowStatus.COMPLETED); workflow.setOutput(wf.getOutput()); workflow.setExternalOutputPayloadStoragePath(wf.getExternalOutputPayloadStoragePath()); @@ -520,7 +516,10 @@ void completeWorkflow(Workflow wf) { // If the following task, for some reason fails, the sweep will take care of this again! if (workflow.getParentWorkflowId() != null) { Workflow parent = executionDAO.getWorkflow(workflow.getParentWorkflowId(), false); - WorkflowDef parentDef = Optional.ofNullable(parent.getWorkflowDefinition()).orElse(metadataDAO.get(parent.getWorkflowType(), parent.getVersion()).get()); + WorkflowDef parentDef = Optional.ofNullable(parent.getWorkflowDefinition()) + .orElse(metadataDAO.get(parent.getWorkflowName(), parent.getWorkflowVersion()) + .orElseThrow(() -> new ApplicationException(NOT_FOUND, String.format("Unable to find parent workflow definition for %s", wf.getWorkflowId()))) + ); logger.debug("Completed sub-workflow {}, deciding parent workflow {}", wf.getWorkflowId(), wf.getParentWorkflowId()); Task parentWorkflowTask = executionDAO.getTask(workflow.getParentWorkflowTaskId()); @@ -538,36 +537,13 @@ void completeWorkflow(Workflow wf) { logger.debug("Removed workflow {} from decider queue", wf.getWorkflowId()); } - @VisibleForTesting - Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { - // TODO: Update to use ServiceUtils once this is merged with dev - // FIXME: Add messages. - checkNotNull(workflowName); - checkArgument(StringUtils.isNotBlank(workflowName)); - checkArgument(workflowVersion > 0); - - return metadataDAO.get(workflowName, workflowVersion); - } - - @VisibleForTesting - Optional lookupLatestWorkflowDefinition(String workflowName) { - // FIXME: Add messages. - checkNotNull(workflowName); - checkArgument(StringUtils.isNotBlank(workflowName)); - - return metadataDAO.getLatest(workflowName); - } - public void terminateWorkflow(String workflowId, String reason) { Workflow workflow = executionDAO.getWorkflow(workflowId, true); workflow.setStatus(WorkflowStatus.TERMINATED); - - terminateWorkflow(workflow, reason, null); } /** - * * @param workflow * @param reason * @param failureWorkflow @@ -580,10 +556,9 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo // FIXME Backwards compatibility for legacy workflows already running. // This code will be removed in a future version. - if(workflow.getWorkflowDefinition() == null) { + if (workflow.getWorkflowDefinition() == null) { workflow = metadataMapperService.populateWorkflowWithDefinitions(workflow); } - deciderService.updateWorkflowOutput(workflow, null); String workflowId = workflow.getWorkflowId(); @@ -657,8 +632,7 @@ public void terminateWorkflow(Workflow workflow, String reason, String failureWo } /** - * - * @param taskResult + * @param taskResult the task result to be updated * @throws ApplicationException */ public void updateTask(TaskResult taskResult) { @@ -670,10 +644,9 @@ public void updateTask(TaskResult taskResult) { String workflowId = taskResult.getWorkflowInstanceId(); Workflow workflowInstance = executionDAO.getWorkflow(workflowId); - // FIXME Backwards compatibility for legacy workflows already running. // This code will be removed in a future version. 
- if(workflowInstance.getWorkflowDefinition() == null) { + if (workflowInstance.getWorkflowDefinition() == null) { workflowInstance = metadataMapperService.populateWorkflowWithDefinitions(workflowInstance); } @@ -691,6 +664,7 @@ public void updateTask(TaskResult taskResult) { } task.setOutputData(taskResult.getOutputData()); task.setOutputMessage(taskResult.getOutputMessage()); + task.setExternalOutputPayloadStoragePath(taskResult.getExternalOutputPayloadStoragePath()); task.setReasonForIncompletion(taskResult.getReasonForIncompletion()); task.setWorkerId(taskResult.getWorkerId()); executionDAO.updateTask(task); @@ -730,25 +704,6 @@ public void updateTask(TaskResult taskResult) { //This gives the ability to look at workflow and see what tasks have failed at a high level. if (FAILED.equals(task.getStatus()) || FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) { workflowInstance.getFailedReferenceTaskNames().add(task.getReferenceTaskName()); - - //TODO is the following needed? - //In case of a FAILED_WITH_TERMINAL_ERROR the workflow will be terminated and the output of the task is never copied - //ensuring the task output is copied to the workflow here - if (FAILED_WITH_TERMINAL_ERROR.equals(task.getStatus())) { - //Update the task in the workflow instance - Task taskByRefName = workflowInstance.getTaskByRefName(task.getReferenceTaskName()); - taskByRefName.setStatus(task.getStatus()); - taskByRefName.setOutputData(task.getOutputData()); - taskByRefName.setReasonForIncompletion(task.getReasonForIncompletion()); - taskByRefName.setWorkerId(task.getWorkerId()); - taskByRefName.setCallbackAfterSeconds(task.getCallbackAfterSeconds()); - WorkflowDef workflowDef = workflowInstance.getWorkflowDefinition(); - Map outputData = task.getOutputData(); - if (!workflowDef.getOutputParameters().isEmpty()) { - outputData = parametersUtils.getTaskInput(workflowDef.getOutputParameters(), workflowInstance, null, null); - } - workflowInstance.setOutput(outputData); - } executionDAO.updateWorkflow(workflowInstance); logger.debug("Task: {} has a {} status and the Workflow has been updated with failed task reference", task, task.getStatus()); } @@ -784,7 +739,6 @@ public void updateTask(TaskResult taskResult) { Monitors.recordTaskExecutionTime(task.getTaskDefName(), duration, true, task.getStatus()); Monitors.recordTaskExecutionTime(task.getTaskDefName(), lastDuration, false, task.getStatus()); } - } public List getTasks(String taskType, String startKey, int count) { @@ -900,7 +854,6 @@ public void pauseWorkflow(String workflowId) { } /** - * * @param workflowId * @throws IllegalStateException */ @@ -908,7 +861,7 @@ public void resumeWorkflow(String workflowId) { Workflow workflow = executionDAO.getWorkflow(workflowId, false); if (!workflow.getStatus().equals(WorkflowStatus.PAUSED)) { throw new IllegalStateException("The workflow " + workflowId + " is not PAUSED so cannot resume. 
" + - "Current status is " + workflow.getStatus().name()); + "Current status is " + workflow.getStatus().name()); } workflow.setStatus(WorkflowStatus.RUNNING); executionDAO.updateWorkflow(workflow); @@ -916,7 +869,6 @@ public void resumeWorkflow(String workflowId) { } /** - * * @param workflowId * @param taskReferenceName * @param skipTaskRequest @@ -1172,7 +1124,6 @@ private void addTaskToQueue(final List tasks) { } private void terminate(final Workflow workflow, TerminateWorkflowException tw) { - if (!workflow.getStatus().isTerminal()) { workflow.setStatus(tw.workflowStatus); } diff --git a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java index 1028628e5b..7636d8138c 100644 --- a/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java +++ b/core/src/main/java/com/netflix/conductor/core/metadata/MetadataMapperService.java @@ -13,6 +13,7 @@ import com.netflix.conductor.core.execution.TerminateWorkflowException; import com.netflix.conductor.dao.MetadataDAO; import com.netflix.conductor.metrics.Monitors; +import com.netflix.conductor.service.utils.ServiceUtils; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -62,20 +63,13 @@ public WorkflowDef lookupForWorkflowDefinition(String name, Integer version) { @VisibleForTesting Optional lookupWorkflowDefinition(String workflowName, int workflowVersion) { - // TODO: Update to use ServiceUtils once this is merged with dev - // FIXME: Add messages. - checkNotNull(workflowName); - checkArgument(StringUtils.isNotBlank(workflowName)); - checkArgument(workflowVersion > 0); - + ServiceUtils.checkNotNullOrEmpty(workflowName, "Workflow name must be specified when searching for a definition"); return metadataDAO.get(workflowName, workflowVersion); } @VisibleForTesting Optional lookupLatestWorkflowDefinition(String workflowName) { - checkNotNull(workflowName, "Workflow name cannot be null when searching for a definition"); - checkArgument(StringUtils.isNotBlank(workflowName), "Workflow name cannot be blank when searching for a definition"); - + ServiceUtils.checkNotNullOrEmpty(workflowName, "Workflow name must be specified when searching for a definition"); return metadataDAO.getLatest(workflowName); } @@ -88,7 +82,7 @@ public Workflow populateWorkflowWithDefinitions(Workflow workflow) { return wd; }); - workflowDefinition.collectTasks().stream().forEach( + workflowDefinition.collectTasks().forEach( workflowTask -> { if (shouldPopulateDefinition(workflowTask)) { workflowTask.setTaskDefinition(metadataDAO.getTaskDef(workflowTask.getName())); @@ -104,8 +98,8 @@ public Workflow populateWorkflowWithDefinitions(Workflow workflow) { } public WorkflowDef populateTaskDefinitions(WorkflowDef workflowDefinition) { - workflowDefinition.collectTasks().stream().forEach( - workflowTask -> populateWorkflowTaskWithDefinition(workflowTask) + workflowDefinition.collectTasks().forEach( + this::populateWorkflowTaskWithDefinition ); checkNotEmptyDefinitions(workflowDefinition); return workflowDefinition; @@ -142,8 +136,8 @@ private void checkNotEmptyDefinitions(WorkflowDef workflowDefinition) { // Obtain the names of the tasks with missing definitions Set missingTaskDefinitionNames = workflowDefinition.collectTasks().stream() - .filter(workflowTask -> shouldPopulateDefinition(workflowTask)) - .map(workflowTask -> workflowTask.getName()) + 
.filter(MetadataMapperService::shouldPopulateDefinition) + .map(WorkflowTask::getName) .collect(Collectors.toSet()); if (!missingTaskDefinitionNames.isEmpty()) { diff --git a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java index 1775f17abc..70035772aa 100644 --- a/core/src/main/java/com/netflix/conductor/service/ExecutionService.java +++ b/core/src/main/java/com/netflix/conductor/service/ExecutionService.java @@ -211,9 +211,9 @@ public List getTasks(String taskType, String startKey, int count) { } public Task getTask(String taskId) { - Task task = executionDAO.getTask(taskId); - task = metadataMapperService.populateTaskWithDefinition(task); - return task; + return Optional.ofNullable(executionDAO.getTask(taskId)) + .map(t -> metadataMapperService.populateTaskWithDefinition(t)) + .orElse(null); } public Task getPendingTaskForWorkflow(String taskReferenceName, String workflowId) { @@ -333,7 +333,7 @@ public List getWorkflowInstances(String workflowName, String correlati List workflows = executionDAO.getWorkflowsByCorrelationId(correlationId, includeTasks); List result = new LinkedList<>(); for (Workflow wf : workflows) { - if (wf.getWorkflowType().equals(workflowName) && (includeClosed || wf.getStatus().equals(Workflow.WorkflowStatus.RUNNING))) { + if (wf.getWorkflowName().equals(workflowName) && (includeClosed || wf.getStatus().equals(Workflow.WorkflowStatus.RUNNING))) { result.add(wf); } } diff --git a/core/src/main/java/com/netflix/conductor/service/WorkflowService.java b/core/src/main/java/com/netflix/conductor/service/WorkflowService.java index 99f520400f..2811bc5622 100644 --- a/core/src/main/java/com/netflix/conductor/service/WorkflowService.java +++ b/core/src/main/java/com/netflix/conductor/service/WorkflowService.java @@ -70,6 +70,13 @@ public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { WorkflowDef workflowDef = startWorkflowRequest.getWorkflowDef(); if (workflowDef == null) { + workflowDef = metadataService.getWorkflowDef(startWorkflowRequest.getName(), startWorkflowRequest.getVersion()); + if (workflowDef == null) { + throw new ApplicationException(ApplicationException.Code.NOT_FOUND, + String.format("No such workflow found by name: %s, version: %d", startWorkflowRequest.getName(), + startWorkflowRequest.getVersion())); + } + return workflowExecutor.startWorkflow( startWorkflowRequest.getName(), startWorkflowRequest.getVersion(), @@ -80,12 +87,6 @@ public String startWorkflow(StartWorkflowRequest startWorkflowRequest) { startWorkflowRequest.getTaskToDomain() ); } else { - workflowDef = metadataService.getWorkflowDef(startWorkflowRequest.getName(), startWorkflowRequest.getVersion()); - if (workflowDef == null) { - throw new ApplicationException(ApplicationException.Code.NOT_FOUND, - String.format("No such workflow found by name: %s, version: %d", startWorkflowRequest.getName(), - startWorkflowRequest.getVersion())); - } return workflowExecutor.startWorkflow( startWorkflowRequest.getWorkflowDef(), startWorkflowRequest.getInput(), diff --git a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java b/core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java similarity index 89% rename from core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java rename to core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java index 956f8e0bda..1816bf1ce6 100644 --- 
a/core/src/test/java/com/netflix/conductor/metadata/MetadataMapperServiceTest.java +++ b/core/src/test/java/com/netflix/conductor/core/metadata/MetadataMapperServiceTest.java @@ -1,4 +1,4 @@ -package com.netflix.conductor.metadata; +package com.netflix.conductor.core.metadata; import com.google.common.collect.ImmutableList; import com.netflix.conductor.common.metadata.tasks.TaskDef; @@ -8,7 +8,6 @@ import com.netflix.conductor.common.metadata.workflow.WorkflowTask; import com.netflix.conductor.core.execution.ApplicationException; import com.netflix.conductor.core.execution.TerminateWorkflowException; -import com.netflix.conductor.core.metadata.MetadataMapperService; import com.netflix.conductor.dao.MetadataDAO; import org.junit.Test; import org.junit.runner.RunWith; @@ -21,6 +20,7 @@ import static junit.framework.Assert.assertEquals; import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.verifyZeroInteractions; @@ -200,6 +200,25 @@ public void testExceptionWhenWorkflowDefinitionNotAvailable() { verify(metadataDAO).getLatest(workflowDefinitionName); } + @Test(expected = ApplicationException.class) + public void testLookupWorkflowDefinition() { + String workflowName = "test"; + when(metadataDAO.get(workflowName, 0)).thenReturn(Optional.of(new WorkflowDef())); + Optional optionalWorkflowDef = metadataMapperService.lookupWorkflowDefinition(workflowName, 0); + assertTrue(optionalWorkflowDef.isPresent()); + + metadataMapperService.lookupWorkflowDefinition(null, 0); + } + + @Test(expected = ApplicationException.class) + public void testLookupLatestWorkflowDefinition() { + String workflowName = "test"; + when(metadataDAO.getLatest(workflowName)).thenReturn(Optional.of(new WorkflowDef())); + Optional optionalWorkflowDef = metadataMapperService.lookupLatestWorkflowDefinition(workflowName); + assertTrue(optionalWorkflowDef.isPresent()); + + metadataMapperService.lookupLatestWorkflowDefinition(null); + } private WorkflowDef createWorkflowDefinition(String name) { WorkflowDef workflowDefinition = new WorkflowDef(); diff --git a/jersey/build.gradle b/jersey/build.gradle index 87613a3363..7e6ba28843 100644 --- a/jersey/build.gradle +++ b/jersey/build.gradle @@ -9,5 +9,5 @@ dependencies { compile "io.swagger:swagger-jaxrs:${revSwagger}" compile "com.sun.jersey:jersey-bundle:${revJerseyBundle}" - provided "javax.servlet:javax.servlet-api:${revServletApi}" + compileOnly "javax.servlet:javax.servlet-api:${revServletApi}" } diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java index 6e7b1c9c69..42e00bae44 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractEndToEndTest.java @@ -7,6 +7,7 @@ import com.netflix.conductor.common.run.Workflow; import org.junit.Test; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import java.util.Optional; @@ -22,18 +23,15 @@ public abstract class AbstractEndToEndTest { private static final String DEFAULT_NULL_VALUE = "null"; @Test - public void testEphemeralWorkflowsWithStoredTasks() throws Exception { - createAndRegisterTaskDefinitions("storedTaskDef", 5); - - WorkflowDef workflowDefinition = 
createWorkflowDefinition("testEphemeralWorkflow"); + public void testEphemeralWorkflowsWithStoredTasks() { + String workflowExecutionName = "testEphemeralWorkflow"; + createAndRegisterTaskDefinitions("storedTaskDef", 5); + WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName); WorkflowTask workflowTask1 = createWorkflowTask("storedTaskDef1"); WorkflowTask workflowTask2 = createWorkflowTask("storedTaskDef2"); + workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2)); - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflow"; String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); assertNotNull(workflowId); @@ -44,21 +42,17 @@ public void testEphemeralWorkflowsWithStoredTasks() throws Exception { } @Test - public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { - WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowWithEphemeralTasks"); + public void testEphemeralWorkflowsWithEphemeralTasks() { + String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; + WorkflowDef workflowDefinition = createWorkflowDefinition(workflowExecutionName); WorkflowTask workflowTask1 = createWorkflowTask("ephemeralTask1"); TaskDef taskDefinition1 = createTaskDefinition("ephemeralTaskDef1"); workflowTask1.setTaskDefinition(taskDefinition1); - WorkflowTask workflowTask2 = createWorkflowTask("ephemeralTask2"); TaskDef taskDefinition2 = createTaskDefinition("ephemeralTaskDef2"); workflowTask2.setTaskDefinition(taskDefinition2); - - workflowDefinition.getTasks().add(workflowTask1); - workflowDefinition.getTasks().add(workflowTask2); - - String workflowExecutionName = "ephemeralWorkflowWithEphemeralTasks"; + workflowDefinition.getTasks().addAll(Arrays.asList(workflowTask1, workflowTask2)); String workflowId = startWorkflow(workflowExecutionName, workflowDefinition); assertNotNull(workflowId); @@ -76,7 +70,7 @@ public void testEphemeralWorkflowsWithEphemeralTasks() throws Exception { } @Test - public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() throws Exception { + public void testEphemeralWorkflowsWithEphemeralAndStoredTasks() { createAndRegisterTaskDefinitions("storedTask", 1); WorkflowDef workflowDefinition = createWorkflowDefinition("testEphemeralWorkflowsWithEphemeralAndStoredTasks"); diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java index 1d7006223d..ac61fdf22a 100644 --- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java +++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/AbstractWorkflowServiceTest.java @@ -75,6 +75,10 @@ import static com.netflix.conductor.common.metadata.tasks.Task.Status.COMPLETED; import static com.netflix.conductor.common.metadata.tasks.Task.Status.FAILED; +import static com.netflix.conductor.common.metadata.tasks.Task.Status.IN_PROGRESS; +import static com.netflix.conductor.common.metadata.tasks.Task.Status.SCHEDULED; +import static com.netflix.conductor.common.metadata.tasks.Task.Status.TIMED_OUT; +import static com.netflix.conductor.common.run.Workflow.WorkflowStatus.RUNNING; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; @@ 
-137,7 +141,7 @@ public abstract class AbstractWorkflowServiceTest { private static final String TEST_WORKFLOW_NAME_3 = "junit_test_wf3"; @Before - public void init() throws Exception { + public void init() { System.setProperty("EC2_REGION", "us-east-1"); System.setProperty("EC2_AVAILABILITY_ZONE", "us-east-1c"); if (registered) { @@ -265,7 +269,7 @@ private TaskDef notFoundSafeGetTaskDef(String name) { } @Test - public void testWorkflowWithNoTasks() throws Exception { + public void testWorkflowWithNoTasks() { WorkflowDef empty = new WorkflowDef(); empty.setName("empty_workflow"); @@ -340,7 +344,7 @@ public void testTaskDefTemplate() throws Exception { @Test - public void testWorkflowSchemaVersion() throws Exception { + public void testWorkflowSchemaVersion() { WorkflowDef ver2 = new WorkflowDef(); ver2.setSchemaVersion(2); ver2.setName("Test_schema_version2"); @@ -412,7 +416,7 @@ public void testForkJoin() throws Exception { Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); printTaskStatuses(workflow, "T1 completed"); task3 = workflowExecutionService.poll("junit_task_3", "test"); @@ -446,12 +450,12 @@ public void testForkJoin() throws Exception { workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); printTaskStatuses(workflow, "T2 T3 completed"); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus()); assertTrue("Found " + workflow.getTasks().stream().map(t -> t.getReferenceTaskName() + "." 
+ t.getStatus()).collect(Collectors.toList()), workflow.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t4"))); Task t4 = workflowExecutionService.poll("junit_task_4", "test"); @@ -466,7 +470,7 @@ public void testForkJoin() throws Exception { } @Test - public void testForkJoinNested() throws Exception { + public void testForkJoinNested() { createForkJoinNestedWorkflow(); @@ -477,7 +481,7 @@ public void testForkJoinNested() throws Exception { Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); @@ -526,7 +530,7 @@ public void testForkJoinNested() throws Exception { wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet @@ -544,7 +548,7 @@ public void testForkJoinNested() throws Exception { Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); assertTrue("Found only this: " + pendingTasks, wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); @@ -564,7 +568,7 @@ public void testForkJoinNested() throws Exception { } @Test - public void testForkJoinNestedWithSubWorkflow() throws Exception { + public void testForkJoinNestedWithSubWorkflow() { createForkJoinNestedWorkflowWithSubworkflow(); @@ -575,7 +579,7 @@ public void testForkJoinNestedWithSubWorkflow() throws Exception { Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t11"))); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t12"))); @@ -625,7 +629,7 @@ public void testForkJoinNestedWithSubWorkflow() throws Exception { wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); assertTrue(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t19"))); assertFalse(wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("t15"))); //Not there yet @@ -643,7 +647,7 @@ public void testForkJoinNestedWithSubWorkflow() throws Exception { Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS); wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals(WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals(RUNNING, wf.getStatus()); Set pendingTasks = wf.getTasks().stream().filter(t -> !t.getStatus().isTerminal()).map(t -> t.getReferenceTaskName()).collect(Collectors.toSet()); assertTrue("Found only this: " + pendingTasks, 
wf.getTasks().stream().anyMatch(t -> t.getReferenceTaskName().equals("join1"))); @@ -662,7 +666,7 @@ public void testForkJoinNestedWithSubWorkflow() throws Exception { } @Test - public void testForkJoinFailure() throws Exception { + public void testForkJoinFailure() { try { createForkJoinWorkflow(); @@ -698,7 +702,7 @@ public void testForkJoinFailure() throws Exception { workflowExecutionService.updateTask(t2); Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(wf); - assertEquals("Found " + wf.getTasks(), WorkflowStatus.RUNNING, wf.getStatus()); + assertEquals("Found " + wf.getTasks(), RUNNING, wf.getStatus()); t3 = workflowExecutionService.poll("junit_task_3", "test"); assertNotNull(t3); @@ -717,7 +721,7 @@ public void testForkJoinFailure() throws Exception { @SuppressWarnings("unchecked") @Test - public void testDynamicForkJoinLegacy() throws Exception { + public void testDynamicForkJoinLegacy() { try { createDynamicForkJoinWorkflowDefsLegacy(); @@ -786,7 +790,7 @@ public void testDynamicForkJoinLegacy() throws Exception { @SuppressWarnings("unchecked") @Test - public void testDynamicForkJoin() throws Exception { + public void testDynamicForkJoin() { createDynamicForkJoinWorkflowDefs(); @@ -803,7 +807,7 @@ public void testDynamicForkJoin() throws Exception { System.out.println("testDynamicForkJoin.wfid=" + workflowId); Workflow workflow = workflowExecutor.getWorkflow(workflowId, true); assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); assertEquals(1, workflow.getTasks().size()); Task task1 = workflowExecutionService.poll("junit_task_1", "test"); @@ -848,7 +852,7 @@ public void testDynamicForkJoin() throws Exception { workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); assertEquals(2, workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).count()); assertTrue(workflow.getTasks().stream().filter(t -> t.getTaskType().equals("junit_task_2")).allMatch(t -> t.getWorkflowTask() != null)); assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 6, workflow.getTasks().size()); @@ -879,7 +883,7 @@ public void testDynamicForkJoin() throws Exception { workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); assertEquals("Found " + workflow.getTasks().stream().map(Task::getTaskType).collect(Collectors.toList()), 7, workflow.getTasks().size()); Task task4 = workflowExecutionService.poll("junit_task_4", "test"); @@ -910,7 +914,7 @@ public void testDynamicForkJoin() throws Exception { metadataService.updateTaskDef(taskDef); } - private void createForkJoinWorkflow() throws Exception { + private void createForkJoinWorkflow() { WorkflowDef workflowDef = new WorkflowDef(); workflowDef.setName(FORK_JOIN_WF); @@ -963,7 +967,7 @@ private void createForkJoinWorkflow() throws Exception { } - private void createForkJoinWorkflowWithZeroRetry() throws Exception { + private void 
createForkJoinWorkflowWithZeroRetry() { WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_WF + "_2"); @@ -1016,7 +1020,7 @@ private void createForkJoinWorkflowWithZeroRetry() throws Exception { } - private void createForkJoinNestedWorkflow() throws Exception { + private void createForkJoinNestedWorkflow() { WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_NESTED_WF); @@ -1081,7 +1085,7 @@ private void createForkJoinNestedWorkflow() throws Exception { metadataService.updateWorkflowDef(def); } - private void createForkJoinNestedWorkflowWithSubworkflow() throws Exception { + private void createForkJoinNestedWorkflowWithSubworkflow() { WorkflowDef def = new WorkflowDef(); def.setName(FORK_JOIN_NESTED_WF); @@ -1157,7 +1161,7 @@ private void createForkJoinNestedWorkflowWithSubworkflow() throws Exception { } - private void createDynamicForkJoinWorkflowDefs() throws Exception { + private void createDynamicForkJoinWorkflowDefs() { WorkflowDef def = new WorkflowDef(); def.setName(DYNAMIC_FORK_JOIN_WF); @@ -1200,7 +1204,7 @@ private void createDynamicForkJoinWorkflowDefs() throws Exception { } @SuppressWarnings("deprecation") - private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { + private void createDynamicForkJoinWorkflowDefsLegacy() { WorkflowDef def = new WorkflowDef(); def.setName(DYNAMIC_FORK_JOIN_WF_LEGACY); @@ -1237,7 +1241,7 @@ private void createDynamicForkJoinWorkflowDefsLegacy() throws Exception { } - private void createConditionalWF() throws Exception { + private void createConditionalWF() { WorkflowTask wft1 = new WorkflowTask(); wft1.setName("junit_task_1"); @@ -1312,14 +1316,14 @@ private void createConditionalWF() throws Exception { @Test - public void testDefDAO() throws Exception { + public void testDefDAO() { List taskDefs = metadataService.getTaskDefs(); assertNotNull(taskDefs); assertTrue(!taskDefs.isEmpty()); } @Test - public void testSimpleWorkflowFailureWithTerminalError() throws Exception { + public void testSimpleWorkflowFailureWithTerminalError() { clearWorkflows(); @@ -1344,11 +1348,11 @@ public void testSimpleWorkflowFailureWithTerminalError() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); assertNotNull(es); - assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(es.getReasonForIncompletion(), RUNNING, es.getStatus()); es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled. 
         boolean failed = false;
@@ -1389,12 +1393,10 @@ public void testSimpleWorkflowFailureWithTerminalError() throws Exception {
         outputParameters.remove("validationErrors");
         metadataService.updateWorkflowDef(found);
-
     }

-
     @Test
-    public void testSimpleWorkflow() throws Exception {
+    public void testSimpleWorkflow() {

         clearWorkflows();
@@ -1409,16 +1411,14 @@ public void testSimpleWorkflow() throws Exception {
         logger.info("testSimpleWorkflow.wfid= {}", workflowInstanceId);
         assertNotNull(workflowInstanceId);

-        Workflow es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size());    //The very first task is the one that should be scheduled.
+        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
+        assertNotNull(workflow);
+        assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus());
+        workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
+        assertNotNull(workflow);
+        assertEquals(RUNNING, workflow.getStatus());
+        assertEquals(1, workflow.getTasks().size());    //The very first task is the one that should be scheduled.

         boolean failed = false;
         try {
@@ -1428,7 +1428,7 @@ public void testSimpleWorkflow() throws Exception {
         }
         assertTrue(failed);

-        // Polling for the first task should return the same task as before
+        // Polling for the first task
         Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
         assertNotNull(task);
         assertEquals("junit_task_1", task.getTaskType());
@@ -1442,19 +1442,14 @@ public void testSimpleWorkflow() throws Exception {
         assertNotNull(tasks);
         assertEquals(1, tasks.size());
         task = tasks.get(0);
-
-        Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false);
-        System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput());
         assertEquals(workflowInstanceId, task.getWorkflowInstanceId());

         task.getOutputData().put("op", task1Op);
         task.setStatus(COMPLETED);
         workflowExecutionService.updateTask(task);

-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, false);
-        assertNotNull(es);
-        assertNotNull(es.getOutput());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
+        workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, false);
+        assertNotNull(workflow);
+        assertNotNull(workflow.getOutput());

         task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
         assertNotNull(task);
@@ -1464,7 +1459,6 @@ public void testSimpleWorkflow() throws Exception {
         assertNotNull("Found=" + task.getInputData(), task2Input);
         assertEquals(task1Op, task2Input);
-
         task2Input = (String) task.getInputData().get("tp1");
         assertNotNull(task2Input);
         assertEquals(inputParam1, task2Input);
@@ -1473,17 +1467,15 @@ public void testSimpleWorkflow() throws Exception {
         task.setReasonForIncompletion("unit test failure");
         workflowExecutionService.updateTask(task);

-
-        es = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
-        tasks = es.getTasks();
+        workflow = workflowExecutionService.getExecutionStatus(workflowInstanceId, true);
+        assertNotNull(workflow);
+        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
+        tasks = workflow.getTasks();
         assertNotNull(tasks);
         assertEquals(2, tasks.size());
-        assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3"));
-        assertEquals("task1.Done", es.getOutput().get("o3"));
-
+        assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3"));
+        assertEquals("task1.Done", workflow.getOutput().get("o3"));
     }

     @Test
@@ -1497,16 +1489,16 @@ public void testSimpleWorkflowWithResponseTimeout() throws Exception {
         workflowInput.put("param1", inputParam1);
         workflowInput.put("param2", "p2 value");
         String workflowId = startOrLoadWorkflowExecution("RTOWF", 1, correlationId, workflowInput, null, null);
-        System.out.println("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId);
+        logger.debug("testSimpleWorkflowWithResponseTimeout.wfid=" + workflowId);
         assertNotNull(workflowId);

         Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(1, workflow.getTasks().size());    //The very first task is the one that should be scheduled.
         assertEquals(1, queueDAO.getSize("task_rt"));

-        // Polling for the first task should return the same task as before
+        // Polling for the first task should return the first task
         Task task = workflowExecutionService.poll("task_rt", "task1.junit.worker.testTimeout");
         assertNotNull(task);
         assertEquals("task_rt", task.getTaskType());
@@ -1521,19 +1513,45 @@ public void testSimpleWorkflowWithResponseTimeout() throws Exception {
         workflowExecutor.decide(workflowId);
         assertEquals(1, queueDAO.getSize("task_rt"));

+        // The first task would be timed_out and a new task will be scheduled
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(2, workflow.getTasks().size());
+        assertTrue(workflow.getTasks().stream().allMatch(t -> t.getReferenceTaskName().equals("task_rt_t1")));
+        assertEquals(TIMED_OUT, workflow.getTasks().get(0).getStatus());
+        assertEquals(SCHEDULED, workflow.getTasks().get(1).getStatus());

         // Polling now should get the same task back because it should have been put back in the queue
         Task taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker");
         assertNotNull(taskAgain);

+        // update task with callback after seconds greater than the response timeout
+        taskAgain.setStatus(IN_PROGRESS);
+        taskAgain.setCallbackAfterSeconds(20);
+        workflowExecutionService.updateTask(taskAgain);
+
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(2, workflow.getTasks().size());
+        assertEquals(IN_PROGRESS, workflow.getTasks().get(1).getStatus());
+
+        // wait for callback after seconds which is longer than response timeout seconds and then call decide
+        Thread.sleep(20000);
+        workflowExecutor.decide(workflowId);
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+
+        // Poll for task again
+        taskAgain = workflowExecutionService.poll("task_rt", "task1.junit.worker");
+        assertNotNull(taskAgain);
+        taskAgain.getOutputData().put("op", "task1.Done");
         taskAgain.setStatus(COMPLETED);
         workflowExecutionService.updateTask(taskAgain);

+        // poll for next task
         task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker.testTimeout");
         assertNotNull(task);
         assertEquals("junit_task_2", task.getTaskType());
@@ -1543,16 +1561,14 @@ public void testSimpleWorkflowWithResponseTimeout() throws Exception {
         task.setReasonForIncompletion("unit test failure");
         workflowExecutionService.updateTask(task);
-
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
         assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
-
     }

     @Test
-    public void testWorkflowRerunWithSubWorkflows() throws Exception {
-        // Execute a workflow
+    public void testWorkflowRerunWithSubWorkflows() {
+        // Execute a workflow with sub-workflow
         String workflowId = this.runWorkflowWithSubworkflow();
         // Check it completed
         Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
@@ -1563,8 +1579,8 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception {
         // Now lets pickup the first task in the sub workflow and rerun it from there
         String subWorkflowId = null;
         for (Task task : workflow.getTasks()) {
-            if (task.getTaskType().equalsIgnoreCase("SUB_WORKFLOW")) {
-                subWorkflowId = task.getOutputData().get("subWorkflowId").toString();
+            if (task.getTaskType().equalsIgnoreCase(SubWorkflow.NAME)) {
+                subWorkflowId = task.getOutputData().get(SubWorkflow.SUB_WORKFLOW_ID).toString();
             }
         }
         assertNotNull(subWorkflowId);
@@ -1577,30 +1593,30 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception {
         }
         assertNotNull(subWorkflowTask1);

-        RerunWorkflowRequest request = new RerunWorkflowRequest();
-        request.setReRunFromTaskId(subWorkflowTask1.getTaskId());
+        RerunWorkflowRequest rerunWorkflowRequest = new RerunWorkflowRequest();
+        rerunWorkflowRequest.setReRunFromTaskId(subWorkflowTask1.getTaskId());

         Map<String, Object> newInput = new HashMap<>();
         newInput.put("p1", "1");
         newInput.put("p2", "2");
-        request.setTaskInput(newInput);
+        rerunWorkflowRequest.setTaskInput(newInput);

         String correlationId = "unit_test_sw_new";
         Map<String, Object> input = new HashMap<>();
         input.put("param1", "New p1 value");
         input.put("param2", "New p2 value");
-        request.setCorrelationId(correlationId);
-        request.setWorkflowInput(input);
+        rerunWorkflowRequest.setCorrelationId(correlationId);
+        rerunWorkflowRequest.setWorkflowInput(input);

-        request.setReRunFromWorkflowId(workflowId);
-        request.setReRunFromTaskId(subWorkflowTask1.getTaskId());
+        rerunWorkflowRequest.setReRunFromWorkflowId(workflowId);
+        rerunWorkflowRequest.setReRunFromTaskId(subWorkflowTask1.getTaskId());

         // Rerun
-        workflowExecutor.rerun(request);
+        workflowExecutor.rerun(rerunWorkflowRequest);

         // The main WF and the sub WF should be in RUNNING state
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(2, workflow.getTasks().size());
         assertEquals(correlationId, workflow.getCorrelationId());
         assertEquals("New p1 value", workflow.getInput().get("param1"));
@@ -1608,11 +1624,11 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception {

         subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
         assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
+        assertEquals(RUNNING, subWorkflow.getStatus());
         // Since we are re running from the sub workflow task, there
         // should be only 1 task that is SCHEDULED
         assertEquals(1, subWorkflow.getTasks().size());
-        assertEquals(Status.SCHEDULED, subWorkflow.getTasks().get(0).getStatus());
+        assertEquals(SCHEDULED, subWorkflow.getTasks().get(0).getStatus());

         // Now execute the task
         Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
@@ -1626,7 +1642,7 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception {

         subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
         assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
+        assertEquals(RUNNING, subWorkflow.getStatus());
         assertEquals(2, subWorkflow.getTasks().size());

         // Poll for second task of the sub workflow and execute it
@@ -1650,7 +1666,7 @@ public void testWorkflowRerunWithSubWorkflows() throws Exception {
     }

     @Test
-    public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception {
+    public void testSimpleWorkflowWithTaskSpecificDomain() {

         long startTimeTimestamp = System.currentTimeMillis();
@@ -1660,11 +1676,11 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception {
         metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2_SW, 1);

         String correlationId = "unit_test_sw";
-        Map<String, Object> input = new HashMap<String, Object>();
+        Map<String, Object> input = new HashMap<>();
         String inputParam1 = "p1 value";
         input.put("param1", inputParam1);
         input.put("param2", "p2 value");
-        Map<String, String> taskToDomain = new HashMap<String, String>();
+        Map<String, String> taskToDomain = new HashMap<>();
         taskToDomain.put("junit_task_3", "domain1");
         taskToDomain.put("junit_task_2", "domain1");
@@ -1674,69 +1690,62 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception {
         task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain1");
         assertNull(task);

-        String wfid = startOrLoadWorkflowExecution("simpleWorkflowWithTaskSpecificDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain);
-        System.out.println("testSimpleWorkflow.wfid=" + wfid);
-        assertNotNull(wfid);
-        Workflow wf = workflowExecutor.getWorkflow(wfid, false);
-        assertNotNull(wf);
-
-        Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus());
-
-
-        es = workflowExecutionService.getExecutionStatus(wfid, true);
-        assertNotNull(es);
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
-        assertEquals(1, es.getTasks().size());    //The very first task is the one that should be scheduled.
+        String workflowId = startOrLoadWorkflowExecution("simpleWorkflowWithTaskSpecificDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain);
+        assertNotNull(workflowId);
+        Workflow workflow = workflowExecutor.getWorkflow(workflowId, false);
+        assertNotNull(workflow);
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
+        assertEquals(1, workflow.getTasks().size());    //The very first task is the one that should be scheduled.
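Note: testSimpleWorkflowWithTaskSpecificDomain exercises task-to-domain routing: a task type mapped to a domain is queued under that domain and stays invisible to pollers that do not name it. A minimal sketch of the behaviour the assertions in this test rely on (worker names are illustrative; the poll overloads are the ones used throughout this file):

    // Route junit_task_3 to "domain1" before starting the workflow.
    Map<String, String> taskToDomain = new HashMap<>();
    taskToDomain.put("junit_task_3", "domain1");

    // A domain-less poll finds nothing; the task sits in the domain queue.
    Task miss = workflowExecutionService.poll("junit_task_3", "someWorker");            // null
    Task hit = workflowExecutionService.poll("junit_task_3", "someWorker", "domain1");  // the scheduled task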
// Check Size Map sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain1:junit_task_3", "junit_task_3")); assertEquals(sizes.get("domain1:junit_task_3").intValue(), 1); assertEquals(sizes.get("junit_task_3").intValue(), 0); - // Polling for the first task should return the same task as before + // Polling for the first task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker"); assertNull(task); task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain1"); assertNotNull(task); assertEquals("junit_task_3", task.getTaskType()); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(wfid, task.getWorkflowInstanceId()); + assertEquals(workflowId, task.getWorkflowInstanceId()); - String task1Op = "task1.Done"; - List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + + List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 10); assertNotNull(tasks); assertEquals(1, tasks.size()); task = tasks.get(0); + assertEquals(workflowId, task.getWorkflowInstanceId()); - Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); - System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput()); - assertEquals(wfid, task.getWorkflowInstanceId()); + String task1Op = "task1.Done"; task.getOutputData().put("op", task1Op); task.setStatus(COMPLETED); workflowExecutionService.updateTask(task); - es = workflowExecutionService.getExecutionStatus(wfid, false); - assertNotNull(es); - assertNotNull(es.getOutput()); - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); + workflow = workflowExecutionService.getExecutionStatus(workflowId, false); + assertNotNull(workflow); + assertEquals(RUNNING, workflow.getStatus()); task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); assertEquals("junit_task_1", task.getTaskType()); - Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, false); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(essw.getTaskToDomain()); - assertEquals(essw.getTaskToDomain().size(), 2); + assertNotNull(workflow.getTaskToDomain()); + assertEquals(workflow.getTaskToDomain().size(), 2); task.setStatus(COMPLETED); task.setReasonForIncompletion("unit test failure"); workflowExecutionService.updateTask(task); - task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker", "domain1"); assertNotNull(task); assertEquals("junit_task_2", task.getTaskType()); @@ -1746,25 +1755,22 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { task.setReasonForIncompletion("unit test failure"); workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - tasks = es.getTasks(); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + tasks = workflow.getTasks(); assertNotNull(tasks); assertEquals(2, tasks.size()); - - assertTrue("Found " + 
es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); + assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); + assertEquals("task1.Done", workflow.getOutput().get("o3")); Predicate pollDataWithinTestTimes = pollData -> pollData.getLastPollTime() != 0 && pollData.getLastPollTime() > startTimeTimestamp; - List pddata = workflowExecutionService.getPollData("junit_task_3").stream() + List pollData = workflowExecutionService.getPollData("junit_task_3").stream() .filter(pollDataWithinTestTimes) .collect(Collectors.toList()); - - assertTrue(pddata.size() == 2); - for (PollData pd : pddata) { + assertEquals(2, pollData.size()); + for (PollData pd : pollData) { assertEquals(pd.getQueueName(), "junit_task_3"); assertEquals(pd.getWorkerId(), "task1.junit.worker"); assertTrue(pd.getLastPollTime() != 0); @@ -1782,12 +1788,11 @@ public void testSimpleWorkflowWithTaskSpecificDomain() throws Exception { count++; } } - assertTrue(count == 2); - + assertEquals(2, count); } @Test - public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { + public void testSimpleWorkflowWithAllTaskInOneDomain() { clearWorkflows(); createWorkflowDefForDomain(); @@ -1808,29 +1813,24 @@ public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { task = workflowExecutionService.poll("junit_task_2", "task1.junit.worker", "domain12"); assertNull(task); - String wfid = startOrLoadWorkflowExecution("simpleWorkflowWithTasksInOneDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain); - System.out.println("testSimpleWorkflow.wfid=" + wfid); - assertNotNull(wfid); - Workflow wf = workflowExecutor.getWorkflow(wfid, false); - assertNotNull(wf); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(es.getReasonForIncompletion(), WorkflowStatus.RUNNING, es.getStatus()); - + String workflowId = startOrLoadWorkflowExecution("simpleWorkflowWithTasksInOneDomain", LINEAR_WORKFLOW_T1_T2_SW, 1, correlationId, input, null, taskToDomain); + assertNotNull(workflowId); - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); - assertEquals(1, es.getTasks().size()); //The very first task is the one that should be scheduled. + Workflow workflow = workflowExecutor.getWorkflow(workflowId, false); + assertNotNull(workflow); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(workflow.getReasonForIncompletion(), RUNNING, workflow.getStatus()); + assertEquals(RUNNING, workflow.getStatus()); + assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled. 
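Note: a recurring cleanup in these hunks swaps assertTrue on an equality expression for assertEquals. The change is behaviour-preserving; the gain is the failure message. A small illustration (the counts are hypothetical):

    assertTrue(pollData.size() == 2);  // fails with a bare java.lang.AssertionError
    assertEquals(2, pollData.size());  // fails with "expected:<2> but was:<3>"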
// Check Size Map sizes = workflowExecutionService.getTaskQueueSizes(Arrays.asList("domain11:junit_task_3", "junit_task_3")); assertEquals(sizes.get("domain11:junit_task_3").intValue(), 1); assertEquals(sizes.get("junit_task_3").intValue(), 0); - // Polling for the first task should return the same task as before + // Polling for the first task task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker"); assertNull(task); task = workflowExecutionService.poll("junit_task_3", "task1.junit.worker", "domain11"); @@ -1838,34 +1838,33 @@ public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { assertEquals("junit_task_3", task.getTaskType()); assertEquals("domain11", task.getDomain()); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(wfid, task.getWorkflowInstanceId()); + assertEquals(workflowId, task.getWorkflowInstanceId()); - String task1Op = "task1.Done"; List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); assertNotNull(tasks); assertEquals(1, tasks.size()); task = tasks.get(0); - Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); - System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput()); - assertEquals(wfid, task.getWorkflowInstanceId()); + String task1Op = "task1.Done"; + assertEquals(workflowId, task.getWorkflowInstanceId()); task.getOutputData().put("op", task1Op); task.setStatus(COMPLETED); workflowExecutionService.updateTask(task); - es = workflowExecutionService.getExecutionStatus(wfid, false); - assertNotNull(es); - assertNotNull(es.getOutput()); - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertEquals(workflowId, task.getWorkflowInstanceId()); + assertNotNull(workflow); + assertEquals(RUNNING, workflow.getStatus()); + assertEquals(2, workflow.getTasks().size()); task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); assertEquals("junit_task_1", task.getTaskType()); - Workflow essw = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); + + workflow = workflowExecutionService.getExecutionStatus(workflowId, false); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertNotNull(essw.getTaskToDomain()); - assertEquals(essw.getTaskToDomain().size(), 1); + assertNotNull(workflow.getTaskToDomain()); + assertEquals(workflow.getTaskToDomain().size(), 1); task.setStatus(COMPLETED); task.setReasonForIncompletion("unit test failure"); @@ -1884,20 +1883,18 @@ public void testSimpleWorkflowWithAllTaskInOneDomain() throws Exception { task.setReasonForIncompletion("unit test failure"); workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - tasks = es.getTasks(); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + tasks = workflow.getTasks(); assertNotNull(tasks); assertEquals(2, tasks.size()); - - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); + assertTrue("Found " + workflow.getOutput().toString(), 
workflow.getOutput().containsKey("o3")); + assertEquals("task1.Done", workflow.getOutput().get("o3")); } @After - public void clearWorkflows() throws Exception { + public void clearWorkflows() { List workflows = metadataService.getWorkflowDefs().stream() .map(WorkflowDef::getName) .collect(Collectors.toList()); @@ -1907,13 +1904,11 @@ public void clearWorkflows() throws Exception { workflowExecutor.terminateWorkflow(wfid, "cleanup"); } } - queueDAO.queuesDetail().keySet().forEach(queueName -> { - queueDAO.flush(queueName); - }); + queueDAO.queuesDetail().keySet().forEach(queueDAO::flush); } @Test - public void testLongRunning() throws Exception { + public void testLongRunning() { clearWorkflows(); @@ -1924,22 +1919,17 @@ public void testLongRunning() throws Exception { String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); - System.out.println("testLongRunning.wfid=" + wfid); - assertNotNull(wfid); - - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + String workflowId = startOrLoadWorkflowExecution(LONG_RUNNING, 1, correlationId, input, null, null); + System.out.println("testLongRunning.wfid=" + workflowId); + assertNotNull(workflowId); - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(RUNNING, workflow.getStatus()); // Check the queue - assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); - /// + assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Collections.singletonList("junit_task_1")).get("junit_task_1")); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); @@ -1953,22 +1943,19 @@ public void testLongRunning() throws Exception { assertEquals("p1 value", param1); assertEquals("p2 value", param2); - - String task1Op = "task1.In.Progress"; - task.getOutputData().put("op", task1Op); - task.setStatus(Status.IN_PROGRESS); + String task1Output = "task1.In.Progress"; + task.getOutputData().put("op", task1Output); + task.setStatus(IN_PROGRESS); task.setCallbackAfterSeconds(5); workflowExecutionService.updateTask(task); String taskId = task.getTaskId(); // Check the queue assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); - /// - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(RUNNING, workflow.getStatus()); // Polling for next task should not return anything Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); @@ -1984,13 +1971,13 @@ public void testLongRunning() throws Exception { assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); assertEquals(task.getTaskId(), taskId); - task1Op = "task1.Done"; + task1Output = "task1.Done"; List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); assertNotNull(tasks); assertEquals(1, tasks.size()); - 
assertEquals(wfid, task.getWorkflowInstanceId()); + assertEquals(workflowId, task.getWorkflowInstanceId()); task = tasks.get(0); - task.getOutputData().put("op", task1Op); + task.getOutputData().put("op", task1Output); task.setStatus(COMPLETED); workflowExecutionService.updateTask(task); @@ -1999,7 +1986,7 @@ public void testLongRunning() throws Exception { assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); String task2Input = (String) task.getInputData().get("tp2"); assertNotNull(task2Input); - assertEquals(task1Op, task2Input); + assertEquals(task1Output, task2Input); task2Input = (String) task.getInputData().get("tp1"); assertNotNull(task2Input); @@ -2009,19 +1996,16 @@ public void testLongRunning() throws Exception { task.setReasonForIncompletion("unit test failure"); workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - tasks = es.getTasks(); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + tasks = workflow.getTasks(); assertNotNull(tasks); assertEquals(2, tasks.size()); - - } @Test - public void testResetWorkflowInProgressTasks() throws Exception { + public void testResetWorkflowInProgressTasks() { clearWorkflows(); @@ -2038,12 +2022,12 @@ public void testResetWorkflowInProgressTasks() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // Check the queue assertEquals(Integer.valueOf(1), workflowExecutionService.getTaskQueueSizes(Arrays.asList("junit_task_1")).get("junit_task_1")); @@ -2064,7 +2048,7 @@ public void testResetWorkflowInProgressTasks() throws Exception { String task1Op = "task1.In.Progress"; task.getOutputData().put("op", task1Op); - task.setStatus(Status.IN_PROGRESS); + task.setStatus(IN_PROGRESS); task.setCallbackAfterSeconds(3600); workflowExecutionService.updateTask(task); String taskId = task.getTaskId(); @@ -2076,7 +2060,7 @@ public void testResetWorkflowInProgressTasks() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // Polling for next task should not return anything Task task2 = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); @@ -2134,7 +2118,7 @@ public void testResetWorkflowInProgressTasks() throws Exception { @Test - public void testConcurrentWorkflowExecutions() throws Exception { + public void testConcurrentWorkflowExecutions() { int count = 3; @@ -2211,7 +2195,7 @@ public void testConcurrentWorkflowExecutions() throws Exception { } @Test - public void testCaseStatements() throws Exception { + public void testCaseStatements() { createConditionalWF(); String correlationId = "testCaseStatements: " + System.currentTimeMillis(); @@ -2228,7 +2212,7 @@ public void testCaseStatements() throws Exception { assertNotNull(wfid); Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, 
es.getStatus()); Task task = workflowExecutionService.poll("junit_task_2", "junit"); assertNotNull(task); task.setStatus(COMPLETED); @@ -2250,7 +2234,7 @@ public void testCaseStatements() throws Exception { assertNotNull(wfid); es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); sequence = new String[]{"junit_task_1", "junit_task_3"}; validate(wfid, sequence, new String[]{SystemTaskType.DECISION.name(), SystemTaskType.DECISION.name(), "junit_task_1", "junit_task_3", SystemTaskType.DECISION.name()}, 5); @@ -2279,7 +2263,7 @@ public void testCaseStatements() throws Exception { } - private void validate(String wfid, String[] sequence, String[] executedTasks, int expectedTotalTasks) throws Exception { + private void validate(String wfid, String[] sequence, String[] executedTasks, int expectedTotalTasks) { for (int i = 0; i < sequence.length; i++) { String t = sequence[i]; Task task = getTask(t); @@ -2299,7 +2283,7 @@ private void validate(String wfid, String[] sequence, String[] executedTasks, in assertNotNull(workflow); assertTrue(!workflow.getTasks().isEmpty()); if (i < sequence.length - 1) { - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(RUNNING, workflow.getStatus()); } else { workflow = workflowExecutionService.getExecutionStatus(wfid, true); List workflowTasks = workflow.getTasks(); @@ -2314,7 +2298,7 @@ private void validate(String wfid, String[] sequence, String[] executedTasks, in } - private Task getTask(String taskType) throws Exception { + private Task getTask(String taskType) { Task task; int count = 2; do { @@ -2334,7 +2318,7 @@ private Task getTask(String taskType) throws Exception { } @Test - public void testRetries() throws Exception { + public void testRetries() { String taskName = "junit_task_2"; TaskDef taskDef = notFoundSafeGetTaskDef(taskName); @@ -2366,7 +2350,7 @@ public void testRetries() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); @@ -2407,7 +2391,7 @@ public void testRetries() throws Exception { } @Test - public void testSuccess() throws Exception { + public void testSuccess() { metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); @@ -2440,18 +2424,18 @@ public void testSuccess() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // The first task would be marked as scheduled assertEquals(1, es.getTasks().size()); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); // decideNow should be idempotent if re-run on the same state! 
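Note: the "decideNow should be idempotent" comment states an invariant these tests depend on: re-evaluating an unchanged workflow must not schedule duplicate tasks. Sketched with the same APIs used in this file (wfid as in the surrounding test):

    // Repeated decide calls on the same state leave the task list unchanged.
    workflowExecutor.decide(wfid);
    workflowExecutor.decide(wfid);
    Workflow es = workflowExecutionService.getExecutionStatus(wfid, true);
    assertEquals(1, es.getTasks().size());                     // still exactly one task
    assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); // still merely scheduled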
es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); assertEquals(1, es.getTasks().size()); Task t = es.getTasks().get(0); - assertEquals(Status.SCHEDULED, t.getStatus()); + assertEquals(SCHEDULED, t.getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); @@ -2460,7 +2444,7 @@ public void testSuccess() throws Exception { assertEquals(t.getTaskId(), task.getTaskId()); es = workflowExecutionService.getExecutionStatus(wfid, true); t = es.getTasks().get(0); - assertEquals(Status.IN_PROGRESS, t.getStatus()); + assertEquals(IN_PROGRESS, t.getStatus()); String taskId = t.getTaskId(); String param1 = (String) task.getInputData().get("p1"); @@ -2482,7 +2466,7 @@ public void testSuccess() throws Exception { if (wfTask.getTaskId().equals(taskId)) { assertEquals(COMPLETED, wfTask.getStatus()); } else { - assertEquals(Status.SCHEDULED, wfTask.getStatus()); + assertEquals(SCHEDULED, wfTask.getStatus()); } }); @@ -2516,7 +2500,7 @@ public void testSuccess() throws Exception { } @Test - public void testDeciderUpdate() throws Exception { + public void testDeciderUpdate() { metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); @@ -2547,7 +2531,7 @@ public void testDeciderUpdate() throws Exception { @Test @Ignore //Ignore for now, will improve this in the future - public void testFailurePoints() throws Exception { + public void testFailurePoints() { metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); @@ -2561,10 +2545,10 @@ public void testFailurePoints() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // The first task would be marked as scheduled assertEquals(1, es.getTasks().size()); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); @@ -2585,7 +2569,7 @@ public void testFailurePoints() throws Exception { if (wfTask.getTaskId().equals(taskId)) { assertEquals(COMPLETED, wfTask.getStatus()); } else { - assertEquals(Status.SCHEDULED, wfTask.getStatus()); + assertEquals(SCHEDULED, wfTask.getStatus()); } }); @@ -2646,10 +2630,10 @@ public void testDeciderMix() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // The first task would be marked as scheduled assertEquals(1, es.getTasks().size()); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); List> futures = new LinkedList<>(); for (int i = 0; i < 10; i++) { @@ -2665,20 +2649,20 @@ public void testDeciderMix() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // The first task would be marked as scheduled assertEquals(1, es.getTasks().size()); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, 
es.getTasks().get(0).getStatus()); // decideNow should be idempotent if re-run on the same state! workflowExecutor.decide(wfid); es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); assertEquals(1, es.getTasks().size()); Task t = es.getTasks().get(0); - assertEquals(Status.SCHEDULED, t.getStatus()); + assertEquals(SCHEDULED, t.getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); @@ -2687,7 +2671,7 @@ public void testDeciderMix() throws Exception { assertEquals(t.getTaskId(), task.getTaskId()); es = workflowExecutionService.getExecutionStatus(wfid, true); t = es.getTasks().get(0); - assertEquals(Status.IN_PROGRESS, t.getStatus()); + assertEquals(IN_PROGRESS, t.getStatus()); String taskId = t.getTaskId(); String param1 = (String) task.getInputData().get("p1"); @@ -2709,7 +2693,7 @@ public void testDeciderMix() throws Exception { if (wfTask.getTaskId().equals(taskId)) { assertEquals(COMPLETED, wfTask.getStatus()); } else { - assertEquals(Status.SCHEDULED, wfTask.getStatus()); + assertEquals(SCHEDULED, wfTask.getStatus()); } }); @@ -2728,7 +2712,7 @@ public void testDeciderMix() throws Exception { futures.clear(); es = workflowExecutionService.getExecutionStatus(wfid, true); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); assertEquals(2, es.getTasks().size()); System.out.println("Workflow tasks=" + es.getTasks()); @@ -2761,7 +2745,7 @@ public void testDeciderMix() throws Exception { } @Test - public void testFailures() throws Exception { + public void testFailures() { metadataService.getWorkflowDef(FORK_JOIN_WF, 1); String taskName = "junit_task_1"; @@ -2804,7 +2788,7 @@ public void testRetryWithForkJoin() throws Exception { Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(workflow.getStatus(), WorkflowStatus.RUNNING); + assertEquals(workflow.getStatus(), RUNNING); printTaskStatuses(workflow, "After retry called"); @@ -2855,7 +2839,7 @@ public void testRetryWithForkJoin() throws Exception { } @Test - public void testRetry() throws Exception { + public void testRetry() { String taskName = "junit_task_1"; TaskDef taskDef = notFoundSafeGetTaskDef(taskName); int retryCount = taskDef.getRetryCount(); @@ -2883,7 +2867,7 @@ public void testRetry() throws Exception { Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(RUNNING, workflow.getStatus()); task = getTask("junit_task_1"); assertNotNull(task); @@ -2901,7 +2885,7 @@ public void testRetry() throws Exception { printTaskStatuses(workflowId, "after retry"); workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(RUNNING, workflow.getStatus()); task = getTask("junit_task_1"); assertNotNull(task); @@ -2911,7 +2895,7 @@ public void testRetry() throws Exception { workflow = workflowExecutionService.getExecutionStatus(workflowId, true); assertNotNull(workflow); - assertEquals(WorkflowStatus.RUNNING, workflow.getStatus()); + assertEquals(RUNNING, workflow.getStatus()); task = getTask("junit_task_2"); assertNotNull(task); @@ -2934,7 +2918,7 @@ public 
void testRetry() throws Exception { } @Test - public void testNonRestartartableWorkflows() throws Exception { + public void testNonRestartartableWorkflows() { String taskName = "junit_task_1"; TaskDef taskDef = notFoundSafeGetTaskDef(taskName); taskDef.setRetryCount(0); @@ -2950,63 +2934,54 @@ public void testNonRestartartableWorkflows() throws Exception { assertFalse(StringUtils.isBlank(found.getFailureWorkflow())); String correlationId = "unit_test_1" + UUID.randomUUID().toString(); - Map input = new HashMap(); + Map input = new HashMap<>(); String inputParam1 = "p1 value"; input.put("param1", inputParam1); input.put("param2", "p2 value"); - String wfid = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input, null, null); - assertNotNull(wfid); + String workflowId = startOrLoadWorkflowExecution(JUNIT_TEST_WF_NON_RESTARTABLE, 1, correlationId, input, null, null); + assertNotNull(workflowId); Task task = getTask("junit_task_1"); task.setStatus(FAILED); workflowExecutionService.updateTask(task); - // If we get the full workflow here then, last task should be completed and the next task should be scheduled - Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.FAILED, es.getStatus()); - - workflowExecutor.rewind(es.getWorkflowId()); + Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.FAILED, workflow.getStatus()); - workflowExecutor.decide(wfid); + workflowExecutor.rewind(workflow.getWorkflowId()); // Polling for the first task should return the same task as before task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); assertEquals("junit_task_1", task.getTaskType()); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); - assertEquals(wfid, task.getWorkflowInstanceId()); - - workflowExecutor.decide(wfid); + assertEquals(workflowId, task.getWorkflowInstanceId()); - String task1Op = "task1.Done"; List tasks = workflowExecutionService.getTasks(task.getTaskType(), null, 1); assertNotNull(tasks); assertEquals(1, tasks.size()); task = tasks.get(0); + assertEquals(workflowId, task.getWorkflowInstanceId()); - Workflow workflow = workflowExecutionService.getExecutionStatus(task.getWorkflowInstanceId(), false); - System.out.println("task workflow = " + workflow.getWorkflowType() + "," + workflow.getInput()); - assertEquals(wfid, task.getWorkflowInstanceId()); + String task1Op = "task1.Done"; task.getOutputData().put("op", task1Op); task.setStatus(COMPLETED); workflowExecutionService.updateTask(task); - es = workflowExecutionService.getExecutionStatus(wfid, false); - assertNotNull(es); - assertNotNull(es.getOutput()); - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); + workflow = workflowExecutionService.getExecutionStatus(workflowId, false); + assertNotNull(workflow); + assertNotNull(workflow.getOutput()); task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); assertNotNull(task); assertEquals("junit_task_2", task.getTaskType()); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); + String task2Input = (String) task.getInputData().get("tp2"); assertNotNull("Found=" + task.getInputData(), task2Input); assertEquals(task1Op, task2Input); - task2Input = (String) task.getInputData().get("tp1"); assertNotNull(task2Input); 
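Note: the tp1/tp2 assertions here (and in testSimpleWorkflow earlier) presuppose that the workflow definition wires junit_task_2's inputs from the workflow input and from junit_task_1's output. A hedged sketch of that wiring; the actual definition is built elsewhere in this test class, so the mappings below are assumptions:

    WorkflowTask task2 = new WorkflowTask();
    task2.setName("junit_task_2");
    Map<String, Object> inputParams = new HashMap<>();
    inputParams.put("tp1", "${workflow.input.param1}"); // assumed: passthrough of the workflow input
    inputParams.put("tp2", "${t1.output.op}");          // assumed: junit_task_1's "op" output
    task2.setInputParameters(inputParams);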
assertEquals(inputParam1, task2Input); @@ -3015,26 +2990,23 @@ public void testNonRestartartableWorkflows() throws Exception { task.setReasonForIncompletion("unit test failure"); workflowExecutionService.updateTask(task); - - es = workflowExecutionService.getExecutionStatus(wfid, true); - assertNotNull(es); - assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - tasks = es.getTasks(); + workflow = workflowExecutionService.getExecutionStatus(workflowId, true); + assertNotNull(workflow); + assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus()); + tasks = workflow.getTasks(); assertNotNull(tasks); assertEquals(2, tasks.size()); - - assertTrue("Found " + es.getOutput().toString(), es.getOutput().containsKey("o3")); - assertEquals("task1.Done", es.getOutput().get("o3")); - + assertTrue("Found " + workflow.getOutput().toString(), workflow.getOutput().containsKey("o3")); + assertEquals("task1.Done", workflow.getOutput().get("o3")); expectedException.expect(ApplicationException.class); expectedException.expectMessage(String.format("is an instance of WorkflowDef: %s and version: %d and is non restartable", JUNIT_TEST_WF_NON_RESTARTABLE, 1)); - workflowExecutor.rewind(es.getWorkflowId()); + workflowExecutor.rewind(workflow.getWorkflowId()); } @Test - public void testRestart() throws Exception { + public void testRestart() { String taskName = "junit_task_1"; TaskDef taskDef = notFoundSafeGetTaskDef(taskName); taskDef.setRetryCount(0); @@ -3064,7 +3036,7 @@ public void testRestart() throws Exception { workflowExecutor.rewind(es.getWorkflowId()); es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); task = getTask("junit_task_1"); assertNotNull(task); @@ -3074,7 +3046,7 @@ public void testRestart() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); task = getTask("junit_task_2"); assertNotNull(task); @@ -3085,8 +3057,6 @@ public void testRestart() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - } @@ -3121,7 +3091,7 @@ public void testTimeout() throws Exception { // If we get the full workflow here then, last task should be completed and the next task should be scheduled Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); assertEquals("fond: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 1, es.getTasks().size()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); @@ -3142,9 +3112,9 @@ public void testTimeout() throws Exception { assertEquals("fond: " + es.getTasks().stream().map(Task::toString).collect(Collectors.toList()), 2, es.getTasks().size()); Task task1 = es.getTasks().get(0); - assertEquals(Status.TIMED_OUT, task1.getStatus()); + assertEquals(TIMED_OUT, task1.getStatus()); Task task2 = es.getTasks().get(1); - assertEquals(Status.SCHEDULED, task2.getStatus()); + assertEquals(SCHEDULED, task2.getStatus()); task = workflowExecutionService.poll(task2.getTaskDefName(), "task1.junit.worker"); assertNotNull(task); @@ -3158,8 +3128,8 @@ public void testTimeout() throws Exception { assertNotNull(es); assertEquals(2, 
es.getTasks().size()); - assertEquals(Status.TIMED_OUT, es.getTasks().get(0).getStatus()); - assertEquals(Status.TIMED_OUT, es.getTasks().get(1).getStatus()); + assertEquals(TIMED_OUT, es.getTasks().get(0).getStatus()); + assertEquals(TIMED_OUT, es.getTasks().get(1).getStatus()); assertEquals(WorkflowStatus.TIMED_OUT, es.getStatus()); assertEquals(1, queueDAO.getSize(WorkflowExecutor.DECIDER_QUEUE)); @@ -3167,11 +3137,10 @@ public void testTimeout() throws Exception { taskDef.setTimeoutSeconds(0); taskDef.setRetryCount(RETRY_COUNT); metadataService.updateTaskDef(taskDef); - } @Test - public void testReruns() throws Exception { + public void testReruns() { metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); @@ -3185,11 +3154,11 @@ public void testReruns() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // Check the tasks, at this time there should be 1 task assertEquals(es.getTasks().size(), 1); Task t = es.getTasks().get(0); - assertEquals(Status.SCHEDULED, t.getStatus()); + assertEquals(SCHEDULED, t.getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); @@ -3215,7 +3184,7 @@ public void testReruns() throws Exception { if (wfTask.getTaskId().equals(t.getTaskId())) { assertEquals(wfTask.getStatus(), COMPLETED); } else { - assertEquals(wfTask.getStatus(), Status.SCHEDULED); + assertEquals(wfTask.getStatus(), SCHEDULED); } }); @@ -3246,13 +3215,13 @@ public void testReruns() throws Exception { Workflow esRR = workflowExecutionService.getExecutionStatus(reRunwfid, true); assertNotNull(esRR); - assertEquals(esRR.getReasonForIncompletion(), WorkflowStatus.RUNNING, esRR.getStatus()); + assertEquals(esRR.getReasonForIncompletion(), RUNNING, esRR.getStatus()); // Check the tasks, at this time there should be 2 tasks // first one is skipped and the second one is scheduled assertEquals(esRR.getTasks().toString(), 2, esRR.getTasks().size()); assertEquals(COMPLETED, esRR.getTasks().get(0).getStatus()); Task tRR = esRR.getTasks().get(1); - assertEquals(esRR.getTasks().toString(), Status.SCHEDULED, tRR.getStatus()); + assertEquals(esRR.getTasks().toString(), SCHEDULED, tRR.getStatus()); assertEquals(tRR.getTaskType(), "junit_task_2"); task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker"); @@ -3282,10 +3251,10 @@ public void testReruns() throws Exception { es = workflowExecutionService.getExecutionStatus(reRunwfid1, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // Check the tasks, at this time there should be 1 task assertEquals(es.getTasks().size(), 1); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); assertNotNull(task); @@ -3305,13 +3274,10 @@ public void testReruns() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - } - @Test - public void testTaskSkipping() throws Exception { + public void testTaskSkipping() { String taskName = "junit_task_1"; TaskDef taskDef = notFoundSafeGetTaskDef(taskName); @@ -3335,10 +3301,10 @@ public void testTaskSkipping() throws Exception { Workflow es = 
workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); // Check the tasks, at this time there should be 3 task assertEquals(2, es.getTasks().size()); - assertEquals(Status.SCHEDULED, es.getTasks().get(0).getStatus()); + assertEquals(SCHEDULED, es.getTasks().get(0).getStatus()); assertEquals(Status.SKIPPED, es.getTasks().get(1).getStatus()); Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker"); @@ -3368,13 +3334,13 @@ public void testTaskSkipping() throws Exception { } else if (wfTask.getReferenceTaskName().equals("t2")) { assertEquals(Status.SKIPPED, wfTask.getStatus()); } else { - assertEquals(Status.SCHEDULED, wfTask.getStatus()); + assertEquals(SCHEDULED, wfTask.getStatus()); } }); task = workflowExecutionService.poll("junit_task_3", "task3.junit.worker"); assertNotNull(task); - assertEquals(Status.IN_PROGRESS, task.getStatus()); + assertEquals(IN_PROGRESS, task.getStatus()); assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId())); task.setStatus(COMPLETED); @@ -3383,12 +3349,10 @@ public void testTaskSkipping() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - - } @Test - public void testPauseResume() throws Exception { + public void testPauseResume() { metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1); @@ -3414,13 +3378,13 @@ public void testPauseResume() throws Exception { Workflow es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); Task t = es.getTasks().get(0); - assertEquals(Status.SCHEDULED, t.getStatus()); + assertEquals(SCHEDULED, t.getStatus()); // PAUSE workflowExecutor.pauseWorkflow(wfid); @@ -3493,11 +3457,10 @@ public void testPauseResume() throws Exception { es = workflowExecutionService.getExecutionStatus(wfid, true); assertNotNull(es); assertEquals(WorkflowStatus.COMPLETED, es.getStatus()); - } @Test - public void testSubWorkflow() throws Exception { + public void testSubWorkflow() { createSubWorkflow(); metadataService.getWorkflowDef(WF_WITH_SUB_WF, 1); @@ -3521,7 +3484,7 @@ public void testSubWorkflow() throws Exception { assertNotNull(es); assertNotNull(es.getTasks()); - task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get(); + task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().get(); assertNotNull(task); assertNotNull(task.getOutputData()); assertNotNull("Output: " + task.getOutputData().toString() + ", status: " + task.getStatus(), task.getOutputData().get("subWorkflowId")); @@ -3531,7 +3494,7 @@ public void testSubWorkflow() throws Exception { assertNotNull(es); assertNotNull(es.getTasks()); assertEquals(wfId, es.getParentWorkflowId()); - assertEquals(WorkflowStatus.RUNNING, es.getStatus()); + assertEquals(RUNNING, es.getStatus()); task = workflowExecutionService.poll("junit_task_1", "test"); task.setStatus(COMPLETED); @@ -3564,7 +3527,7 @@ public void testSubWorkflow() throws Exception { } @Test - public void testSubWorkflowFailure() throws Exception { + public void 
@@ -3564,7 +3527,7 @@ public void testSubWorkflow() throws Exception {
     }
 
     @Test
-    public void testSubWorkflowFailure() throws Exception {
+    public void testSubWorkflowFailure() {
         TaskDef taskDef = notFoundSafeGetTaskDef("junit_task_1");
         assertNotNull(taskDef);
@@ -3595,7 +3558,7 @@ public void testSubWorkflowFailure() throws Exception {
         es = workflowExecutionService.getExecutionStatus(wfId, true);
         assertNotNull(es);
         assertNotNull(es.getTasks());
-        task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get();
+        task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().get();
         assertNotNull(task);
         assertNotNull(task.getOutputData());
         assertNotNull(task.getOutputData().get("subWorkflowId"));
@@ -3606,7 +3569,7 @@ public void testSubWorkflowFailure() throws Exception {
         assertNotNull(es.getTasks());
         assertEquals(wfId, es.getParentWorkflowId());
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+        assertEquals(RUNNING, es.getStatus());
 
         task = workflowExecutionService.poll("junit_task_1", "test");
         assertNotNull(task);
@@ -3623,11 +3586,10 @@ public void testSubWorkflowFailure() throws Exception {
         taskDef.setTimeoutSeconds(0);
         taskDef.setRetryCount(RETRY_COUNT);
         metadataService.updateTaskDef(taskDef);
-
     }
 
     @Test
-    public void testSubWorkflowFailureInverse() throws Exception {
+    public void testSubWorkflowFailureInverse() {
         TaskDef taskDef = notFoundSafeGetTaskDef("junit_task_1");
         assertNotNull(taskDef);
@@ -3659,7 +3621,7 @@ public void testSubWorkflowFailureInverse() throws Exception {
         es = workflowExecutionService.getExecutionStatus(wfId, true);
         assertNotNull(es);
         assertNotNull(es.getTasks());
-        task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name().toString())).findAny().get();
+        task = es.getTasks().stream().filter(t -> t.getTaskType().equals(TaskType.SUB_WORKFLOW.name())).findAny().get();
         assertNotNull(task);
         assertNotNull(task.getOutputData());
         assertNotNull(task.getOutputData().get("subWorkflowId"));
@@ -3669,7 +3631,7 @@ public void testSubWorkflowFailureInverse() throws Exception {
         assertNotNull(es);
         assertNotNull(es.getTasks());
         assertEquals(wfId, es.getParentWorkflowId());
-        assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+        assertEquals(RUNNING, es.getStatus());
 
         workflowExecutor.terminateWorkflow(wfId, "fail");
         es = workflowExecutionService.getExecutionStatus(wfId, true);
@@ -3681,7 +3643,7 @@ public void testSubWorkflowFailureInverse() throws Exception {
     }
 
     @Test
-    public void testSubWorkflowRetry() throws Exception {
+    public void testSubWorkflowRetry() {
         String taskName = "junit_task_1";
         TaskDef taskDef = notFoundSafeGetTaskDef(taskName);
         int retryCount = notFoundSafeGetTaskDef(taskName).getRetryCount();
@@ -3724,7 +3686,7 @@ public void testSubWorkflowRetry() throws Exception {
         assertNotNull(workflow);
         assertNotNull(workflow.getTasks());
         assertEquals(workflowId, workflow.getParentWorkflowId());
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
 
         // poll and fail the first task in sub-workflow
         task = workflowExecutionService.poll("junit_task_1", "test");
@@ -3747,7 +3709,7 @@ public void testSubWorkflowRetry() throws Exception {
 
         workflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
 
         task = workflowExecutionService.poll("junit_task_2", "test");
         assertEquals(subWorkflowId, task.getWorkflowInstanceId());
@@ -3767,7 +3729,7 @@ public void testSubWorkflowRetry() throws Exception {
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
 
         task = workflowExecutionService.poll("junit_task_6", "test");
         assertNotNull(task);
@@ -3786,7 +3748,7 @@ public void testSubWorkflowRetry() throws Exception {
 
     @Test
-    public void testWait() throws Exception {
+    public void testWait() {
 
         WorkflowDef workflowDef = new WorkflowDef();
         workflowDef.setName("test_wait");
@@ -3809,7 +3771,7 @@ public void testWait() throws Exception {
         Workflow workflow = workflowExecutor.getWorkflow(workflowId, true);
         assertNotNull(workflow);
         assertEquals(1, workflow.getTasks().size());
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
 
         Task waitTask = workflow.getTasks().get(0);
         assertEquals(TaskType.WAIT.name(), waitTask.getTaskType());
@@ -3827,7 +3789,7 @@ public void testWait() throws Exception {
     }
 
     @Test
-    public void testEventWorkflow() throws Exception {
+    public void testEventWorkflow() {
 
         TaskDef taskDef = new TaskDef();
         taskDef.setName("eventX");
@@ -3875,7 +3837,7 @@ public void testEventWorkflow() throws Exception {
     }
 
     @Test
-    public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception {
+    public void testTaskWithCallbackAfterSecondsInWorkflow() {
         WorkflowDef workflowDef = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
         assertNotNull(workflowDef);
@@ -3888,7 +3850,7 @@ public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception {
         assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
 
         String taskId = task.getTaskId();
-        task.setStatus(Status.IN_PROGRESS);
+        task.setStatus(IN_PROGRESS);
         task.setCallbackAfterSeconds(5L);
         workflowExecutionService.updateTask(task);
@@ -3922,7 +3884,7 @@ public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception {
         assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
 
         taskId = task.getTaskId();
-        task.setStatus(Status.IN_PROGRESS);
+        task.setStatus(IN_PROGRESS);
         task.setCallbackAfterSeconds(5L);
         workflowExecutionService.updateTask(task);
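The hunk below adds coverage for workflows whose payloads live in external storage: the caller starts the workflow with a storage path instead of an input map, and the server keeps getInput() empty while exposing the path through getExternalInputPayloadStoragePath(). A sketch of that calling convention, with the argument roles inferred from the calls these tests make:

    // Start a workflow whose input payload is held externally rather than inline.
    // The path value mirrors the "workflow/input" used by the tests below.
    String workflowId = workflowExecutor.startWorkflow(
            LINEAR_WORKFLOW_T1_T2,  // workflow definition name
            1,                      // version
            "wf_external_storage",  // correlationId
            null,                   // inline input map: intentionally absent
            "workflow/input",       // external input payload storage path
            null,                   // event
            null);                  // taskToDomain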
@@ -3949,8 +3911,175 @@ public void testTaskWithCallbackAfterSecondsInWorkflow() throws Exception {
         assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
     }
 
+    @Test
+    public void testWorkflowUsingExternalPayloadStorage() {
+        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
+        assertNotNull(found);
+
+        Map<String, Object> outputParameters = found.getOutputParameters();
+        outputParameters.put("workflow_output", "${t1.output.op}");
+        metadataService.updateWorkflowDef(found);
+
+        String workflowInputPath = "workflow/input";
+        String correlationId = "wf_external_storage";
+        String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null);
+        assertNotNull(workflowId);
+
+        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(1, workflow.getTasks().size());
+
+        // Polling for the first task
+        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_1", task.getTaskType());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(workflowId, task.getWorkflowInstanceId());
+
+        // update first task with COMPLETED
+        String taskOutputPath = "task/output";
+        task.setOutputData(null);
+        task.setExternalOutputPayloadStoragePath(taskOutputPath);
+        task.setStatus(COMPLETED);
+        workflowExecutionService.updateTask(task);
+
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(2, workflow.getTasks().size());
+        assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty());
+        assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty());
+        assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath());
+        assertEquals("task/input", workflow.getTasks().get(1).getExternalInputPayloadStoragePath());
+
+        // Polling for the second task
+        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_2", task.getTaskType());
+        assertTrue(task.getInputData().isEmpty());
+        assertNotNull(task.getExternalInputPayloadStoragePath());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(workflowId, task.getWorkflowInstanceId());
+
+        // update second task with COMPLETED
+        task.getOutputData().put("op", "success_task2");
+        task.setStatus(COMPLETED);
+        workflowExecutionService.updateTask(task);
+
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
+        assertEquals(2, workflow.getTasks().size());
+        assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty());
+        assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty());
+        assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath());
+        assertEquals("task/input", workflow.getTasks().get(1).getExternalInputPayloadStoragePath());
+        assertTrue(workflow.getOutput().isEmpty());
+        assertNotNull(workflow.getExternalOutputPayloadStoragePath());
+        assertEquals("workflow/output", workflow.getExternalOutputPayloadStoragePath());
+    }
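+    // Illustrative aside: the test above shows the worker-side convention for
+    // large results, i.e. clear the inline payload and point at storage instead:
+    //     task.setOutputData(null);
+    //     task.setExternalOutputPayloadStoragePath("task/output");
+    // The retry test below reuses that convention, but fails the second task
+    // first, so the rescheduled attempt must also resolve its input externally.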
+
+    @Test
+    public void testRetryWorkflowUsingExternalPayloadStorage() {
+        WorkflowDef found = metadataService.getWorkflowDef(LINEAR_WORKFLOW_T1_T2, 1);
+        assertNotNull(found);
+
+        Map<String, Object> outputParameters = found.getOutputParameters();
+        outputParameters.put("workflow_output", "${t1.output.op}");
+        metadataService.updateWorkflowDef(found);
+
+        String taskName = "junit_task_2";
+        TaskDef taskDef = metadataService.getTaskDef(taskName);
+        taskDef.setRetryCount(2);
+        taskDef.setRetryDelaySeconds(0);
+        metadataService.updateTaskDef(taskDef);
+
+        String workflowInputPath = "workflow/input";
+        String correlationId = "wf_external_storage";
+        String workflowId = workflowExecutor.startWorkflow(LINEAR_WORKFLOW_T1_T2, 1, correlationId, null, workflowInputPath, null, null);
+        assertNotNull(workflowId);
+
+        Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(workflow.getReasonForIncompletion(), WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(1, workflow.getTasks().size());
+
+        // Polling for the first task
+        Task task = workflowExecutionService.poll("junit_task_1", "task1.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_1", task.getTaskType());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(workflowId, task.getWorkflowInstanceId());
+
+        // update first task with COMPLETED
+        String taskOutputPath = "task/output";
+        task.setOutputData(null);
+        task.setExternalOutputPayloadStoragePath(taskOutputPath);
+        task.setStatus(COMPLETED);
+        workflowExecutionService.updateTask(task);
+
+        // Polling for the second task
+        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_2", task.getTaskType());
+        assertTrue(task.getInputData().isEmpty());
+        assertNotNull(task.getExternalInputPayloadStoragePath());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(workflowId, task.getWorkflowInstanceId());
+
+        // update second task with FAILED
+        task.getOutputData().put("op", "failed_task2");
+        task.setStatus(FAILED);
+        workflowExecutionService.updateTask(task);
+
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+
+        // Polling again for the second task
+        task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
+        assertNotNull(task);
+        assertEquals("junit_task_2", task.getTaskType());
+        assertTrue(task.getInputData().isEmpty());
+        assertNotNull(task.getExternalInputPayloadStoragePath());
+        assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
+        assertEquals(workflowId, task.getWorkflowInstanceId());
+
+        // update second task with COMPLETED
+        task.getOutputData().put("op", "success_task2");
+        task.setStatus(COMPLETED);
+        workflowExecutionService.updateTask(task);
+
+        workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
+        assertNotNull(workflow);
+        assertTrue("The workflow input should not be persisted", workflow.getInput().isEmpty());
+        assertEquals(workflowInputPath, workflow.getExternalInputPayloadStoragePath());
+        assertEquals(WorkflowStatus.COMPLETED, workflow.getStatus());
+        assertEquals(3, workflow.getTasks().size());
+        assertTrue("The first task output should not be persisted", workflow.getTasks().get(0).getOutputData().isEmpty());
+        assertTrue("The second task input should not be persisted", workflow.getTasks().get(1).getInputData().isEmpty());
+        assertTrue("The second task input should not be persisted", workflow.getTasks().get(2).getInputData().isEmpty());
+        assertEquals(taskOutputPath, workflow.getTasks().get(0).getExternalOutputPayloadStoragePath());
+        assertEquals("task/input", workflow.getTasks().get(1).getExternalInputPayloadStoragePath());
+        assertEquals("task/input", workflow.getTasks().get(2).getExternalInputPayloadStoragePath());
+        assertTrue(workflow.getOutput().isEmpty());
+        assertNotNull(workflow.getExternalOutputPayloadStoragePath());
+        assertEquals("workflow/output", workflow.getExternalOutputPayloadStoragePath());
+    }
+
     //@Test
-    public void testRateLimiting() throws Exception {
+    public void testRateLimiting() {
 
         TaskDef td = new TaskDef();
         td.setName("eventX1");
@@ -4005,7 +4134,7 @@ public void testRateLimiting() {
         }
     }
 
-    private void createSubWorkflow() throws Exception {
+    private void createSubWorkflow() {
 
         WorkflowTask wft1 = new WorkflowTask();
         wft1.setName("junit_task_5");
@@ -4045,7 +4174,7 @@ private void createSubWorkflow() throws Exception {
 
     }
 
-    private void verify(String inputParam1, String wfid, String task1Op, boolean fail) throws Exception {
+    private void verify(String inputParam1, String wfid, String task1Op, boolean fail) {
         Task task = workflowExecutionService.poll("junit_task_2", "task2.junit.worker");
         assertNotNull(task);
         assertTrue(workflowExecutionService.ackTaskReceived(task.getTaskId()));
@@ -4068,7 +4197,7 @@ private void verify(String inputParam1, String wfid, String task1Op, boolean fai
         Workflow es = workflowExecutionService.getExecutionStatus(wfid, false);
         assertNotNull(es);
         if (fail) {
-            assertEquals(WorkflowStatus.RUNNING, es.getStatus());
+            assertEquals(RUNNING, es.getStatus());
         } else {
             assertEquals(WorkflowStatus.COMPLETED, es.getStatus());
         }
@@ -4128,7 +4257,7 @@ private void createWorkflowDefForDomain() {
         }
     }
 
-    private void createWFWithResponseTimeout() throws Exception {
+    private void createWFWithResponseTimeout() {
         TaskDef task = new TaskDef();
         task.setName("task_rt");
         task.setTimeoutSeconds(120);
@@ -4174,7 +4303,7 @@ private void createWFWithResponseTimeout() throws Exception {
         metadataService.updateWorkflowDef(def);
     }
 
-    private String runWorkflowWithSubworkflow() throws Exception {
+    private String runWorkflowWithSubworkflow() {
         clearWorkflows();
         createWorkflowDefForDomain();
@@ -4192,7 +4321,7 @@ private String runWorkflowWithSubworkflow() throws Exception {
 
         Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(1, workflow.getTasks().size()); //The very first task is the one that should be scheduled.
 
         // Poll for first task and execute it
@@ -4205,7 +4334,7 @@ private String runWorkflowWithSubworkflow() throws Exception {
 
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(2, workflow.getTasks().size());
 
         // Get the sub workflow id
@@ -4219,7 +4348,7 @@ private String runWorkflowWithSubworkflow() throws Exception {
 
         Workflow subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
         assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
+        assertEquals(RUNNING, subWorkflow.getStatus());
         assertEquals(1, subWorkflow.getTasks().size());
 
         // Now the Sub workflow is triggered
@@ -4233,12 +4362,12 @@ private String runWorkflowWithSubworkflow() throws Exception {
 
         subWorkflow = workflowExecutionService.getExecutionStatus(subWorkflowId, true);
         assertNotNull(subWorkflow);
-        assertEquals(WorkflowStatus.RUNNING, subWorkflow.getStatus());
+        assertEquals(RUNNING, subWorkflow.getStatus());
         assertEquals(2, subWorkflow.getTasks().size());
 
         workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals(WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals(RUNNING, workflow.getStatus());
         assertEquals(2, workflow.getTasks().size());
 
         // Poll for second task of the sub workflow and execute it
@@ -4285,7 +4414,7 @@ private String runAFailedForkJoinWF() throws Exception {
 
         Workflow workflow = workflowExecutionService.getExecutionStatus(workflowId, true);
         assertNotNull(workflow);
-        assertEquals("Found " + workflow.getTasks(), WorkflowStatus.RUNNING, workflow.getStatus());
+        assertEquals("Found " + workflow.getTasks(), RUNNING, workflow.getStatus());
         printTaskStatuses(workflow, "Initial");
 
         t2.setStatus(FAILED);
@@ -4308,7 +4437,7 @@ private String runAFailedForkJoinWF() throws Exception {
         return workflowId;
     }
 
-    private void printTaskStatuses(String wfid, String message) throws Exception {
+    private void printTaskStatuses(String wfid, String message) {
         Workflow wf = workflowExecutionService.getExecutionStatus(wfid, true);
         assertNotNull(wf);
         printTaskStatuses(wf, message);
@@ -4322,7 +4451,7 @@ private String startOrLoadWorkflowExecution(String workflowName, int version, St
 
     private boolean printWFTaskDetails = false;
 
-    private void printTaskStatuses(Workflow wf, String message) throws Exception {
+    private void printTaskStatuses(Workflow wf, String message) {
         if (printWFTaskDetails) {
             System.out.println(message + " >>> Workflow status " + wf.getStatus().name());
             wf.getTasks().forEach(t -> {
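In the End2EndTests hunks below, @Test(expected = Test.None.class) is replaced by a bare @Test. The two are equivalent in JUnit 4: Test.None is the default value of expected, and an uncaught exception fails the test either way, so the explicit form only added noise. A sketch of the equivalence (test names here are hypothetical):

    @Test(expected = Test.None.class /* no exception expected */)
    public void explicitForm() { taskClient.getPendingTaskForWorkflow("test", "t1"); }

    @Test   // identical behavior: any thrown exception still fails the test
    public void implicitForm() { taskClient.getPendingTaskForWorkflow("test", "t1"); }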
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
index 0f783d7ef0..9cd897cb4e 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/End2EndTests.java
@@ -250,8 +250,9 @@ public void testAll() throws Exception {
 
     @Test
     public void testMetadataWorkflowDefinition() {
+        String workflowDefName = "testWorkflowDefMetadata";
         WorkflowDef def = new WorkflowDef();
-        def.setName("testWorkflowDel");
+        def.setName(workflowDefName);
         def.setVersion(1);
         WorkflowTask t0 = new WorkflowTask();
         t0.setName("t0");
@@ -264,17 +265,17 @@ public void testMetadataWorkflowDefinition() {
         def.getTasks().add(t0);
         def.getTasks().add(t1);
         metadataClient.registerWorkflowDef(def);
-        metadataClient.unregisterWorkflowDef("testWorkflowDel", 1);
 
         try {
-            metadataClient.getWorkflowDef("testWorkflowDel", 1);
+            metadataClient.getWorkflowDef(workflowDefName, 1);
         } catch (ConductorClientException e) {
             int statusCode = e.getStatus();
             String errorMessage = e.getMessage();
             boolean retryable = e.isRetryable();
             assertEquals(404, statusCode);
-            assertEquals("No such workflow found by name: testWorkflowDel, version: 1", errorMessage);
+            assertEquals("No such workflow found by name: testWorkflowDefMetadata, version: 1", errorMessage);
             assertFalse(retryable);
         }
+        metadataClient.unregisterWorkflowDef(workflowDefName, 1);
     }
 
     @Test
@@ -372,10 +373,11 @@ public void testUpdateWorkflowDef() {
             assertFalse(e.isRetryable());
         }
     }
-    @Test(expected = Test.None.class /* no exception expected */)
+
+    @Test
     public void testGetTaskInProgress() {
         taskClient.getPendingTaskForWorkflow("test", "t1");
     }
+
     @Test
     public void testRemoveTaskFromTaskQueue() {
         try {
@@ -384,6 +386,7 @@ public void testRemoveTaskFromTaskQueue() {
             assertEquals(404, e.getStatus());
         }
     }
+
     @Test
     public void testTaskByTaskId() {
         try {
@@ -393,7 +396,8 @@ public void testTaskByTaskId() {
             assertEquals("No such task found by taskId: test123", e.getMessage());
         }
     }
-    @Test(expected = Test.None.class /* no exception expected */)
+
+    @Test
     public void testListworkflowsByCorrelationId() {
         workflowClient.getWorkflows("test", "test12", false, false);
     }
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java
index 1a1b44d747..ff28faa7bd 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowLegacyMigrationTest.java
@@ -71,7 +71,7 @@ public String startOrLoadWorkflowExecution(String snapshotResourceName, String w
         workflow.setTaskToDomain(taskToDomain);
         workflow.setVersion(version);
 
-        workflow.getTasks().stream().forEach(task -> {
+        workflow.getTasks().forEach(task -> {
             task.setTaskId(IDGenerator.generate());
             task.setWorkflowInstanceId(workflowId);
             task.setCorrelationId(correlationId);
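The hunks in this file drop redundant stream() calls: for a List, Iterable.forEach applies the same lambda without first wrapping the collection in a Stream, so the two forms below are interchangeable (sketch using calls from the surrounding hunks):

    List<Task> tasks = workflow.getTasks();
    tasks.stream().forEach(task -> workflowExecutor.addTaskToQueue(task)); // via an intermediate Stream
    tasks.forEach(task -> workflowExecutor.addTaskToQueue(task));          // direct, as the patch prefers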
@@ -85,7 +85,7 @@ public String startOrLoadWorkflowExecution(String snapshotResourceName, String w
          * in order to represent a workflow on the system, we need to populate the
          * respective queues related to tasks in progress or decisions.
          */
-        workflow.getTasks().stream().forEach(task -> {
+        workflow.getTasks().forEach(task -> {
             workflowExecutor.addTaskToQueue(task);
             queueDAO.push(WorkflowExecutor.DECIDER_QUEUE, workflowId, configuration.getSweepFrequency());
         });
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java
index 7ae3cac694..fdc4eee01e 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/integration/WorkflowServiceTest.java
@@ -30,5 +30,4 @@ public class WorkflowServiceTest extends AbstractWorkflowServiceTest {
     String startOrLoadWorkflowExecution(String snapshotResourceName, String workflowName, int version, String correlationId, Map<String, Object> input, String event, Map<String, String> taskToDomain) {
         return workflowExecutor.startWorkflow(workflowName, version, correlationId, input, null, event, taskToDomain);
     }
-
 }
diff --git a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
index 316fa09792..9abed623b3 100644
--- a/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
+++ b/test-harness/src/test/java/com/netflix/conductor/tests/utils/TestModule.java
@@ -22,7 +22,6 @@ import com.netflix.conductor.common.utils.JsonMapperProvider;
 import com.netflix.conductor.core.config.Configuration;
 import com.netflix.conductor.core.config.CoreModule;
-import com.netflix.conductor.core.config.SystemPropertiesConfiguration;
 import com.netflix.conductor.dao.ExecutionDAO;
 import com.netflix.conductor.dao.IndexDAO;
 import com.netflix.conductor.dao.MetadataDAO;
@@ -59,7 +58,7 @@ protected void configure() {
 
         configureExecutorService();
-        SystemPropertiesConfiguration config = new SystemPropertiesConfiguration();
+        MockConfiguration config = new MockConfiguration();
         bind(Configuration.class).toInstance(config);
 
         JedisCommands jedisMock = new JedisMock();