diff --git a/.github/project.yml b/.github/project.yml
index 633f883e..e787d911 100644
--- a/.github/project.yml
+++ b/.github/project.yml
@@ -1,3 +1,4 @@
+# Retriggering release again
release:
- current-version: 7.0.0-alpha5.1
- next-version: 7.0.0-SNAPSHOT
+ current-version: 7.0.0.Final
+ next-version: 8.0.0-SNAPSHOT
diff --git a/.github/workflows/maven-verify.yml b/.github/workflows/maven-verify.yml
index a9f5077c..2070974b 100644
--- a/.github/workflows/maven-verify.yml
+++ b/.github/workflows/maven-verify.yml
@@ -26,3 +26,7 @@ jobs:
- name: Verify with Maven
run: |
mvn -B -f pom.xml clean install verify
+
+ - name: Verify Examples with Maven
+ run: |
+ mvn -B -f examples/pom.xml clean install verify
diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml
index ce904c75..9d46ce2a 100644
--- a/.github/workflows/pre-release.yml
+++ b/.github/workflows/pre-release.yml
@@ -11,7 +11,7 @@ jobs:
name: pre release
steps:
- - uses: radcortez/project-metadata-action@master
+ - uses: radcortez/project-metadata-action@main
name: retrieve project metadata
id: metadata
with:
@@ -22,4 +22,4 @@ jobs:
if: contains(steps.metadata.outputs.current-version, 'SNAPSHOT')
run: |
echo '::error::Cannot release a SNAPSHOT version.'
- exit 1
\ No newline at end of file
+ exit 1
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2d002124..cb4ec5eb 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -10,7 +10,7 @@ jobs:
release:
runs-on: ubuntu-latest
name: release
- if: ${{github.event.pull_request.merged == true}}
+ if: ${{ github.event.pull_request.merged == true }}
steps:
- uses: radcortez/project-metadata-action@main
@@ -51,7 +51,7 @@ jobs:
cat release.properties
git checkout ${{github.base_ref}}
git rebase release
- mvn -B release:perform -Darguments=-DperformRelease -DperformRelease -Prelease
+ mvn -B release:perform -Prelease -Darguments="-DperformRelease"
env:
MAVEN_USERNAME: ${{ secrets.OSSRH_USERNAME }}
MAVEN_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
diff --git a/README.md b/README.md
index c0b4df70..caf87812 100644
--- a/README.md
+++ b/README.md
@@ -8,11 +8,11 @@ Provides the Java API for the [Serverless Workflow Specification](https://github
With the SDK you can:
* Read workflow JSON and YAML definitions
-* Write workflow in JSON and YAML format.
+* Write workflow definitions in JSON and YAML formats.
+* Test your workflow definitions using the reference implementation.
-Serverless Workflow Java SDK is **not** a workflow runtime implementation but can be used by Java runtime implementations to parse workflow definitions.
-### Status
+## Status
| Latest Releases | Conformance to spec version |
| :---: | :---: |
@@ -25,17 +25,18 @@ Serverless Workflow Java SDK is **not** a workflow runtime implementation but ca
Note that 6.0.0.Final, which will be the one for specification version 0.9, is skipped intentionally in case someone want to work on it.
-### JDK Version
+## JDK Version
| SDK Version | JDK Version |
| :---: | :---: |
+| 7.0.0 and after | 17 |
| 5.0.0 and after | 11 |
| 4.0.x and before | 8 |
-### Getting Started
+## Getting Started
-#### Building SNAPSHOT locally
+### Building SNAPSHOT locally
To build project and run tests locally:
@@ -47,7 +48,7 @@ mvn clean install
The project uses [Google's code styleguide](https://google.github.io/styleguide/javaguide.html).
Your changes should be automatically formatted during the build.
-#### Maven projects:
+### Maven projects:
Add the following dependencies to your pom.xml `dependencies` section:
@@ -55,23 +56,32 @@ Add the following dependencies to your pom.xml `dependencies` section:
io.serverlessworkflow
serverlessworkflow-api
- 7.0.0-SNAPSHOT
+ 7.0.0.Final
```
-#### Gradle projects:
+### Gradle projects:
Add the following dependencies to your build.gradle `dependencies` section:
```text
-implementation("io.serverlessworkflow:serverlessworkflow-api:7.0.0-SNAPSHOT")
+implementation("io.serverlessworkflow:serverlessworkflow-api:7.0.0.Final")
```
-### How to Use
+## How to Use
-#### Creating from JSON/YAML source
+There are, roughly speaking, two kinds of users of this SDK:
+ * Those interested in implementing their own runtime using Java.
+ * Those interested in using the provided runtime reference implementation.
-You can create a Workflow instance from JSON/YAML source:
+### Implementing your own runtime
+
+For those interested in implementing their own runtime, this SDK provides an easy way to load an in-memory representation of a given workflow definition.
+This in-memory representation consists of a hierarchy of POJOs directly generated from the Serverless Workflow specification [schema](api/src/main/resources/schema/workflow.yaml), which ensures the internal representation is aligned with the specification schema. The root of the hierarchy is the `io.serverlessworkflow.api.types.Workflow` class.
+
+### Reading workflow definition from JSON/YAML source
+
+You can read a Workflow definition from JSON/YAML source:
Let's say you have a simple YAML based workflow definition in a file name `simple.yaml` located in your working dir:
@@ -93,7 +103,7 @@ do:
```
-To parse it and create a Workflow instance you can do:
+To parse it and get a Workflow instance you can do:
``` java
@@ -102,10 +112,20 @@ try (InputStream in = new FileInputStream("simple.yaml")) {
// Once you have the Workflow instance you can use its API to inspect it
}
```
+By default, Workflows are not validated against the schema (performance being the priority). If you want to enable validation, you can do that by using:
+
+``` java
+try (InputStream in = new FileInputStream("simple.yaml")) {
+  Workflow workflow = WorkflowReader.validation().readWorkflow(in, WorkflowFormat.YAML);
+ // Once you have the Workflow instance you can use its API to inspect it
+}
+```
-#### Writing a workflow
+For additional reading helper methods, including the one to read a workflow definition from classpath, check [WorkflowReader](api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java) class.
-Given a workflow definition, you can store it using JSON or YAML format.
+### Writing workflow definition to a JSON/YAML target
+
+Given a Workflow instance, you can store it using JSON or YAML format.
For example, to store a workflow using json format in a file called `simple.json`, you write
``` java
@@ -113,4 +133,10 @@ try (OutputStream out = new FileOutputStream("simple.json")) {
WorkflowWriter.writeWorkflow(out, workflow, WorkflowFormat.JSON);
}
-```
\ No newline at end of file
+```
+For additional writing helper methods, check [WorkflowWriter](api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java) class.
+
+### Reference implementation
+
+The reference implementation provides a ready-to-use runtime that supports the Serverless Workflow Specification. It includes a workflow execution engine, validation utilities, and illustrative examples to help you quickly test and deploy your workflows. For details on usage, configuration, and supported features, see [readme](impl/README.md).
+
diff --git a/api/pom.xml b/api/pom.xml
index 466a2754..69f8c2f5 100644
--- a/api/pom.xml
+++ b/api/pom.xml
@@ -4,7 +4,7 @@
io.serverlessworkflow
serverlessworkflow-parent
- 7.0.0-alpha5.1
+ 8.0.0-SNAPSHOT
serverlessworkflow-api
@@ -21,6 +21,10 @@
com.fasterxml.jackson.core
jackson-core
+
+ com.networknt
+ json-schema-validator
+
com.fasterxml.jackson.core
jackson-databind
@@ -108,7 +112,7 @@
io.serverlessworkflow
- custom-generator
+ serverless-workflow-custom-generator
${project.version}
diff --git a/api/src/main/java/io/serverlessworkflow/api/DirectReader.java b/api/src/main/java/io/serverlessworkflow/api/DirectReader.java
new file mode 100644
index 00000000..83fe0550
--- /dev/null
+++ b/api/src/main/java/io/serverlessworkflow/api/DirectReader.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.api;
+
+import io.serverlessworkflow.api.types.Workflow;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+
+class DirectReader implements WorkflowReaderOperations {
+
+ @Override
+ public Workflow read(InputStream input, WorkflowFormat format) throws IOException {
+ return format.mapper().readValue(input, Workflow.class);
+ }
+
+ @Override
+ public Workflow read(Reader input, WorkflowFormat format) throws IOException {
+ return format.mapper().readValue(input, Workflow.class);
+ }
+
+ @Override
+ public Workflow read(byte[] input, WorkflowFormat format) throws IOException {
+ return format.mapper().readValue(input, Workflow.class);
+ }
+
+ @Override
+ public Workflow read(String input, WorkflowFormat format) throws IOException {
+ return format.mapper().readValue(input, Workflow.class);
+ }
+}
diff --git a/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java b/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java
new file mode 100644
index 00000000..25481d5c
--- /dev/null
+++ b/api/src/main/java/io/serverlessworkflow/api/ValidationReader.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.api;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.networknt.schema.InputFormat;
+import com.networknt.schema.JsonSchema;
+import com.networknt.schema.JsonSchemaFactory;
+import com.networknt.schema.SchemaValidatorsConfig;
+import com.networknt.schema.SpecVersion.VersionFlag;
+import com.networknt.schema.ValidationMessage;
+import io.serverlessworkflow.api.types.Workflow;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.io.UncheckedIOException;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+class ValidationReader implements WorkflowReaderOperations {
+ private final JsonSchema schemaObject;
+
+ ValidationReader() {
+ try (InputStream input =
+ Thread.currentThread()
+ .getContextClassLoader()
+ .getResourceAsStream("schema/workflow.yaml")) {
+ this.schemaObject =
+ JsonSchemaFactory.getInstance(VersionFlag.V7)
+ .getSchema(input, InputFormat.YAML, SchemaValidatorsConfig.builder().build());
+ } catch (IOException e) {
+ throw new UncheckedIOException(e);
+ }
+ }
+
+ @Override
+ public Workflow read(InputStream input, WorkflowFormat format) throws IOException {
+ return validate(format.mapper().readValue(input, JsonNode.class), format);
+ }
+
+ @Override
+ public Workflow read(Reader input, WorkflowFormat format) throws IOException {
+ return validate(format.mapper().readValue(input, JsonNode.class), format);
+ }
+
+ @Override
+ public Workflow read(byte[] input, WorkflowFormat format) throws IOException {
+ return validate(format.mapper().readValue(input, JsonNode.class), format);
+ }
+
+ @Override
+ public Workflow read(String input, WorkflowFormat format) throws IOException {
+ return validate(format.mapper().readValue(input, JsonNode.class), format);
+ }
+
+ private Workflow validate(JsonNode value, WorkflowFormat format) {
+ Set validationErrors = schemaObject.validate(value);
+ if (!validationErrors.isEmpty()) {
+ throw new IllegalArgumentException(
+ validationErrors.stream()
+ .map(ValidationMessage::toString)
+ .collect(Collectors.joining("\n")));
+ }
+ return format.mapper().convertValue(value, Workflow.class);
+ }
+}
diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java
index 4decc696..6868a6dc 100644
--- a/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java
+++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java
@@ -16,58 +16,98 @@
package io.serverlessworkflow.api;
import io.serverlessworkflow.api.types.Workflow;
-import java.io.ByteArrayInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
-import java.io.StringReader;
import java.nio.file.Files;
import java.nio.file.Path;
public class WorkflowReader {
public static Workflow readWorkflow(InputStream input, WorkflowFormat format) throws IOException {
- return format.mapper().readValue(input, Workflow.class);
+ return defaultReader().read(input, format);
}
public static Workflow readWorkflow(Reader input, WorkflowFormat format) throws IOException {
- return format.mapper().readValue(input, Workflow.class);
+ return defaultReader().read(input, format);
}
- public static Workflow readWorkflow(Path path, WorkflowFormat format) throws IOException {
- return format.mapper().readValue(Files.readAllBytes(path), Workflow.class);
+ public static Workflow readWorkflow(byte[] input, WorkflowFormat format) throws IOException {
+ return defaultReader().read(input, format);
}
- public static Workflow readWorkflow(byte[] content, WorkflowFormat format) throws IOException {
- try (InputStream input = new ByteArrayInputStream(content)) {
- return readWorkflow(input, format);
- }
+ public static Workflow readWorkflow(Path path) throws IOException {
+ return readWorkflow(path, WorkflowFormat.fromPath(path), defaultReader());
+ }
+
+ public static Workflow readWorkflow(Path path, WorkflowFormat format) throws IOException {
+ return readWorkflow(path, format, defaultReader());
}
- public static Workflow readWorkflowFromString(String content, WorkflowFormat format)
+ public static Workflow readWorkflowFromString(String input, WorkflowFormat format)
throws IOException {
- try (Reader reader = new StringReader(content)) {
- return readWorkflow(reader, format);
- }
+ return defaultReader().read(input, format);
}
public static Workflow readWorkflowFromClasspath(String classpath) throws IOException {
+ return readWorkflowFromClasspath(classpath, defaultReader());
+ }
+
+ public static Workflow readWorkflowFromClasspath(
+ String classpath, ClassLoader cl, WorkflowFormat format) throws IOException {
+    return readWorkflowFromClasspath(classpath, cl, format, defaultReader());
+ }
+
+ public static Workflow readWorkflow(Path path, WorkflowReaderOperations reader)
+ throws IOException {
+ return readWorkflow(path, WorkflowFormat.fromPath(path), reader);
+ }
+
+ public static Workflow readWorkflow(
+ Path path, WorkflowFormat format, WorkflowReaderOperations reader) throws IOException {
+ return reader.read(Files.readAllBytes(path), format);
+ }
+
+ public static Workflow readWorkflowFromClasspath(
+ String classpath, WorkflowReaderOperations reader) throws IOException {
return readWorkflowFromClasspath(
classpath,
Thread.currentThread().getContextClassLoader(),
- WorkflowFormat.fromFileName(classpath));
+ WorkflowFormat.fromFileName(classpath),
+ reader);
}
public static Workflow readWorkflowFromClasspath(
- String classpath, ClassLoader cl, WorkflowFormat format) throws IOException {
+ String classpath, ClassLoader cl, WorkflowFormat format, WorkflowReaderOperations reader)
+ throws IOException {
try (InputStream in = cl.getResourceAsStream(classpath)) {
if (in == null) {
throw new FileNotFoundException(classpath);
}
- return readWorkflow(in, format);
+ return reader.read(in, format);
}
}
+ public static WorkflowReaderOperations noValidation() {
+ return NoValidationHolder.instance;
+ }
+
+ public static WorkflowReaderOperations validation() {
+ return ValidationHolder.instance;
+ }
+
+ private static class NoValidationHolder {
+ private static final WorkflowReaderOperations instance = new DirectReader();
+ }
+
+ private static class ValidationHolder {
+ private static final WorkflowReaderOperations instance = new ValidationReader();
+ }
+
+ private static WorkflowReaderOperations defaultReader() {
+ return NoValidationHolder.instance;
+ }
+
private WorkflowReader() {}
}
diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java
new file mode 100644
index 00000000..7049aba0
--- /dev/null
+++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowReaderOperations.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.api;
+
+import io.serverlessworkflow.api.types.Workflow;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+
+public interface WorkflowReaderOperations {
+ Workflow read(InputStream input, WorkflowFormat format) throws IOException;
+
+ Workflow read(Reader input, WorkflowFormat format) throws IOException;
+
+ Workflow read(byte[] input, WorkflowFormat format) throws IOException;
+
+ Workflow read(String input, WorkflowFormat format) throws IOException;
+}
diff --git a/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java b/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java
index 29115396..5980dee6 100644
--- a/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java
+++ b/api/src/main/java/io/serverlessworkflow/api/WorkflowWriter.java
@@ -19,7 +19,6 @@
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
-import java.io.StringWriter;
import java.io.Writer;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -49,10 +48,7 @@ public static void writeWorkflow(Path output, Workflow workflow, WorkflowFormat
public static String workflowAsString(Workflow workflow, WorkflowFormat format)
throws IOException {
- try (Writer writer = new StringWriter()) {
- writeWorkflow(writer, workflow, format);
- return writer.toString();
- }
+ return format.mapper().writeValueAsString(workflow);
}
public static byte[] workflowAsBytes(Workflow workflow, WorkflowFormat format)
diff --git a/api/src/main/resources/schema/workflow.yaml b/api/src/main/resources/schema/workflow.yaml
index aecbeacb..b59e2f3a 100644
--- a/api/src/main/resources/schema/workflow.yaml
+++ b/api/src/main/resources/schema/workflow.yaml
@@ -241,33 +241,42 @@ $defs:
properties:
document:
$ref: '#/$defs/externalResource'
- title: WithAsyncAPIDocument
+ title: AsyncAPIDocument
description: The document that defines the AsyncAPI operation to call.
- operationRef:
+ channel:
type: string
- title: WithAsyncAPIOperation
+ title: With
+ description: The name of the channel on which to perform the operation. Used only in case the referenced document uses AsyncAPI v2.6.0.
+ operation:
+ type: string
+ title: AsyncAPIOperation
description: A reference to the AsyncAPI operation to call.
server:
+ $ref: '#/$defs/asyncApiServer'
+ title: AsyncAPIServer
+ description: An object used to configure to the server to call the specified AsyncAPI operation on.
+ protocol:
type: string
- title: WithAsyncAPIServer
- description: A a reference to the server to call the specified AsyncAPI operation on. If not set, default to the first server matching the operation's channel.
+ title: AsyncApiProtocol
+ description: The protocol to use to select the target server.
+ enum: [ amqp, amqp1, anypointmq, googlepubsub, http, ibmmq, jms, kafka, mercure, mqtt, mqtt5, nats, pulsar, redis, sns, solace, sqs, stomp, ws ]
message:
- type: string
- title: WithAsyncAPIMessage
- description: The name of the message to use. If not set, defaults to the first message defined by the operation.
- binding:
- type: string
- title: WithAsyncAPIBinding
- description: The name of the binding to use. If not set, defaults to the first binding defined by the operation.
- payload:
- type: object
- title: WithAsyncAPIPayload
- description: The payload to call the AsyncAPI operation with, if any.
+ $ref: '#/$defs/asyncApiOutboundMessage'
+ title: AsyncApiMessage
+ description: An object used to configure the message to publish using the target operation.
+ subscription:
+ $ref: '#/$defs/asyncApiSubscription'
+ title: AsyncApiSubscription
+ description: An object used to configure the subscription to messages consumed using the target operation.
authentication:
$ref: '#/$defs/referenceableAuthenticationPolicy'
- title: WithAsyncAPIAuthentication
+ title: AsyncAPIAuthentication
description: The authentication policy, if any, to use when calling the AsyncAPI operation.
- required: [ document, operationRef ]
+ oneOf:
+ - required: [ document, operation, message ]
+ - required: [ document, operation, subscription ]
+ - required: [ document, channel, message ]
+ - required: [ document, channel, subscription ]
unevaluatedProperties: false
- title: CallGRPC
description: Defines the GRPC call to perform.
@@ -341,29 +350,33 @@ $defs:
properties:
method:
type: string
- title: WithHTTPMethod
+ title: HTTPMethod
description: The HTTP method of the HTTP request to perform.
endpoint:
- title: WithHTTPEndpoint
+ title: HTTPEndpoint
description: The HTTP endpoint to send the request to.
$ref: '#/$defs/endpoint'
headers:
type: object
- title: WithHTTPHeaders
+ title: HTTPHeaders
description: A name/value mapping of the headers, if any, of the HTTP request to perform.
body:
- title: WithHTTPBody
+ title: HTTPBody
description: The body, if any, of the HTTP request to perform.
query:
type: object
- title: WithHTTPQuery
+ title: HTTPQuery
description: A name/value mapping of the query parameters, if any, of the HTTP request to perform.
additionalProperties: true
output:
type: string
- title: WithHTTPOutput
+ title: HTTPOutput
description: The http call output format. Defaults to 'content'.
enum: [ raw, content, response ]
+ redirect:
+ type: boolean
+ title: HttpRedirect
+ description: Specifies whether redirection status codes (`300–399`) should be treated as errors.
required: [ method, endpoint ]
unevaluatedProperties: false
- title: CallOpenAPI
@@ -403,6 +416,10 @@ $defs:
enum: [ raw, content, response ]
title: WithOpenAPIOutput
description: The http call output format. Defaults to 'content'.
+ redirect:
+ type: boolean
+ title: HttpRedirect
+ description: Specifies whether redirection status codes (`300–399`) should be treated as errors.
required: [ document, operationId ]
unevaluatedProperties: false
- title: CallFunction
@@ -537,7 +554,17 @@ $defs:
$ref: '#/$defs/eventConsumptionStrategy'
title: ListenTo
description: Defines the event(s) to listen to.
+ read:
+ type: string
+ enum: [ data, envelope, raw ]
+ default: data
+ title: ListenAndReadAs
+ description: Specifies how events are read during the listen operation.
required: [ to ]
+ foreach:
+ $ref: '#/$defs/subscriptionIterator'
+ title: ListenIterator
+ description: Configures the iterator, if any, for processing consumed event(s).
raiseTask:
type: object
$ref: '#/$defs/taskBase'
@@ -581,6 +608,12 @@ $defs:
default: true
title: AwaitProcessCompletion
description: Whether to await the process completion before continuing.
+ return:
+ type: string
+ title: ProcessReturnType
+ description: Configures the output of the process.
+ enum: [ stdout, stderr, code, all, none ]
+ default: stdout
oneOf:
- title: RunContainer
description: Enables the execution of external processes encapsulated within a containerized environment.
@@ -595,6 +628,10 @@ $defs:
type: string
title: ContainerImage
description: The name of the container image to run.
+ name:
+ type: string
+ title: ContainerName
+ description: A runtime expression, if any, used to give specific name to the container.
command:
type: string
title: ContainerCommand
@@ -611,6 +648,10 @@ $defs:
type: object
title: ContainerEnvironment
description: A key/value mapping of the environment variables, if any, to use when running the configured process.
+ lifetime:
+ $ref: '#/$defs/containerLifetime'
+ title: ContainerLifetime
+ description: An object, if any, used to configure the container's lifetime
required: [ image ]
required: [ container ]
- title: RunScript
@@ -1257,6 +1298,12 @@ $defs:
- title: ExpressionDataSchema
$ref: '#/$defs/runtimeExpression'
description: An expression based event data schema.
+ data:
+ title: EventData
+ description: The event's payload data
+ anyOf:
+ - $ref: '#/$defs/runtimeExpression'
+ - {}
additionalProperties: true
eventConsumptionStrategy:
type: object
@@ -1276,11 +1323,22 @@ $defs:
- title: AnyEventConsumptionStrategy
properties:
any:
- type: array
- title: AnyEventConsumptionStrategyConfiguration
- description: A list containing any of the events to consume.
- items:
- $ref: '#/$defs/eventFilter'
+ type: array
+ title: AnyEventConsumptionStrategyConfiguration
+ description: A list containing any of the events to consume.
+ items:
+ $ref: '#/$defs/eventFilter'
+ until:
+ oneOf:
+ - type: string
+ title: AnyEventUntilCondition
+ description: A runtime expression condition evaluated after consuming an event and which determines whether or not to continue listening.
+ - allOf:
+ - $ref: '#/$defs/eventConsumptionStrategy'
+ description: The strategy that defines the event(s) to consume to stop listening.
+ - properties:
+ until: false
+ title: AnyEventUntilConsumed
required: [ any ]
- title: OneEventConsumptionStrategy
properties:
@@ -1522,16 +1580,179 @@ $defs:
catalog:
type: object
title: Catalog
- description: The definition of a resource catalog
+ description: The definition of a resource catalog.
unevaluatedProperties: false
properties:
endpoint:
$ref: '#/$defs/endpoint'
title: CatalogEndpoint
- description: The root URL where the catalog is hosted
+ description: The root URL where the catalog is hosted.
required: [ endpoint ]
runtimeExpression:
type: string
title: RuntimeExpression
description: A runtime expression.
pattern: "^\\s*\\$\\{.+\\}\\s*$"
+ containerLifetime:
+ type: object
+ title: ContainerLifetime
+ description: The configuration of a container's lifetime
+ unevaluatedProperties: false
+ properties:
+ cleanup:
+ type: string
+ title: ContainerCleanupPolicy
+ description: The container cleanup policy to use
+ enum: [ always, never, eventually ]
+ default: never
+ after:
+ $ref: '#/$defs/duration'
+ title: ContainerLifetimeDuration
+ description: The duration after which to cleanup the container, in case the cleanup policy has been set to 'eventually'
+ required: [ cleanup ]
+ if:
+ properties:
+ cleanup:
+ const: eventually
+ then:
+ required: [ after ]
+ else:
+ not:
+ required: [ after ]
+ processResult:
+ type: object
+ title: ProcessResult
+ description: The object returned by a run task when its return type has been set 'all'.
+ unevaluatedProperties: false
+ properties:
+ code:
+ type: integer
+ title: ProcessExitCode
+ description: The process's exit code.
+ stdout:
+ type: string
+ title: ProcessStandardOutput
+ description: The content of the process's STDOUT.
+ stderr:
+ type: string
+ title: ProcessStandardError
+ description: The content of the process's STDERR.
+ required: [ code, stdout, stderr ]
+ asyncApiServer:
+ type: object
+ title: AsyncApiServer
+ description: Configures the target server of an AsyncAPI operation.
+ unevaluatedProperties: false
+ properties:
+ name:
+ type: string
+ title: AsyncApiServerName
+ description: The target server's name.
+ variables:
+ type: object
+ title: AsyncApiServerVariables
+ description: The target server's variables, if any.
+ required: [ name ]
+ asyncApiOutboundMessage:
+ type: object
+ title: AsyncApiOutboundMessage
+ description: An object used to configure the message to publish using the target operation.
+ unevaluatedProperties: false
+ properties:
+ payload:
+ type: object
+ title: AsyncApiMessagePayload
+ description: The message's payload, if any.
+ additionalProperties: true
+ headers:
+ type: object
+ title: AsyncApiMessageHeaders
+ description: The message's headers, if any.
+ additionalProperties: true
+ asyncApiInboundMessage:
+ type: object
+ title: AsyncApiInboundMessage
+    description: Represents a message consumed by an AsyncAPI subscription.
+ allOf:
+ - $ref: '#/$defs/asyncApiOutboundMessage'
+ properties:
+ correlationId:
+ type: string
+ title: AsyncApiMessageCorrelationId
+ description: The message's correlation id, if any.
+ asyncApiSubscription:
+ type: object
+ title: AsyncApiSubscription
+ description: An object used to configure the subscription to messages consumed using the target operation.
+ unevaluatedProperties: false
+ properties:
+ filter:
+ $ref: '#/$defs/runtimeExpression'
+ title: AsyncApiSubscriptionCorrelation
+ description: A runtime expression, if any, used to filter consumed messages.
+ consume:
+ $ref: '#/$defs/asyncApiMessageConsumptionPolicy'
+ title: AsyncApiMessageConsumptionPolicy
+ description: An object used to configure the subscription's message consumption policy.
+ foreach:
+ $ref: '#/$defs/subscriptionIterator'
+ title: AsyncApiSubscriptionIterator
+      description: Configures the iterator, if any, for processing consumed message(s).
+ required: [ consume ]
+ asyncApiMessageConsumptionPolicy:
+ type: object
+ title: AsyncApiMessageConsumptionPolicy
+ description: An object used to configure a subscription's message consumption policy.
+ unevaluatedProperties: false
+ properties:
+ for:
+ $ref: '#/$defs/duration'
+ title: AsyncApiMessageConsumptionPolicyFor
+ description: Specifies the time period over which messages will be consumed.
+ oneOf:
+ - properties:
+ amount:
+ type: integer
+ description: The amount of (filtered) messages to consume before disposing of the subscription.
+ title: AsyncApiMessageConsumptionPolicyAmount
+ required: [ amount ]
+ - properties:
+ while:
+ $ref: '#/$defs/runtimeExpression'
+ description: A runtime expression evaluated after each consumed (filtered) message to decide if message consumption should continue.
+ title: AsyncApiMessageConsumptionPolicyWhile
+ required: [ while ]
+ - properties:
+ until:
+ $ref: '#/$defs/runtimeExpression'
+ description: A runtime expression evaluated before each consumed (filtered) message to decide if message consumption should continue.
+ title: AsyncApiMessageConsumptionPolicyUntil
+ required: [ until ]
+ subscriptionIterator:
+ type: object
+ title: SubscriptionIterator
+ description: Configures the iteration over each item (event or message) consumed by a subscription.
+ unevaluatedProperties: false
+ properties:
+ item:
+ type: string
+ title: SubscriptionIteratorItem
+ description: The name of the variable used to store the current item being enumerated.
+ default: item
+ at:
+ type: string
+ title: SubscriptionIteratorIndex
+ description: The name of the variable used to store the index of the current item being enumerated.
+ default: index
+ do:
+ $ref: '#/$defs/taskList'
+ title: SubscriptionIteratorTasks
+ description: The tasks to perform for each consumed item.
+ output:
+ $ref: '#/$defs/output'
+ title: SubscriptionIteratorOutput
+ description: An object, if any, used to customize the item's output and to document its schema.
+ export:
+ $ref: '#/$defs/export'
+ title: SubscriptionIteratorExport
+ description: An object, if any, used to customize the content of the workflow context.
\ No newline at end of file
diff --git a/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java b/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java
index 81d10ecf..39d7045b 100644
--- a/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java
+++ b/api/src/test/java/io/serverlessworkflow/api/FeaturesTest.java
@@ -17,6 +17,7 @@
import static io.serverlessworkflow.api.WorkflowReader.readWorkflow;
import static io.serverlessworkflow.api.WorkflowReader.readWorkflowFromClasspath;
+import static io.serverlessworkflow.api.WorkflowReader.validation;
import static io.serverlessworkflow.api.WorkflowWriter.workflowAsBytes;
import static io.serverlessworkflow.api.WorkflowWriter.workflowAsString;
import static io.serverlessworkflow.api.WorkflowWriter.writeWorkflow;
@@ -53,13 +54,13 @@ public class FeaturesTest {
"features/set.yaml",
"features/switch.yaml",
"features/try.yaml",
- "features/listen.yaml",
+ "features/listen-to-any.yaml",
"features/callFunction.yaml",
"features/callCustomFunction.yaml",
"features/call-http-query-parameters.yaml"
})
public void testSpecFeaturesParsing(String workflowLocation) throws IOException {
- Workflow workflow = readWorkflowFromClasspath(workflowLocation);
+ Workflow workflow = readWorkflowFromClasspath(workflowLocation, validation());
assertWorkflow(workflow);
assertWorkflowEquals(workflow, writeAndReadInMemory(workflow));
}
diff --git a/api/src/test/resources/features/callCustomFunction.yaml b/api/src/test/resources/features/callCustomFunction.yaml
index 4161cf41..fbb636b4 100644
--- a/api/src/test/resources/features/callCustomFunction.yaml
+++ b/api/src/test/resources/features/callCustomFunction.yaml
@@ -1,27 +1,25 @@
document:
- dsl: 1.0.0-alpha5
- namespace: test
- name: call-example
- version: 0.1.0
-schedule:
- cron: 0 8 * * *
+ dsl: '1.0.0-alpha5'
+ namespace: samples
+ name: call-custom-function-inline
+ version: '0.1.0'
+use:
+ functions:
+ getPetById:
+ input:
+ schema:
+ document:
+ type: object
+ properties:
+ petId:
+ type: string
+ required: [ petId ]
+ call: http
+ with:
+ method: get
+ endpoint: https://petstore.swagger.io/v2/pet/{petId}
do:
-- getData:
- call: http
- with:
- method: get
- endpoint: https://api.agify.io?name=meelad
- output:
- as: ".data.reading"
-- filterData:
- for:
- in: ".data.reading"
- each: reading
- do:
- - log:
- call: https://raw.githubusercontent.com/serverlessworkflow/catalog/main/functions/log/1.0.0/function.yaml
- with:
- level: information
- format: "{TIMESTAMP} [{LEVEL}] ({CONTEXT}): {MESSAGE}"
- message: Hello, world!
- timestamp: true
\ No newline at end of file
+ - getPet:
+ call: getPetById
+ with:
+ petId: 69
\ No newline at end of file
diff --git a/api/src/test/resources/features/callOpenAPI.yaml b/api/src/test/resources/features/callOpenAPI.yaml
index 1a1d0c56..82843c5d 100644
--- a/api/src/test/resources/features/callOpenAPI.yaml
+++ b/api/src/test/resources/features/callOpenAPI.yaml
@@ -8,7 +8,7 @@ do:
call: openapi
with:
document:
- uri: "https://petstore.swagger.io/v2/swagger.json"
+ endpoint: "https://petstore.swagger.io/v2/swagger.json"
operationId: findPetsByStatus
parameters:
status: ${ .status }
diff --git a/api/src/test/resources/features/listen-to-any.yaml b/api/src/test/resources/features/listen-to-any.yaml
new file mode 100644
index 00000000..fa8794d3
--- /dev/null
+++ b/api/src/test/resources/features/listen-to-any.yaml
@@ -0,0 +1,16 @@
+document:
+ dsl: '1.0.0-alpha5'
+ namespace: test
+ name: listen-to-any
+ version: '0.1.0'
+do:
+ - callDoctor:
+ listen:
+ to:
+ any:
+ - with:
+ type: com.fake-hospital.vitals.measurements.temperature
+ data: ${ .temperature > 38 }
+ - with:
+ type: com.fake-hospital.vitals.measurements.bpm
+ data: ${ .bpm < 60 or .bpm > 100 }
\ No newline at end of file
diff --git a/api/src/test/resources/features/listen.yaml b/api/src/test/resources/features/listen.yaml
deleted file mode 100644
index 1c56c229..00000000
--- a/api/src/test/resources/features/listen.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-document:
- dsl: 1.0.0-alpha1
- namespace: default
- name: listen-task
- version: 1.0.0
-do:
- - listenToSomething:
- listen:
- to:
- any:
- - with:
- source: pepe
- type: pepe
\ No newline at end of file
diff --git a/custom-generator/pom.xml b/custom-generator/pom.xml
index 8444bb2a..3660e286 100644
--- a/custom-generator/pom.xml
+++ b/custom-generator/pom.xml
@@ -1,20 +1,22 @@
-
- 4.0.0
-
- io.serverlessworkflow
- serverlessworkflow-parent
- 7.0.0-alpha5.1
-
- custom-generator
-
+
+ 4.0.0
+
+ io.serverlessworkflow
+ serverlessworkflow-parent
+ 8.0.0-SNAPSHOT
+
+ serverless-workflow-custom-generator
+ Serverless Workflow :: Custom Generator
+
org.jsonschema2pojo
jsonschema2pojo-core
-
-
-
-
+
+
+
+
com.spotify.fmt
fmt-maven-plugin
@@ -34,6 +36,6 @@
-
-
+
+
\ No newline at end of file
diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java
index d14ba357..622efcbb 100644
--- a/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java
+++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/AllAnyOneOfSchemaRule.java
@@ -60,6 +60,7 @@ class AllAnyOneOfSchemaRule extends SchemaRule {
}
private static final String REF = "$ref";
+ private static final String TITLE = "title";
private static final String PATTERN = "pattern";
private enum Format {
@@ -154,6 +155,16 @@ public JType apply(
&& allOfTypes.isEmpty()
&& refType.isPresent()) {
javaType = refType.get();
+ } else if (!schemaNode.has("properties")
+ && oneOfTypes.isEmpty()
+ && allOfTypes.size() == 1
+ && refType.isEmpty()) {
+ javaType = allOfTypes.get(0).getType();
+ } else if (!schemaNode.has("properties")
+ && oneOfTypes.size() == 1
+ && allOfTypes.isEmpty()
+ && refType.isEmpty()) {
+ javaType = oneOfTypes.get(0).getType();
} else {
JPackage container = generatableType.getPackage();
javaType = ruleFactory.getTypeRule().apply(nodeName, schemaNode, parent, container, schema);
@@ -468,25 +479,52 @@ private void unionType(
Schema parentSchema,
Collection types) {
if (schemaNode.has(prefix)) {
+ ArrayNode array = (ArrayNode) schemaNode.get(prefix);
+ if (schemaNode.has(TITLE)) {
+ nodeName = schemaNode.get(TITLE).asText();
+ }
int i = 0;
- for (JsonNode oneOf : (ArrayNode) schemaNode.get(prefix)) {
- String ref = parentSchema.getId().toString() + '/' + prefix + '/' + i++;
- Schema schema =
- ruleFactory
- .getSchemaStore()
- .create(
- URI.create(ref),
- ruleFactory.getGenerationConfig().getRefFragmentPathDelimiters());
- types.add(
- new JTypeWrapper(
- schema.isGenerated()
- ? schema.getJavaType()
- : apply(nodeName, oneOf, parent, generatableType.getPackage(), schema),
- oneOf));
+ for (JsonNode oneOf : array) {
+ if (!ignoreNode(oneOf)) {
+ String ref = parentSchema.getId().toString() + '/' + prefix + '/' + i++;
+ Schema schema =
+ ruleFactory
+ .getSchemaStore()
+ .create(
+ URI.create(ref),
+ ruleFactory.getGenerationConfig().getRefFragmentPathDelimiters());
+ types.add(
+ new JTypeWrapper(
+ schema.isGenerated()
+ ? schema.getJavaType()
+ : apply(nodeName, oneOf, parent, generatableType.getPackage(), schema),
+ oneOf));
+ }
}
}
}
+ private static boolean ignoreNode(JsonNode node) {
+ return allRequired(node) || allRemoveProperties(node);
+ }
+
+ private static boolean allRemoveProperties(JsonNode node) {
+ if (node.size() == 1 && node.has("properties")) {
+ JsonNode propsNode = node.get("properties");
+ for (JsonNode propNode : propsNode) {
+ if (!propNode.isBoolean() || propNode.asBoolean()) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+
+ private static boolean allRequired(JsonNode node) {
+ return node.size() == 1 && node.has("required");
+ }
+
private Optional refType(
String nodeName,
JsonNode schemaNode,
@@ -507,7 +545,7 @@ private Optional refType(
schema.isGenerated()
? schema.getJavaType()
: apply(
- nameFromRef(ref, nodeName),
+ nameFromRef(ref, nodeName, schemaNode),
schema.getContent(),
parent,
generatableType,
@@ -549,7 +587,10 @@ private String pattern(JsonNode node) {
return format != null ? format.pattern() : getFromNode(node, PATTERN);
}
- private String nameFromRef(String ref, String nodeName) {
+ private String nameFromRef(String ref, String nodeName, JsonNode schemaNode) {
+ if (schemaNode.has(TITLE)) {
+ return schemaNode.get(TITLE).asText();
+ }
if ("#".equals(ref)) {
return nodeName;
}
diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java
new file mode 100644
index 00000000..6411e886
--- /dev/null
+++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/RefNameHelper.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.generator;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.sun.codemodel.JClassAlreadyExistsException;
+import com.sun.codemodel.JDefinedClass;
+import com.sun.codemodel.JPackage;
+import org.jsonschema2pojo.GenerationConfig;
+import org.jsonschema2pojo.util.NameHelper;
+
+public class RefNameHelper extends NameHelper {
+
+ public RefNameHelper(GenerationConfig generationConfig) {
+ super(generationConfig);
+ }
+
+ @Override
+ public String getUniqueClassName(String nodeName, JsonNode node, JPackage _package) {
+ String className = getClassName(nodeName, node, _package);
+ try {
+ JDefinedClass _class = _package._class(className);
+ _package.remove(_class);
+ return className;
+ } catch (JClassAlreadyExistsException ex) {
+ return super.getUniqueClassName(nodeName, null, _package);
+ }
+ }
+}
diff --git a/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java b/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java
index 01263033..f101fb8d 100644
--- a/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java
+++ b/custom-generator/src/main/java/io/serverlessworkflow/generator/UnreferencedFactory.java
@@ -18,10 +18,25 @@
import com.sun.codemodel.JClassContainer;
import com.sun.codemodel.JDefinedClass;
import com.sun.codemodel.JType;
+import org.jsonschema2pojo.GenerationConfig;
import org.jsonschema2pojo.rules.Rule;
import org.jsonschema2pojo.rules.RuleFactory;
+import org.jsonschema2pojo.util.NameHelper;
public class UnreferencedFactory extends RuleFactory {
+
+ private NameHelper refNameHelper;
+
+ public UnreferencedFactory() {
+ this.refNameHelper = new RefNameHelper(getGenerationConfig());
+ }
+
+ @Override
+ public void setGenerationConfig(final GenerationConfig generationConfig) {
+ super.setGenerationConfig(generationConfig);
+ this.refNameHelper = new RefNameHelper(generationConfig);
+ }
+
@Override
public Rule getSchemaRule() {
return new AllAnyOneOfSchemaRule(this);
@@ -36,4 +51,9 @@ public Rule getTypeRule() {
public Rule getAdditionalPropertiesRule() {
return new UnevaluatedPropertiesRule(this);
}
+
+ @Override
+ public NameHelper getNameHelper() {
+ return refNameHelper;
+ }
}
diff --git a/examples/events/pom.xml b/examples/events/pom.xml
new file mode 100644
index 00000000..143a7967
--- /dev/null
+++ b/examples/events/pom.xml
@@ -0,0 +1,21 @@
+
+ 4.0.0
+
+ io.serverlessworkflow
+ serverlessworkflow-examples
+ 8.0.0-SNAPSHOT
+
+ Serverless Workflow :: Examples :: Events
+ serverlessworkflow-examples-events
+
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-core
+
+
+ org.slf4j
+ slf4j-simple
+
+
+
\ No newline at end of file
diff --git a/examples/events/src/main/java/events/EventExample.java b/examples/events/src/main/java/events/EventExample.java
new file mode 100644
index 00000000..628782fb
--- /dev/null
+++ b/examples/events/src/main/java/events/EventExample.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package events;
+
+import io.serverlessworkflow.api.WorkflowReader;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import io.serverlessworkflow.impl.WorkflowDefinition;
+import io.serverlessworkflow.impl.WorkflowInstance;
+import java.io.IOException;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class EventExample {
+
+ private static final Logger logger = LoggerFactory.getLogger(EventExample.class);
+
+ public static void main(String[] args) throws IOException {
+ try (WorkflowApplication appl = WorkflowApplication.builder().build()) {
+ WorkflowDefinition listenDefinition =
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("listen.yaml"));
+ WorkflowDefinition emitDefinition =
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("emit.yaml"));
+ WorkflowInstance waitingInstance = listenDefinition.instance(Map.of());
+ waitingInstance
+ .start()
+ .thenAccept(node -> logger.info("Waiting instance completed with result {}", node));
+ logger.info("Listen instance waiting for proper event, Status {}", waitingInstance.status());
+ logger.info("Publishing event with temperature 35");
+ emitDefinition.instance(Map.of("temperature", 35)).start().join();
+ logger.info(
+ "Listen instance still waiting for proper event, Status {}", waitingInstance.status());
+ logger.info("Publishing event with temperature 39");
+ emitDefinition.instance(Map.of("temperature", 39)).start().join();
+ }
+ }
+}
diff --git a/examples/events/src/main/resources/emit.yaml b/examples/events/src/main/resources/emit.yaml
new file mode 100644
index 00000000..4d14b030
--- /dev/null
+++ b/examples/events/src/main/resources/emit.yaml
@@ -0,0 +1,14 @@
+document:
+ dsl: '1.0.0-alpha5'
+ namespace: test
+ name: emit
+ version: '0.1.0'
+do:
+ - emitEvent:
+ emit:
+ event:
+ with:
+ source: https://hospital.com
+ type: com.fake-hospital.vitals.measurements.temperature
+ data:
+ temperature: ${.temperature}
\ No newline at end of file
diff --git a/examples/events/src/main/resources/listen.yaml b/examples/events/src/main/resources/listen.yaml
new file mode 100644
index 00000000..e49cea92
--- /dev/null
+++ b/examples/events/src/main/resources/listen.yaml
@@ -0,0 +1,13 @@
+document:
+ dsl: '1.0.0-alpha5'
+ namespace: examples
+ name: listen
+ version: '0.1.0'
+do:
+ - callDoctor:
+ listen:
+ to:
+ one:
+ with:
+ type: com.fake-hospital.vitals.measurements.temperature
+ data: ${ .temperature > 38 }
\ No newline at end of file
diff --git a/examples/pom.xml b/examples/pom.xml
new file mode 100644
index 00000000..238ee4b1
--- /dev/null
+++ b/examples/pom.xml
@@ -0,0 +1,35 @@
+
+ 4.0.0
+
+ io.serverlessworkflow
+ serverlessworkflow-parent
+ 8.0.0-SNAPSHOT
+
+ Serverless Workflow :: Examples
+ serverlessworkflow-examples
+ pom
+
+
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-core
+ ${project.version}
+
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-http
+ ${project.version}
+
+
+ org.slf4j
+ slf4j-simple
+ ${version.org.slf4j}
+
+
+
+
+ simpleGet
+ events
+
+
\ No newline at end of file
diff --git a/examples/simpleGet/pom.xml b/examples/simpleGet/pom.xml
new file mode 100644
index 00000000..923001ae
--- /dev/null
+++ b/examples/simpleGet/pom.xml
@@ -0,0 +1,25 @@
+
+ 4.0.0
+
+ io.serverlessworkflow
+ serverlessworkflow-examples
+ 8.0.0-SNAPSHOT
+
+ serverlessworkflow-examples-simpleGet
+ Serverless Workflow :: Examples :: SimpleGet
+
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-core
+
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-http
+
+
+ org.slf4j
+ slf4j-simple
+
+
+
\ No newline at end of file
diff --git a/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java
new file mode 100644
index 00000000..233d121f
--- /dev/null
+++ b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl;
+
+import io.serverlessworkflow.api.WorkflowReader;
+import java.io.IOException;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class BlockingExample {
+
+ private static final Logger logger = LoggerFactory.getLogger(BlockingExample.class);
+
+ public static void main(String[] args) throws IOException {
+ try (WorkflowApplication appl = WorkflowApplication.builder().build()) {
+ logger.info(
+ "Workflow output is {}",
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml"))
+ .instance(Map.of("petId", 10))
+ .start()
+ .join());
+ }
+ }
+}
diff --git a/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java
new file mode 100644
index 00000000..cb663c1a
--- /dev/null
+++ b/examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl;
+
+import io.serverlessworkflow.api.WorkflowReader;
+import java.io.IOException;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class NotBlockingExample {
+
+ private static final Logger logger = LoggerFactory.getLogger(NotBlockingExample.class);
+
+ public static void main(String[] args) throws IOException {
+ try (WorkflowApplication appl = WorkflowApplication.builder().build()) {
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml"))
+ .instance(Map.of("petId", 10))
+ .start()
+ .thenAccept(node -> logger.info("Workflow output is {}", node));
+ logger.info("The request has been sent, this thread might continue doing stuff");
+ }
+ }
+}
diff --git a/examples/simpleGet/src/main/resources/get.yaml b/examples/simpleGet/src/main/resources/get.yaml
new file mode 100644
index 00000000..7adf3132
--- /dev/null
+++ b/examples/simpleGet/src/main/resources/get.yaml
@@ -0,0 +1,11 @@
+document:
+ dsl: '1.0.0-alpha5'
+ namespace: examples
+ name: call-http-shorthand-endpoint
+ version: '0.1.0'
+do:
+ - getPet:
+ call: http
+ with:
+ method: get
+ endpoint: https://petstore.swagger.io/v2/pet/{petId}
diff --git a/impl/README.md b/impl/README.md
new file mode 100644
index 00000000..26655a02
--- /dev/null
+++ b/impl/README.md
@@ -0,0 +1,194 @@
+
+ [](https://gitpod.io/#https://github.com/serverlessworkflow/sdk-java)
+
+# Serverless Workflow Specification - Java SDK- Reference Implementation
+
+Welcome to the Java SDK runtime reference implementation, a lightweight implementation of the Serverless Workflow specification which provides a simple, non-blocking, reactive API for workflow execution.
+
+Although initially conceived mainly for testing purposes, it was designed to be easily expanded, so it can eventually become production-ready.
+
+## Status
+
+This reference implementation is currently capable of running workflows consisting of:
+
+
+* Tasks
+ * Switch
+ * Set
+ * Do
+ * Raise
+ * Listen
+ * Emit
+ * Fork
+ * For
+ * Try
+ * Wait
+ * Call
+ * HTTP
+* Schema Validation
+ * Input
+ * Output
+* Expressions
+ * Input
+ * Output
+ * Export
+ * Special keywords: runtime, workflow, task...
+* Error definitions
+
+
+## Setup
+
+Before getting started, ensure you have Java 17+ and Maven or Gradle installed.
+
+Install [Java 17](https://openjdk.org/projects/jdk/17/)
+Install [Maven](https://maven.apache.org/install.html) (if using Maven)
+Install [Gradle](https://gradle.org/install) (if using Gradle)
+
+### Dependencies
+
+This implementation follows a modular approach, keeping dependencies minimal:
+- The core library is always required.
+- Additional dependencies must be explicitly included if your workflow interacts with external services (e.g., HTTP).
+This ensures you only include what you need, preventing unnecessary dependencies.
+
+#### Maven
+
+You always need to add this dependency to your pom.xml `dependencies` section:
+
+```xml
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-core
+ 7.0.0.Final
+
+```
+
+And only if your workflow is using HTTP calls, you must add:
+
+```xml
+
+ io.serverlessworkflow
+ serverlessworkflow-impl-http
+ 7.0.0.Final
+
+```
+
+#### Gradle projects:
+
+You always need to add this dependency to your build.gradle `dependencies` section:
+
+```text
+implementation("io.serverlessworkflow:serverlessworkflow-impl-core:7.0.0.Final")
+```
+
+And only if your workflow is using HTTP calls, you must add:
+
+```text
+implementation("io.serverlessworkflow:serverlessworkflow-impl-http:7.0.0.Final")
+```
+
+## How to use
+
+The quick version is intended for impatient users who want to try something as soon as possible.
+
+The detailed version is more suitable for those users interested in a more thoughtful discussion of the API.
+
+### Quick version
+
+For a quick introduction, we will use a simple workflow [definition](../examples/simpleGet/src/main/resources/get.yaml) that performs a get call.
+We are going to show two ways of invoking the workflow:
+ - blocking the thread till the get request goes through
+ - returning control to the caller, so the main thread continues while the get is executed
+
+In order to execute the workflow, blocking the thread till the HTTP request is completed, you should write
+
+``` java
+try (WorkflowApplication appl = WorkflowApplication.builder().build()) {
+ logger.info(
+ "Workflow output is {}",
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml"))
+ .instance(Map.of("petId", 10))
+ .start()
+ .join());
+ }
+```
+You can find the complete java code [here](../examples/simpleGet/src/main/java/io/serverlessworkflow/impl/BlockingExample.java)
+
+In order to execute the workflow without blocking the calling thread till the HTTP request is completed, you should write
+
+``` java
+ try (WorkflowApplication appl = WorkflowApplication.builder().build()) {
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("get.yaml"))
+ .instance(Map.of("petId", 10))
+ .start()
+ .thenAccept(node -> logger.info("Workflow output is {}", node));
+ }
+```
+When the HTTP request is done, both examples will print a similar output
+
+
+```shell
+Workflow output is {"id":10,"category":{"id":10,"name":"string"},"name":"doggie","photoUrls":["string"],"tags":[{"id":10,"name":"string"}],"status":"string"}
+```
+
+You can find the complete java code [here](../examples/simpleGet/src/main/java/io/serverlessworkflow/impl/NotBlockingExample.java)
+
+### Detailed version
+
+To discuss the runtime API we are going to use a couple of workflows:
+- [listen.yaml](../examples/events/src/main/resources/listen.yaml), which waits for an event reporting a temperature greater than 38
+- [emit.yaml](../examples/events/src/main/resources/emit.yaml), which emits events with a certain temperature, specified as a workflow parameter.
+
+Here is a summary of what we are trying to do:
+
+- The listen.yaml workflow waits for an event (not-blocking).
+- We send an event with a low temperature (ignored).
+- We send an event with a high temperature (completes the workflow).
+
+The first step is to create a [WorkflowApplication](core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java) instance. An application is an abstraction that allows customization of different aspects of the workflow execution (for example, change the default `ExecutorService` for thread spawning)
+
+Since `WorkflowApplication` implements `AutoCloseable`, we better use a **try-with-resources** block, ensuring any resource that the workflow might have used is freed when done.
+
+`try (WorkflowApplication appl = WorkflowApplication.builder().build())`
+
+Once we have the application object, we use it to parse our definition examples. To load each workflow definition, we use the `readWorkflowFromClasspath` helper method defined in the [WorkflowReader](../api/src/main/java/io/serverlessworkflow/api/WorkflowReader.java) class.
+
+```java
+ WorkflowDefinition listenDefinition =
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("listen.yaml"));
+ WorkflowDefinition emitDefinition =
+ appl.workflowDefinition(WorkflowReader.readWorkflowFromClasspath("emit.yaml"));
+```
+
+A [WorkflowDefinition](core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java) object is immutable and, therefore, thread-safe. It is used to execute as many workflow instances as desired.
+
+To execute a workflow, we first create a [WorkflowInstance](core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java) object (its initial status is PENDING) and then invoke the `start` method on it (its status is changed to RUNNING). The `start` method returns a [CompletableFuture](https://docs.oracle.com/javase/8/docs/api/java/util/concurrent/CompletableFuture.html), which we use to indicate that a log message should be printed when the workflow is completed.
+
+```java
+ WorkflowInstance waitingInstance = listenDefinition.instance(Map.of());
+ waitingInstance
+ .start()
+ .thenAccept(node -> logger.info("Waiting instance completed with result {}", node));
+```
+
+As soon as the workflow execution reaches the point where it waits for events to arrive, control is returned to the calling thread. Since the execution is not blocking, we can execute another workflow instance while the first one is waiting.
+
+We will send an event with a temperature that does not satisfy the criteria, so the listen instance will continue waiting. We use a regular Java `Map` to pass parameters to the workflow instance that sends the event. Note that since we want to wait till the event is published, we call `join` after `start`, telling the `CompletableFuture` to wait for workflow completion.
+
+```java
+ emitDefinition.instance(Map.of("temperature", 35)).start().join();
+ ```
+
+ It's time to complete the waiting instance and send an event with the expected temperature. We do so by reusing `emitDefinition`.
+
+```java
+ emitDefinition.instance(Map.of("temperature", 39)).start().join();
+ ```
+
+After that, the listen instance will complete and we will see this log message
+
+```java
+[pool-1-thread-1] INFO events.EventExample - Waiting instance completed with result [{"temperature":39}]
+```
+The source code of the example is [here](../examples/events/src/main/java/events/EventExample.java)
+
diff --git a/impl/bom/pom.xml b/impl/bom/pom.xml
deleted file mode 100644
index 604a8300..00000000
--- a/impl/bom/pom.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-
- 4.0.0
-
- io.serverlessworkflow
- serverlessworkflow-impl
- 7.0.0-alpha5.1
-
- serverlessworkflow-impl-bom
- pom
-
-
- io.serverlessworkflow
- serverlessworkflow-impl-core
-
-
- io.serverlessworkflow
- serverlessworkflow-impl-http
-
-
-
\ No newline at end of file
diff --git a/impl/core/pom.xml b/impl/core/pom.xml
index 9fac9df6..a5fac29a 100644
--- a/impl/core/pom.xml
+++ b/impl/core/pom.xml
@@ -1,59 +1,63 @@
-
- 4.0.0
-
- io.serverlessworkflow
- serverlessworkflow-impl
- 7.0.0-alpha5.1
-
- serverlessworkflow-impl-core
-
- 1.1.0
- 5.2.3
-
-
-
- io.serverlessworkflow
- serverlessworkflow-api
- 7.0.0-alpha5.1
-
-
- com.github.f4b6a3
- ulid-creator
- ${version.com.github.f4b6a3}
-
-
- com.networknt
- json-schema-validator
-
-
- net.thisptr
- jackson-jq
- ${version.net.thisptr}
-
-
- org.junit.jupiter
- junit-jupiter-api
- test
-
-
- org.junit.jupiter
- junit-jupiter-engine
- test
-
-
- org.junit.jupiter
- junit-jupiter-params
- test
-
-
- org.assertj
- assertj-core
- test
-
-
- ch.qos.logback
- logback-classic
- test
-
-
+
+ 4.0.0
+
+ io.serverlessworkflow
+ serverlessworkflow-impl
+ 8.0.0-SNAPSHOT
+
+ serverlessworkflow-impl-core
+ Serverless Workflow :: Impl :: Core
+
+
+ io.serverlessworkflow
+ serverlessworkflow-api
+ ${project.version}
+
+
+ io.cloudevents
+ cloudevents-api
+
+
+ io.cloudevents
+ cloudevents-json-jackson
+
+
+ com.github.f4b6a3
+ ulid-creator
+
+
+ com.networknt
+ json-schema-validator
+
+
+ net.thisptr
+ jackson-jq
+
+
+ org.junit.jupiter
+ junit-jupiter-api
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-engine
+ test
+
+
+ org.junit.jupiter
+ junit-jupiter-params
+ test
+
+
+ org.assertj
+ assertj-core
+ test
+
+
+ ch.qos.logback
+ logback-classic
+ test
+
+
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java b/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java
new file mode 100644
index 00000000..1ac1f759
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/DefaultExecutorServiceFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+public class DefaultExecutorServiceFactory implements ExecutorServiceFactory {
+
+ private static final ExecutorServiceFactory instance = new DefaultExecutorServiceFactory();
+
+ public static ExecutorServiceFactory instance() {
+ return instance;
+ }
+
+ private static class ExecutorServiceHolder {
+ private static ExecutorService instance = Executors.newCachedThreadPool();
+ }
+
+ @Override
+ public ExecutorService get() {
+ return ExecutorServiceHolder.instance;
+ }
+
+ private DefaultExecutorServiceFactory() {}
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java
new file mode 100644
index 00000000..f899f186
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/ExpressionHolder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl;
+
+import java.util.function.BiFunction;
+
+public interface ExpressionHolder extends BiFunction {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java
index 91b1b6c5..cf5598e7 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/LongFilter.java
@@ -15,7 +15,5 @@
*/
package io.serverlessworkflow.impl;
-import java.util.function.BiFunction;
-
@FunctionalInterface
-public interface LongFilter extends BiFunction, Long> {}
+public interface LongFilter extends ExpressionHolder {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java
index 5d0a648e..2fbec647 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/StringFilter.java
@@ -15,7 +15,5 @@
*/
package io.serverlessworkflow.impl;
-import java.util.function.BiFunction;
-
@FunctionalInterface
-public interface StringFilter extends BiFunction, String> {}
+public interface StringFilter extends ExpressionHolder {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java b/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java
index dde5a315..4fc3d1f4 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/TaskContext.java
@@ -16,76 +16,64 @@
package io.serverlessworkflow.impl;
import com.fasterxml.jackson.databind.JsonNode;
-import io.serverlessworkflow.api.types.FlowDirective;
-import io.serverlessworkflow.api.types.FlowDirectiveEnum;
import io.serverlessworkflow.api.types.TaskBase;
+import io.serverlessworkflow.impl.executors.TransitionInfo;
import java.time.Instant;
import java.util.HashMap;
import java.util.Map;
+import java.util.Optional;
-public class TaskContext {
+public class TaskContext {
private final JsonNode rawInput;
- private final T task;
+ private final TaskBase task;
private final WorkflowPosition position;
private final Instant startedAt;
+ private final String taskName;
+ private final Map contextVariables;
+ private final Optional parentContext;
private JsonNode input;
private JsonNode output;
private JsonNode rawOutput;
- private FlowDirective flowDirective;
- private Map contextVariables;
private Instant completedAt;
+ private TransitionInfo transition;
- public TaskContext(JsonNode input, WorkflowPosition position) {
- this(input, null, position, Instant.now(), input, input, input, null, new HashMap<>());
- }
-
- public TaskContext(JsonNode input, TaskContext> taskContext, T task) {
- this(
- input,
- task,
- taskContext.position,
- Instant.now(),
- input,
- input,
- input,
- task.getThen(),
- new HashMap<>(taskContext.variables()));
+ public TaskContext(
+ JsonNode input,
+ WorkflowPosition position,
+ Optional parentContext,
+ String taskName,
+ TaskBase task) {
+ this(input, parentContext, taskName, task, position, Instant.now(), input, input, input);
}
private TaskContext(
JsonNode rawInput,
- T task,
+ Optional parentContext,
+ String taskName,
+ TaskBase task,
WorkflowPosition position,
Instant startedAt,
JsonNode input,
JsonNode output,
- JsonNode rawOutput,
- FlowDirective flowDirective,
- Map contextVariables) {
+ JsonNode rawOutput) {
this.rawInput = rawInput;
+ this.parentContext = parentContext;
+ this.taskName = taskName;
this.task = task;
this.position = position;
this.startedAt = startedAt;
this.input = input;
this.output = output;
this.rawOutput = rawOutput;
- this.flowDirective = flowDirective;
- this.contextVariables = contextVariables;
+ this.contextVariables =
+ parentContext.map(p -> new HashMap<>(p.contextVariables)).orElseGet(HashMap::new);
}
- public TaskContext copy() {
- return new TaskContext(
- rawInput,
- task,
- position.copy(),
- startedAt,
- input,
- output,
- rawOutput,
- flowDirective,
- new HashMap<>(contextVariables));
+ public TaskContext copy() {
+ return new TaskContext(
+ rawInput, parentContext, taskName, task, position, startedAt, input, output, rawOutput);
}
public void input(JsonNode input) {
@@ -102,54 +90,64 @@ public JsonNode rawInput() {
return rawInput;
}
- public T task() {
+ public TaskBase task() {
return task;
}
- public void rawOutput(JsonNode output) {
+ public TaskContext rawOutput(JsonNode output) {
this.rawOutput = output;
this.output = output;
+ return this;
}
public JsonNode rawOutput() {
return rawOutput;
}
- public void output(JsonNode output) {
+ public TaskContext output(JsonNode output) {
this.output = output;
+ return this;
}
public JsonNode output() {
return output;
}
- public void flowDirective(FlowDirective flowDirective) {
- this.flowDirective = flowDirective;
- }
-
- public FlowDirective flowDirective() {
- return flowDirective == null
- ? new FlowDirective().withFlowDirectiveEnum(FlowDirectiveEnum.CONTINUE)
- : flowDirective;
+ public WorkflowPosition position() {
+ return position;
}
public Map variables() {
return contextVariables;
}
- public WorkflowPosition position() {
- return position;
- }
-
public Instant startedAt() {
return startedAt;
}
- public void completedAt(Instant instant) {
+ public Optional parent() {
+ return parentContext;
+ }
+
+ public String taskName() {
+ return taskName;
+ }
+
+ public TaskContext completedAt(Instant instant) {
this.completedAt = instant;
+ return this;
}
public Instant completedAt() {
return completedAt;
}
+
+ public TransitionInfo transition() {
+ return transition;
+ }
+
+ public TaskContext transition(TransitionInfo transition) {
+ this.transition = transition;
+ return this;
+ }
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java
index f36c23f6..b998c57d 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowApplication.java
@@ -18,6 +18,9 @@
import com.github.f4b6a3.ulid.UlidCreator;
import io.serverlessworkflow.api.types.Document;
import io.serverlessworkflow.api.types.Workflow;
+import io.serverlessworkflow.impl.events.EventConsumer;
+import io.serverlessworkflow.impl.events.EventPublisher;
+import io.serverlessworkflow.impl.events.InMemoryEvents;
import io.serverlessworkflow.impl.executors.DefaultTaskExecutorFactory;
import io.serverlessworkflow.impl.executors.TaskExecutorFactory;
import io.serverlessworkflow.impl.expressions.ExpressionFactory;
@@ -47,29 +50,22 @@ public class WorkflowApplication implements AutoCloseable {
private final WorkflowPositionFactory positionFactory;
private final ExecutorServiceFactory executorFactory;
private final RuntimeDescriptorFactory runtimeDescriptorFactory;
-
- private ExecutorService executorService;
-
- public WorkflowApplication(
- TaskExecutorFactory taskFactory,
- ExpressionFactory exprFactory,
- ResourceLoaderFactory resourceLoaderFactory,
- SchemaValidatorFactory schemaValidatorFactory,
- WorkflowPositionFactory positionFactory,
- WorkflowIdFactory idFactory,
- RuntimeDescriptorFactory runtimeDescriptorFactory,
- ExecutorServiceFactory executorFactory,
- Collection listeners) {
- this.taskFactory = taskFactory;
- this.exprFactory = exprFactory;
- this.resourceLoaderFactory = resourceLoaderFactory;
- this.schemaValidatorFactory = schemaValidatorFactory;
- this.positionFactory = positionFactory;
- this.idFactory = idFactory;
- this.runtimeDescriptorFactory = runtimeDescriptorFactory;
- this.executorFactory = executorFactory;
- this.listeners = listeners;
+ private final EventConsumer, ?> eventConsumer;
+ private final EventPublisher eventPublisher;
+
+ private WorkflowApplication(Builder builder) {
+ this.taskFactory = builder.taskFactory;
+ this.exprFactory = builder.exprFactory;
+ this.resourceLoaderFactory = builder.resourceLoaderFactory;
+ this.schemaValidatorFactory = builder.schemaValidatorFactory;
+ this.positionFactory = builder.positionFactory;
+ this.idFactory = builder.idFactory;
+ this.runtimeDescriptorFactory = builder.descriptorFactory;
+ this.executorFactory = builder.executorFactory;
+ this.listeners = builder.listeners != null ? builder.listeners : Collections.emptySet();
this.definitions = new ConcurrentHashMap<>();
+ this.eventConsumer = builder.eventConsumer;
+ this.eventPublisher = builder.eventPublisher;
}
public TaskExecutorFactory taskFactory() {
@@ -96,6 +92,10 @@ public Collection listeners() {
return listeners;
}
+ public EventPublisher eventPublisher() {
+ return eventPublisher;
+ }
+
public WorkflowIdFactory idFactory() {
return idFactory;
}
@@ -109,6 +109,8 @@ public static class Builder {
private WorkflowPositionFactory positionFactory = () -> new QueueWorkflowPosition();
private WorkflowIdFactory idFactory = () -> UlidCreator.getMonotonicUlid().toString();
private ExecutorServiceFactory executorFactory = () -> Executors.newCachedThreadPool();
+ private EventConsumer, ?> eventConsumer = InMemoryEvents.get();
+ private EventPublisher eventPublisher = InMemoryEvents.get();
private RuntimeDescriptorFactory descriptorFactory =
() -> new RuntimeDescriptor("reference impl", "1.0.0_alpha", Collections.emptyMap());
@@ -162,19 +164,18 @@ public Builder withDescriptorFactory(RuntimeDescriptorFactory factory) {
return this;
}
+ public Builder withEventConsumer(EventConsumer, ?> eventConsumer) {
+ this.eventConsumer = eventConsumer;
+ return this;
+ }
+
+ public Builder withEventPublisher(EventPublisher eventPublisher) {
+ this.eventPublisher = eventPublisher;
+ return this;
+ }
+
public WorkflowApplication build() {
- return new WorkflowApplication(
- taskFactory,
- exprFactory,
- resourceLoaderFactory,
- schemaValidatorFactory,
- positionFactory,
- idFactory,
- descriptorFactory,
- executorFactory,
- listeners == null
- ? Collections.emptySet()
- : Collections.unmodifiableCollection(listeners));
+ return new WorkflowApplication(this);
}
}
@@ -190,7 +191,7 @@ public WorkflowDefinition workflowDefinition(Workflow workflow) {
}
@Override
- public void close() throws Exception {
+ public void close() {
for (WorkflowDefinition definition : definitions.values()) {
definition.close();
}
@@ -205,12 +206,12 @@ public RuntimeDescriptorFactory runtimeDescriptorFactory() {
return runtimeDescriptorFactory;
}
+ @SuppressWarnings("rawtypes")
+ public EventConsumer eventConsumer() {
+ return eventConsumer;
+ }
+
public ExecutorService executorService() {
- synchronized (executorFactory) {
- if (executorService == null) {
- executorService = executorFactory.get();
- }
- }
- return executorService;
+ return executorFactory.get();
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java
index f45f1b84..96890c8b 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowContext.java
@@ -20,6 +20,7 @@
public class WorkflowContext {
private final WorkflowDefinition definition;
private final WorkflowInstance instance;
+ private JsonNode context;
WorkflowContext(WorkflowDefinition definition, WorkflowInstance instance) {
this.definition = definition;
@@ -31,11 +32,11 @@ public WorkflowInstance instance() {
}
public JsonNode context() {
- return instance.context();
+ return context;
}
public void context(JsonNode context) {
- this.instance.context(context);
+ this.context = context;
}
public WorkflowDefinition definition() {
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java
index df5b70e1..1a789616 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowDefinition.java
@@ -19,21 +19,15 @@
import io.serverlessworkflow.api.types.Input;
import io.serverlessworkflow.api.types.Output;
-import io.serverlessworkflow.api.types.TaskBase;
import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.executors.TaskExecutor;
-import io.serverlessworkflow.impl.executors.TaskExecutorFactory;
-import io.serverlessworkflow.impl.expressions.ExpressionFactory;
+import io.serverlessworkflow.impl.executors.TaskExecutorHelper;
import io.serverlessworkflow.impl.json.JsonUtils;
import io.serverlessworkflow.impl.jsonschema.SchemaValidator;
-import io.serverlessworkflow.impl.jsonschema.SchemaValidatorFactory;
import io.serverlessworkflow.impl.resources.ResourceLoader;
import java.nio.file.Path;
import java.util.Collection;
-import java.util.Map;
import java.util.Optional;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ExecutorService;
public class WorkflowDefinition implements AutoCloseable {
@@ -42,16 +36,13 @@ public class WorkflowDefinition implements AutoCloseable {
private Optional outputSchemaValidator = Optional.empty();
private Optional inputFilter = Optional.empty();
private Optional outputFilter = Optional.empty();
- private final Map> taskExecutors =
- new ConcurrentHashMap<>();
- private final ResourceLoader resourceLoader;
private final WorkflowApplication application;
+ private final TaskExecutor> taskExecutor;
private WorkflowDefinition(
WorkflowApplication application, Workflow workflow, ResourceLoader resourceLoader) {
this.workflow = workflow;
this.application = application;
- this.resourceLoader = resourceLoader;
if (workflow.getInput() != null) {
Input input = workflow.getInput();
this.inputSchemaValidator =
@@ -64,6 +55,13 @@ private WorkflowDefinition(
getSchemaValidator(application.validatorFactory(), resourceLoader, output.getSchema());
this.outputFilter = buildWorkflowFilter(application.expressionFactory(), output.getAs());
}
+ this.taskExecutor =
+ TaskExecutorHelper.createExecutorList(
+ application.positionFactory().get(),
+ workflow.getDo(),
+ workflow,
+ application,
+ resourceLoader);
}
static WorkflowDefinition of(WorkflowApplication application, Workflow workflow) {
@@ -75,15 +73,19 @@ static WorkflowDefinition of(WorkflowApplication application, Workflow workflow,
application, workflow, application.resourceLoaderFactory().getResourceLoader(path));
}
- public WorkflowInstance execute(Object input) {
+ public WorkflowInstance instance(Object input) {
return new WorkflowInstance(this, JsonUtils.fromValue(input));
}
- public Optional inputSchemaValidator() {
+ Optional inputSchemaValidator() {
return inputSchemaValidator;
}
- public Optional inputFilter() {
+ TaskExecutor> startTask() {
+ return taskExecutor;
+ }
+
+ Optional inputFilter() {
return inputFilter;
}
@@ -95,51 +97,22 @@ public Collection listeners() {
return application.listeners();
}
- public Map> taskExecutors() {
- return taskExecutors;
- }
-
- public TaskExecutorFactory taskFactory() {
- return application.taskFactory();
- }
-
- public Optional outputFilter() {
+ Optional outputFilter() {
return outputFilter;
}
- public WorkflowIdFactory idFactory() {
- return application.idFactory();
- }
-
- public Optional outputSchemaValidator() {
+ Optional outputSchemaValidator() {
return outputSchemaValidator;
}
- public ExpressionFactory expressionFactory() {
- return application.expressionFactory();
- }
-
- public SchemaValidatorFactory validatorFactory() {
- return application.validatorFactory();
- }
-
- public ResourceLoader resourceLoader() {
-
- return resourceLoader;
- }
-
- public WorkflowPositionFactory positionFactory() {
- return application.positionFactory();
- }
-
- public ExecutorService executorService() {
- return application.executorService();
- }
-
public RuntimeDescriptorFactory runtimeDescriptorFactory() {
return application.runtimeDescriptorFactory();
}
+ public WorkflowApplication application() {
+ return application;
+ }
+
@Override
public void close() {
// TODO close resourcers hold for uncompleted process instances, if any
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java
index 1823be94..b72cdbb0 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowError.java
@@ -26,13 +26,13 @@ public static Builder error(String type, int status) {
return new Builder(type, status);
}
- public static Builder communication(int status, TaskContext> context, Exception ex) {
+ public static Builder communication(int status, TaskContext context, Exception ex) {
return new Builder(COMM_TYPE, status)
.instance(context.position().jsonPointer())
.title(ex.getMessage());
}
- public static Builder runtime(int status, TaskContext> context, Exception ex) {
+ public static Builder runtime(int status, TaskContext context, Exception ex) {
return new Builder(RUNTIME_TYPE, status)
.instance(context.position().jsonPointer())
.title(ex.getMessage());
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java
index 7d25df48..4475cacd 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowFilter.java
@@ -19,5 +19,5 @@
@FunctionalInterface
public interface WorkflowFilter {
- JsonNode apply(WorkflowContext workflow, TaskContext> task, JsonNode node);
+ JsonNode apply(WorkflowContext workflow, TaskContext task, JsonNode node);
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java
index f81a6f24..2e55c484 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowInstance.java
@@ -15,42 +15,62 @@
*/
package io.serverlessworkflow.impl;
-import static io.serverlessworkflow.impl.json.JsonUtils.toJavaValue;
-
import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.node.NullNode;
import io.serverlessworkflow.impl.executors.TaskExecutorHelper;
+import io.serverlessworkflow.impl.json.JsonUtils;
import java.time.Instant;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;
public class WorkflowInstance {
private final AtomicReference status;
- private final TaskContext> taskContext;
private final String id;
private final JsonNode input;
- private final Instant startedAt;
- private final AtomicReference context;
+
+ private WorkflowContext workflowContext;
+ private WorkflowDefinition definition;
+ private Instant startedAt;
+ private Instant completedAt;
+ private volatile JsonNode output;
+ private CompletableFuture completableFuture;
WorkflowInstance(WorkflowDefinition definition, JsonNode input) {
- this.id = definition.idFactory().get();
+ this.id = definition.application().idFactory().get();
this.input = input;
+ this.definition = definition;
+ this.status = new AtomicReference<>(WorkflowStatus.PENDING);
definition.inputSchemaValidator().ifPresent(v -> v.validate(input));
+ }
+
+ public CompletableFuture start() {
this.startedAt = Instant.now();
- WorkflowContext workflowContext = new WorkflowContext(definition, this);
- taskContext = new TaskContext<>(input, definition.positionFactory().get());
- definition
- .inputFilter()
- .ifPresent(f -> taskContext.input(f.apply(workflowContext, taskContext, input)));
- status = new AtomicReference<>(WorkflowStatus.RUNNING);
- context = new AtomicReference<>(NullNode.getInstance());
- TaskExecutorHelper.processTaskList(definition.workflow().getDo(), workflowContext, taskContext);
- definition
- .outputFilter()
- .ifPresent(
- f ->
- taskContext.output(f.apply(workflowContext, taskContext, taskContext.rawOutput())));
- definition.outputSchemaValidator().ifPresent(v -> v.validate(taskContext.output()));
+ this.workflowContext = new WorkflowContext(definition, this);
+ this.status.set(WorkflowStatus.RUNNING);
+ this.completableFuture =
+ TaskExecutorHelper.processTaskList(
+ definition.startTask(),
+ workflowContext,
+ Optional.empty(),
+ definition
+ .inputFilter()
+ .map(f -> f.apply(workflowContext, null, input))
+ .orElse(input))
+ .thenApply(this::whenCompleted);
+ return completableFuture;
+ }
+
+ private JsonNode whenCompleted(JsonNode node) {
+ output =
+ workflowContext
+ .definition()
+ .outputFilter()
+ .map(f -> f.apply(workflowContext, null, node))
+ .orElse(node);
+ workflowContext.definition().outputSchemaValidator().ifPresent(v -> v.validate(output));
status.compareAndSet(WorkflowStatus.RUNNING, WorkflowStatus.COMPLETED);
+ completedAt = Instant.now();
+ return output;
}
public String id() {
@@ -61,12 +81,12 @@ public Instant startedAt() {
return startedAt;
}
- public JsonNode input() {
- return input;
+ public Instant completedAt() {
+ return completedAt;
}
- public JsonNode context() {
- return context.get();
+ public JsonNode input() {
+ return input;
}
public WorkflowStatus status() {
@@ -78,14 +98,10 @@ public void status(WorkflowStatus state) {
}
public Object output() {
- return toJavaValue(taskContext.output());
+ return JsonUtils.toJavaValue(outputAsJsonNode());
}
public JsonNode outputAsJsonNode() {
- return taskContext.output();
- }
-
- void context(JsonNode context) {
- this.context.set(context);
+ return output;
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java
index 0866ba05..5feaf04e 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/WorkflowUtils.java
@@ -24,6 +24,7 @@
import io.serverlessworkflow.api.types.SchemaExternal;
import io.serverlessworkflow.api.types.SchemaInline;
import io.serverlessworkflow.api.types.SchemaUnion;
+import io.serverlessworkflow.api.types.UriTemplate;
import io.serverlessworkflow.impl.expressions.Expression;
import io.serverlessworkflow.impl.expressions.ExpressionFactory;
import io.serverlessworkflow.impl.expressions.ExpressionUtils;
@@ -35,8 +36,10 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
+import java.net.URI;
import java.util.Map;
import java.util.Optional;
+import java.util.function.Function;
public class WorkflowUtils {
@@ -81,6 +84,25 @@ public static Optional buildWorkflowFilter(
: Optional.empty();
}
+ public static ExpressionHolder buildExpressionHolder(
+ ExpressionFactory exprFactory,
+ String expression,
+ T literal,
+ Function converter) {
+ return expression != null
+ ? buildExpressionHolder(buildWorkflowFilter(exprFactory, expression), converter)
+ : buildExpressionHolder(literal);
+ }
+
+ private static ExpressionHolder buildExpressionHolder(
+ WorkflowFilter filter, Function converter) {
+ return (w, t) -> converter.apply(filter.apply(w, t, t.input()));
+ }
+
+ private static ExpressionHolder buildExpressionHolder(T literal) {
+ return (w, t) -> literal;
+ }
+
public static Optional buildWorkflowFilter(
ExpressionFactory exprFactory, ExportAs as) {
return as != null
@@ -109,7 +131,7 @@ private static StringFilter toString(String literal) {
return (w, t) -> literal;
}
- private static WorkflowFilter buildWorkflowFilter(
+ public static WorkflowFilter buildWorkflowFilter(
ExpressionFactory exprFactory, String str, Object object) {
if (str != null) {
return buildWorkflowFilter(exprFactory, str);
@@ -148,4 +170,9 @@ public static WorkflowFilter buildWorkflowFilter(ExpressionFactory exprFactory,
public static Optional optionalFilter(ExpressionFactory exprFactory, String str) {
return str != null ? Optional.of(buildWorkflowFilter(exprFactory, str)) : Optional.empty();
}
+
+ public static String toString(UriTemplate template) {
+ URI uri = template.getLiteralUri();
+ return uri != null ? uri.toString() : template.getLiteralUriTemplate();
+ }
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java
new file mode 100644
index 00000000..a3222342
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/AbstractTypeConsumer.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+import io.serverlessworkflow.api.types.EventFilter;
+import io.serverlessworkflow.api.types.EventProperties;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import java.util.AbstractCollection;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.function.Consumer;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class AbstractTypeConsumer
+ implements EventConsumer {
+
+ private static final Logger logger = LoggerFactory.getLogger(AbstractTypeConsumer.class);
+
+ protected abstract void registerToAll(Consumer consumer);
+
+ protected abstract void unregisterFromAll();
+
+ protected abstract void register(String topicName, Consumer consumer);
+
+ protected abstract void unregister(String topicName);
+
+ private Map registrations = new ConcurrentHashMap<>();
+
+ @Override
+ public TypeEventRegistrationBuilder listen(
+ EventFilter register, WorkflowApplication application) {
+ EventProperties properties = register.getWith();
+ String type = properties.getType();
+ return new TypeEventRegistrationBuilder(
+ type, new DefaultCloudEventPredicate(properties, application.expressionFactory()));
+ }
+
+ @Override
+ public Collection listenToAll(WorkflowApplication application) {
+ return List.of(new TypeEventRegistrationBuilder(null, null));
+ }
+
+ private static class CloudEventConsumer extends AbstractCollection
+ implements Consumer {
+ private Collection registrations = new CopyOnWriteArrayList<>();
+
+ @Override
+ public void accept(CloudEvent ce) {
+ logger.debug("Received cloud event {}", ce);
+ for (TypeEventRegistration registration : registrations) {
+ if (registration.predicate().test(ce)) {
+ registration.consumer().accept(ce);
+ }
+ }
+ }
+
+ @Override
+ public boolean add(TypeEventRegistration registration) {
+ return registrations.add(registration);
+ }
+
+ @Override
+ public boolean remove(Object registration) {
+ return registrations.remove(registration);
+ }
+
+ @Override
+ public Iterator iterator() {
+ return registrations.iterator();
+ }
+
+ @Override
+ public int size() {
+ return registrations.size();
+ }
+ }
+
+ public TypeEventRegistration register(
+ TypeEventRegistrationBuilder builder, Consumer ce) {
+ if (builder.type() == null) {
+ registerToAll(ce);
+ return new TypeEventRegistration(null, ce, null);
+ } else {
+ TypeEventRegistration registration =
+ new TypeEventRegistration(builder.type(), ce, builder.cePredicate());
+ registrations
+ .computeIfAbsent(
+ registration.type(),
+ k -> {
+ CloudEventConsumer consumer = new CloudEventConsumer();
+ register(k, consumer);
+ return consumer;
+ })
+ .add(registration);
+ return registration;
+ }
+ }
+
+ @Override
+ public void unregister(TypeEventRegistration registration) {
+ if (registration.type() == null) {
+ unregisterFromAll();
+ } else {
+ registrations.computeIfPresent(
+ registration.type(),
+ (k, v) -> {
+ v.remove(registration);
+ if (v.isEmpty()) {
+ unregister(registration.type());
+ return null;
+ } else {
+ return v;
+ }
+ });
+ }
+ }
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java
new file mode 100644
index 00000000..6029d484
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventAttrPredicate.java
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+@FunctionalInterface
+public interface CloudEventAttrPredicate {
+ boolean test(T value);
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java
new file mode 100644
index 00000000..a790e371
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventPredicate.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+
+public interface CloudEventPredicate {
+ boolean test(CloudEvent event);
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java
new file mode 100644
index 00000000..1b2709b8
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/CloudEventUtils.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.NullNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import io.cloudevents.CloudEvent;
+import io.cloudevents.CloudEventData;
+import io.cloudevents.core.builder.CloudEventBuilder;
+import io.cloudevents.jackson.JsonCloudEventData;
+import io.serverlessworkflow.impl.json.JsonUtils;
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.time.OffsetDateTime;
+import java.time.ZoneOffset;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.Map;
+
+public class CloudEventUtils {
+
+ public static JsonNode toJsonNode(CloudEvent event) {
+ ObjectNode result = JsonUtils.mapper().createObjectNode();
+ if (event.getData() != null) {
+ result.set("data", toJsonNode(event.getData()));
+ }
+ if (event.getSubject() != null) {
+ result.put("subject", event.getSubject());
+ }
+ if (event.getDataContentType() != null) {
+ result.put("datacontenttype", event.getDataContentType());
+ }
+ result.put("id", event.getId());
+ result.put("source", event.getSource().toString());
+ result.put("type", event.getType());
+ result.put("specversion", event.getSpecVersion().toString());
+ if (event.getDataSchema() != null) {
+ result.put("dataschema", event.getDataSchema().toString());
+ }
+ if (event.getTime() != null) {
+ result.put("time", event.getTime().toString());
+ }
+ event
+ .getExtensionNames()
+ .forEach(n -> result.set(n, JsonUtils.fromValue(event.getExtension(n))));
+ return result;
+ }
+
+ public static OffsetDateTime toOffset(Date date) {
+ return date.toInstant().atOffset(ZoneOffset.UTC);
+ }
+
+ public static CloudEventBuilder addExtension(
+ CloudEventBuilder builder, String name, JsonNode value) {
+ if (value.isTextual()) {
+ builder.withExtension(name, value.asText());
+ } else if (value.isBoolean()) {
+ builder.withExtension(name, value.isBoolean());
+ } else if (value.isNumber()) {
+ builder.withExtension(name, value.numberValue());
+ }
+ return builder;
+ }
+
+ public static JsonNode toJsonNode(CloudEventData data) {
+ if (data == null) {
+ return NullNode.instance;
+ }
+ try {
+ return data instanceof JsonCloudEventData
+ ? ((JsonCloudEventData) data).getNode()
+ : JsonUtils.mapper().readTree(data.toBytes());
+ } catch (IOException io) {
+ throw new UncheckedIOException(io);
+ }
+ }
+
+ public static Map<String, Object> extensions(CloudEvent event) {
+ Map<String, Object> result = new LinkedHashMap<>();
+ for (String name : event.getExtensionNames()) {
+ result.put(name, event.getExtension(name));
+ }
+ return result;
+ }
+
+ private CloudEventUtils() {}
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java
new file mode 100644
index 00000000..6eb35995
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/DefaultCloudEventPredicate.java
@@ -0,0 +1,154 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import io.cloudevents.CloudEvent;
+import io.serverlessworkflow.api.types.EventData;
+import io.serverlessworkflow.api.types.EventDataschema;
+import io.serverlessworkflow.api.types.EventProperties;
+import io.serverlessworkflow.api.types.EventSource;
+import io.serverlessworkflow.api.types.EventTime;
+import io.serverlessworkflow.api.types.UriTemplate;
+import io.serverlessworkflow.impl.WorkflowFilter;
+import io.serverlessworkflow.impl.WorkflowUtils;
+import io.serverlessworkflow.impl.expressions.Expression;
+import io.serverlessworkflow.impl.expressions.ExpressionFactory;
+import io.serverlessworkflow.impl.json.JsonUtils;
+import java.net.URI;
+import java.time.OffsetDateTime;
+import java.util.Map;
+import java.util.Objects;
+
+public class DefaultCloudEventPredicate implements CloudEventPredicate {
+
+ private final CloudEventAttrPredicate<String> idFilter;
+ private final CloudEventAttrPredicate<URI> sourceFilter;
+ private final CloudEventAttrPredicate<String> subjectFilter;
+ private final CloudEventAttrPredicate<String> contentTypeFilter;
+ private final CloudEventAttrPredicate<String> typeFilter;
+ private final CloudEventAttrPredicate<URI> dataSchemaFilter;
+ private final CloudEventAttrPredicate<OffsetDateTime> timeFilter;
+ private final CloudEventAttrPredicate<JsonNode> dataFilter;
+ private final CloudEventAttrPredicate<JsonNode> additionalFilter;
+
+ private static final <T> CloudEventAttrPredicate<T> isTrue() {
+ return x -> true;
+ }
+
+ public DefaultCloudEventPredicate(EventProperties properties, ExpressionFactory exprFactory) {
+ idFilter = stringFilter(properties.getId());
+ subjectFilter = stringFilter(properties.getSubject());
+ typeFilter = stringFilter(properties.getType());
+ contentTypeFilter = stringFilter(properties.getDatacontenttype());
+ sourceFilter = sourceFilter(properties.getSource(), exprFactory);
+ dataSchemaFilter = dataSchemaFilter(properties.getDataschema(), exprFactory);
+ timeFilter = offsetTimeFilter(properties.getTime(), exprFactory);
+ dataFilter = dataFilter(properties.getData(), exprFactory);
+ additionalFilter = additionalFilter(properties.getAdditionalProperties(), exprFactory);
+ }
+
+ private CloudEventAttrPredicate<JsonNode> additionalFilter(
+ Map<String, Object> additionalProperties, ExpressionFactory exprFactory) {
+ return additionalProperties != null && !additionalProperties.isEmpty()
+ ? from(WorkflowUtils.buildWorkflowFilter(exprFactory, null, additionalProperties))
+ : isTrue();
+ }
+
+ private CloudEventAttrPredicate<JsonNode> from(WorkflowFilter filter) {
+ return d -> filter.apply(null, null, d).asBoolean();
+ }
+
+ private CloudEventAttrPredicate<JsonNode> dataFilter(
+ EventData data, ExpressionFactory exprFactory) {
+ return data != null
+ ? from(
+ WorkflowUtils.buildWorkflowFilter(
+ exprFactory, data.getRuntimeExpression(), data.getObject()))
+ : isTrue();
+ }
+
+ private CloudEventAttrPredicate<OffsetDateTime> offsetTimeFilter(
+ EventTime time, ExpressionFactory exprFactory) {
+ if (time != null) {
+ if (time.getRuntimeExpression() != null) {
+ final Expression expr = exprFactory.getExpression(time.getRuntimeExpression());
+ return s -> evalExpr(expr, toString(s));
+ } else if (time.getLiteralTime() != null) {
+ return s -> Objects.equals(s, CloudEventUtils.toOffset(time.getLiteralTime()));
+ }
+ }
+ return isTrue();
+ }
+
+ private CloudEventAttrPredicate<URI> dataSchemaFilter(
+ EventDataschema dataSchema, ExpressionFactory exprFactory) {
+ if (dataSchema != null) {
+ if (dataSchema.getExpressionDataSchema() != null) {
+ final Expression expr = exprFactory.getExpression(dataSchema.getExpressionDataSchema());
+ return s -> evalExpr(expr, toString(s));
+ } else if (dataSchema.getLiteralDataSchema() != null) {
+ return templateFilter(dataSchema.getLiteralDataSchema());
+ }
+ }
+ return isTrue();
+ }
+
+ private CloudEventAttrPredicate<String> stringFilter(String str) {
+ return str == null ? isTrue() : x -> x.equals(str);
+ }
+
+ private CloudEventAttrPredicate<URI> sourceFilter(
+ EventSource source, ExpressionFactory exprFactory) {
+ if (source != null) {
+ if (source.getRuntimeExpression() != null) {
+ final Expression expr = exprFactory.getExpression(source.getRuntimeExpression());
+ return s -> evalExpr(expr, toString(s));
+ } else if (source.getUriTemplate() != null) {
+ return templateFilter(source.getUriTemplate());
+ }
+ }
+ return isTrue();
+ }
+
+ private CloudEventAttrPredicate<URI> templateFilter(UriTemplate template) {
+ if (template.getLiteralUri() != null) {
+ return u -> Objects.equals(u, template.getLiteralUri());
+ }
+ throw new UnsupportedOperationException("Template not supported here yet");
+ }
+
+ private <T> String toString(T uri) {
+ return uri != null ? uri.toString() : null;
+ }
+
+ private <T> boolean evalExpr(Expression expr, T value) {
+ return expr.eval(null, null, JsonUtils.fromValue(value)).asBoolean();
+ }
+
+ @Override
+ public boolean test(CloudEvent event) {
+ return idFilter.test(event.getId())
+ && sourceFilter.test(event.getSource())
+ && subjectFilter.test(event.getSubject())
+ && contentTypeFilter.test(event.getDataContentType())
+ && typeFilter.test(event.getType())
+ && dataSchemaFilter.test(event.getDataSchema())
+ && timeFilter.test(event.getTime())
+ && dataFilter.test(CloudEventUtils.toJsonNode(event.getData()))
+ && additionalFilter.test(JsonUtils.fromValue(CloudEventUtils.extensions(event)));
+ }
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java
new file mode 100644
index 00000000..00c1619e
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventConsumer.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+import io.serverlessworkflow.api.types.EventFilter;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import java.util.Collection;
+import java.util.function.Consumer;
+
+public interface EventConsumer<T extends EventRegistration, V extends EventRegistrationBuilder> {
+
+ V listen(EventFilter filter, WorkflowApplication workflowApplication);
+
+ Collection<V> listenToAll(WorkflowApplication workflowApplication);
+
+ T register(V builder, Consumer<CloudEvent> consumer);
+
+ void unregister(T register);
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java
new file mode 100644
index 00000000..08cc121d
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventPublisher.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+import java.util.concurrent.CompletableFuture;
+
+public interface EventPublisher {
+ CompletableFuture<Void> publish(CloudEvent event);
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java
new file mode 100644
index 00000000..923647d5
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistration.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+public interface EventRegistration {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java
new file mode 100644
index 00000000..e81723ff
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/EventRegistrationBuilder.java
@@ -0,0 +1,18 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+public interface EventRegistrationBuilder {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java
new file mode 100644
index 00000000..714d89d0
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/InMemoryEvents.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+import io.serverlessworkflow.impl.DefaultExecutorServiceFactory;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicReference;
+import java.util.function.Consumer;
+
+/*
+ * Straightforward implementation of in memory event broker.
+ * User might invoke notifyCE to simulate event reception.
+ */
+public class InMemoryEvents extends AbstractTypeConsumer implements EventPublisher {
+
+ private static InMemoryEvents instance = new InMemoryEvents();
+
+ private InMemoryEvents() {}
+
+ public static InMemoryEvents get() {
+ return instance;
+ }
+
+ private Map<String, Consumer<CloudEvent>> topicMap = new ConcurrentHashMap<>();
+
+ private AtomicReference<Consumer<CloudEvent>> allConsumerRef = new AtomicReference<>();
+
+ @Override
+ protected void register(String topicName, Consumer<CloudEvent> consumer) {
+ topicMap.put(topicName, consumer);
+ }
+
+ @Override
+ protected void unregister(String topicName) {
+ topicMap.remove(topicName);
+ }
+
+ @Override
+ public CompletableFuture<Void> publish(CloudEvent ce) {
+ return CompletableFuture.runAsync(
+ () -> {
+ Consumer<CloudEvent> allConsumer = allConsumerRef.get();
+ if (allConsumer != null) {
+ allConsumer.accept(ce);
+ }
+ Consumer<CloudEvent> consumer = topicMap.get(ce.getType());
+ if (consumer != null) {
+ consumer.accept(ce);
+ }
+ },
+ DefaultExecutorServiceFactory.instance().get());
+ }
+
+ @Override
+ protected void registerToAll(Consumer<CloudEvent> consumer) {
+ allConsumerRef.set(consumer);
+ }
+
+ @Override
+ protected void unregisterFromAll() {
+ allConsumerRef.set(null);
+ }
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java
new file mode 100644
index 00000000..8fdf2388
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistration.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.serverlessworkflow.impl.events;
+
+import io.cloudevents.CloudEvent;
+import java.util.function.Consumer;
+
+public record TypeEventRegistration(
+ String type, Consumer<CloudEvent> consumer, CloudEventPredicate predicate)
+ implements EventRegistration {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java
new file mode 100644
index 00000000..bd504a76
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/events/TypeEventRegistrationBuilder.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.serverlessworkflow.impl.events;
+
+public record TypeEventRegistrationBuilder(String type, CloudEventPredicate cePredicate)
+ implements EventRegistrationBuilder {}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java
index f5ee1136..f51b7a01 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/AbstractTaskExecutor.java
@@ -19,98 +19,198 @@
import com.fasterxml.jackson.databind.JsonNode;
import io.serverlessworkflow.api.types.Export;
+import io.serverlessworkflow.api.types.FlowDirective;
import io.serverlessworkflow.api.types.Input;
import io.serverlessworkflow.api.types.Output;
import io.serverlessworkflow.api.types.TaskBase;
+import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
import io.serverlessworkflow.impl.WorkflowFilter;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.WorkflowStatus;
import io.serverlessworkflow.impl.jsonschema.SchemaValidator;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
import java.time.Instant;
+import java.util.Iterator;
+import java.util.Map;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
public abstract class AbstractTaskExecutor<T extends TaskBase> implements TaskExecutor<T> {
protected final T task;
+ protected final String taskName;
+ protected final WorkflowPosition position;
+ private final Optional<WorkflowFilter> inputProcessor;
+ private final Optional<WorkflowFilter> outputProcessor;
+ private final Optional<WorkflowFilter> contextProcessor;
+ private final Optional<SchemaValidator> inputSchemaValidator;
+ private final Optional<SchemaValidator> outputSchemaValidator;
+ private final Optional<SchemaValidator> contextSchemaValidator;
- private Optional<WorkflowFilter> inputProcessor = Optional.empty();
- private Optional<WorkflowFilter> outputProcessor = Optional.empty();
- private Optional<WorkflowFilter> contextProcessor = Optional.empty();
- private Optional<SchemaValidator> inputSchemaValidator = Optional.empty();
- private Optional<SchemaValidator> outputSchemaValidator = Optional.empty();
- private Optional<SchemaValidator> contextSchemaValidator = Optional.empty();
+ public abstract static class AbstractTaskExecutorBuilder<T extends TaskBase>
+ implements TaskExecutorBuilder<T> {
+ private Optional<WorkflowFilter> inputProcessor = Optional.empty();
+ private Optional<WorkflowFilter> outputProcessor = Optional.empty();
+ private Optional<WorkflowFilter> contextProcessor = Optional.empty();
+ private Optional<SchemaValidator> inputSchemaValidator = Optional.empty();
+ private Optional<SchemaValidator> outputSchemaValidator = Optional.empty();
+ private Optional<SchemaValidator> contextSchemaValidator = Optional.empty();
+ protected final WorkflowPosition position;
+ protected final T task;
+ protected final String taskName;
+ protected final WorkflowApplication application;
+ protected final Workflow workflow;
+ protected final ResourceLoader resourceLoader;
- protected AbstractTaskExecutor(T task, WorkflowDefinition definition) {
- this.task = task;
- buildInputProcessors(definition);
- buildOutputProcessors(definition);
- buildContextProcessors(definition);
- }
+ private TaskExecutor<T> instance;
- private void buildInputProcessors(WorkflowDefinition definition) {
- if (task.getInput() != null) {
- Input input = task.getInput();
- this.inputProcessor = buildWorkflowFilter(definition.expressionFactory(), input.getFrom());
- this.inputSchemaValidator =
- getSchemaValidator(
- definition.validatorFactory(), definition.resourceLoader(), input.getSchema());
+ protected AbstractTaskExecutorBuilder(
+ WorkflowPosition position,
+ T task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ this.workflow = workflow;
+ this.taskName = position.last().toString();
+ this.position = position;
+ this.task = task;
+ this.application = application;
+ this.resourceLoader = resourceLoader;
+ if (task.getInput() != null) {
+ Input input = task.getInput();
+ this.inputProcessor = buildWorkflowFilter(application.expressionFactory(), input.getFrom());
+ this.inputSchemaValidator =
+ getSchemaValidator(application.validatorFactory(), resourceLoader, input.getSchema());
+ }
+ if (task.getOutput() != null) {
+ Output output = task.getOutput();
+ this.outputProcessor = buildWorkflowFilter(application.expressionFactory(), output.getAs());
+ this.outputSchemaValidator =
+ getSchemaValidator(application.validatorFactory(), resourceLoader, output.getSchema());
+ }
+ if (task.getExport() != null) {
+ Export export = task.getExport();
+ if (export.getAs() != null) {
+ this.contextProcessor =
+ buildWorkflowFilter(application.expressionFactory(), export.getAs());
+ }
+ this.contextSchemaValidator =
+ getSchemaValidator(application.validatorFactory(), resourceLoader, export.getSchema());
+ }
}
- }
- private void buildOutputProcessors(WorkflowDefinition definition) {
- if (task.getOutput() != null) {
- Output output = task.getOutput();
- this.outputProcessor = buildWorkflowFilter(definition.expressionFactory(), output.getAs());
- this.outputSchemaValidator =
- getSchemaValidator(
- definition.validatorFactory(), definition.resourceLoader(), output.getSchema());
+ protected final TransitionInfoBuilder next(
+ FlowDirective flowDirective, Map<String, TaskExecutorBuilder<?>> connections) {
+ if (flowDirective == null) {
+ return TransitionInfoBuilder.of(next(connections));
+ }
+ if (flowDirective.getFlowDirectiveEnum() != null) {
+ switch (flowDirective.getFlowDirectiveEnum()) {
+ case CONTINUE:
+ return TransitionInfoBuilder.of(next(connections));
+ case END:
+ return TransitionInfoBuilder.end();
+ case EXIT:
+ return TransitionInfoBuilder.exit();
+ }
+ }
+ return TransitionInfoBuilder.of(connections.get(flowDirective.getString()));
}
- }
- private void buildContextProcessors(WorkflowDefinition definition) {
- if (task.getExport() != null) {
- Export export = task.getExport();
- if (export.getAs() != null) {
- this.contextProcessor = buildWorkflowFilter(definition.expressionFactory(), export.getAs());
+ private TaskExecutorBuilder<?> next(Map<String, TaskExecutorBuilder<?>> connections) {
+ Iterator<TaskExecutorBuilder<?>> iter = connections.values().iterator();
+ TaskExecutorBuilder<?> next = null;
+ while (iter.hasNext()) {
+ TaskExecutorBuilder<?> item = iter.next();
+ if (item == this) {
+ next = iter.hasNext() ? iter.next() : null;
+ break;
+ }
}
- this.contextSchemaValidator =
- getSchemaValidator(
- definition.validatorFactory(), definition.resourceLoader(), export.getSchema());
+ return next;
}
+
+ public TaskExecutor<T> build() {
+ if (instance == null) {
+ instance = buildInstance();
+ }
+ return instance;
+ }
+
+ protected abstract TaskExecutor<T> buildInstance();
}
- @Override
- public TaskContext<T> apply(
- WorkflowContext workflowContext, TaskContext<?> parentContext, JsonNode input) {
- TaskContext<T> taskContext = new TaskContext<>(input, parentContext, task);
- if (TaskExecutorHelper.isActive(workflowContext)) {
+ protected AbstractTaskExecutor(AbstractTaskExecutorBuilder<T> builder) {
+ this.task = builder.task;
+ this.taskName = builder.taskName;
+ this.position = builder.position;
+ this.inputProcessor = builder.inputProcessor;
+ this.outputProcessor = builder.outputProcessor;
+ this.contextProcessor = builder.contextProcessor;
+ this.inputSchemaValidator = builder.inputSchemaValidator;
+ this.outputSchemaValidator = builder.outputSchemaValidator;
+ this.contextSchemaValidator = builder.contextSchemaValidator;
+ }
- workflowContext
- .definition()
- .listeners()
- .forEach(l -> l.onTaskStarted(parentContext.position(), task));
+ protected final CompletableFuture<TaskContext> executeNext(
+ CompletableFuture<TaskContext> future, WorkflowContext workflow) {
+ return future.thenCompose(
+ t -> {
+ TransitionInfo transition = t.transition();
+ if (transition.isEndNode()) {
+ workflow.instance().status(WorkflowStatus.COMPLETED);
+ } else if (transition.next() != null) {
+ return transition.next().apply(workflow, t.parent(), t.output());
+ }
+ return CompletableFuture.completedFuture(t);
+ });
+ }
- inputSchemaValidator.ifPresent(s -> s.validate(taskContext.rawInput()));
- inputProcessor.ifPresent(
- p -> taskContext.input(p.apply(workflowContext, taskContext, taskContext.rawInput())));
- internalExecute(workflowContext, taskContext);
- outputProcessor.ifPresent(
- p -> taskContext.output(p.apply(workflowContext, taskContext, taskContext.rawOutput())));
- outputSchemaValidator.ifPresent(s -> s.validate(taskContext.output()));
- contextProcessor.ifPresent(
- p ->
- workflowContext.context(
- p.apply(workflowContext, taskContext, workflowContext.context())));
- contextSchemaValidator.ifPresent(s -> s.validate(workflowContext.context()));
- taskContext.completedAt(Instant.now());
- workflowContext
- .definition()
- .listeners()
- .forEach(l -> l.onTaskEnded(parentContext.position(), task));
+ @Override
+ public CompletableFuture<TaskContext> apply(
+ WorkflowContext workflowContext, Optional<TaskContext> parentContext, JsonNode input) {
+ TaskContext taskContext = new TaskContext(input, position, parentContext, taskName, task);
+ CompletableFuture<TaskContext> completable = CompletableFuture.completedFuture(taskContext);
+ if (!TaskExecutorHelper.isActive(workflowContext)) {
+ return completable;
}
- return taskContext;
+ return executeNext(
+ completable
+ .thenApply(
+ t -> {
+ workflowContext
+ .definition()
+ .listeners()
+ .forEach(l -> l.onTaskStarted(position, task));
+ inputSchemaValidator.ifPresent(s -> s.validate(t.rawInput()));
+ inputProcessor.ifPresent(
+ p -> taskContext.input(p.apply(workflowContext, t, t.rawInput())));
+ return t;
+ })
+ .thenCompose(t -> execute(workflowContext, t))
+ .thenApply(
+ t -> {
+ outputProcessor.ifPresent(
+ p -> t.output(p.apply(workflowContext, t, t.rawOutput())));
+ outputSchemaValidator.ifPresent(s -> s.validate(t.output()));
+ contextProcessor.ifPresent(
+ p ->
+ workflowContext.context(
+ p.apply(workflowContext, t, workflowContext.context())));
+ contextSchemaValidator.ifPresent(s -> s.validate(workflowContext.context()));
+ t.completedAt(Instant.now());
+ workflowContext
+ .definition()
+ .listeners()
+ .forEach(l -> l.onTaskEnded(position, task));
+ return t;
+ }),
+ workflowContext);
}
- protected abstract void internalExecute(WorkflowContext workflow, TaskContext<T> taskContext);
+ protected abstract CompletableFuture<TaskContext> execute(
+ WorkflowContext workflow, TaskContext taskContext);
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java
index 535057fa..2a3d1ae9 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallTaskExecutor.java
@@ -15,23 +15,51 @@
*/
package io.serverlessworkflow.impl.executors;
+import com.fasterxml.jackson.databind.JsonNode;
import io.serverlessworkflow.api.types.TaskBase;
+import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
+import java.util.concurrent.CompletableFuture;
-public class CallTaskExecutor<T extends TaskBase> extends AbstractTaskExecutor<T> {
+public class CallTaskExecutor<T extends TaskBase> extends RegularTaskExecutor<T> {
private final CallableTask<T> callable;
- protected CallTaskExecutor(T task, WorkflowDefinition definition, CallableTask<T> callable) {
- super(task, definition);
- this.callable = callable;
- callable.init(task, definition);
+ public static class CallTaskExecutorBuilder<T extends TaskBase>
+ extends RegularTaskExecutorBuilder<T> {
+ private CallableTask<T> callable;
+
+ protected CallTaskExecutorBuilder(
+ WorkflowPosition position,
+ T task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader,
+ CallableTask<T> callable) {
+ super(position, task, workflow, application, resourceLoader);
+ this.callable = callable;
+ callable.init(task, application, resourceLoader);
+ }
+
+ @Override
+ public TaskExecutor<T> buildInstance() {
+ return new CallTaskExecutor<>(this);
+ }
+ }
+
+ protected CallTaskExecutor(CallTaskExecutorBuilder<T> builder) {
+ super(builder);
+ this.callable = builder.callable;
}
@Override
- protected void internalExecute(WorkflowContext workflow, TaskContext<T> taskContext) {
- taskContext.rawOutput(callable.apply(workflow, taskContext, taskContext.input()));
+ protected CompletableFuture<JsonNode> internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
+ return callable.apply(workflow, taskContext, taskContext.input());
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java
index ffb94912..ecff0662 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/CallableTask.java
@@ -18,13 +18,16 @@
import com.fasterxml.jackson.databind.JsonNode;
import io.serverlessworkflow.api.types.TaskBase;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
+import java.util.concurrent.CompletableFuture;
public interface CallableTask<T extends TaskBase> {
- void init(T task, WorkflowDefinition definition);
+ void init(T task, WorkflowApplication application, ResourceLoader loader);
- JsonNode apply(WorkflowContext workflowContext, TaskContext<T> taskContext, JsonNode input);
+ CompletableFuture<JsonNode> apply(
+ WorkflowContext workflowContext, TaskContext taskContext, JsonNode input);
boolean accept(Class<? extends TaskBase> clazz);
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java
index e7dd07db..0499fced 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DefaultTaskExecutorFactory.java
@@ -23,7 +23,21 @@
import io.serverlessworkflow.api.types.CallTask;
import io.serverlessworkflow.api.types.Task;
import io.serverlessworkflow.api.types.TaskBase;
-import io.serverlessworkflow.impl.WorkflowDefinition;
+import io.serverlessworkflow.api.types.Workflow;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.executors.CallTaskExecutor.CallTaskExecutorBuilder;
+import io.serverlessworkflow.impl.executors.DoExecutor.DoExecutorBuilder;
+import io.serverlessworkflow.impl.executors.EmitExecutor.EmitExecutorBuilder;
+import io.serverlessworkflow.impl.executors.ForExecutor.ForExecutorBuilder;
+import io.serverlessworkflow.impl.executors.ForkExecutor.ForkExecutorBuilder;
+import io.serverlessworkflow.impl.executors.ListenExecutor.ListenExecutorBuilder;
+import io.serverlessworkflow.impl.executors.RaiseExecutor.RaiseExecutorBuilder;
+import io.serverlessworkflow.impl.executors.SetExecutor.SetExecutorBuilder;
+import io.serverlessworkflow.impl.executors.SwitchExecutor.SwitchExecutorBuilder;
+import io.serverlessworkflow.impl.executors.TryExecutor.TryExecutorBuilder;
+import io.serverlessworkflow.impl.executors.WaitExecutor.WaitExecutorBuilder;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
import java.util.ServiceLoader;
import java.util.ServiceLoader.Provider;
@@ -39,42 +53,86 @@ protected DefaultTaskExecutorFactory() {}
private ServiceLoader<CallableTask> callTasks = ServiceLoader.load(CallableTask.class);
- public TaskExecutor<? extends TaskBase> getTaskExecutor(
- Task task, WorkflowDefinition definition) {
+ @Override
+ public TaskExecutorBuilder<? extends TaskBase> getTaskExecutor(
+ WorkflowPosition position,
+ Task task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
if (task.getCallTask() != null) {
CallTask callTask = task.getCallTask();
if (callTask.getCallHTTP() != null) {
- return new CallTaskExecutor<>(
- callTask.getCallHTTP(), definition, findCallTask(CallHTTP.class));
+ return new CallTaskExecutorBuilder<>(
+ position,
+ callTask.getCallHTTP(),
+ workflow,
+ application,
+ resourceLoader,
+ findCallTask(CallHTTP.class));
} else if (callTask.getCallAsyncAPI() != null) {
- return new CallTaskExecutor<>(
- callTask.getCallAsyncAPI(), definition, findCallTask(CallAsyncAPI.class));
+ return new CallTaskExecutorBuilder<>(
+ position,
+ callTask.getCallAsyncAPI(),
+ workflow,
+ application,
+ resourceLoader,
+ findCallTask(CallAsyncAPI.class));
} else if (callTask.getCallGRPC() != null) {
- return new CallTaskExecutor<>(
- callTask.getCallGRPC(), definition, findCallTask(CallGRPC.class));
+ return new CallTaskExecutorBuilder<>(
+ position,
+ callTask.getCallGRPC(),
+ workflow,
+ application,
+ resourceLoader,
+ findCallTask(CallGRPC.class));
} else if (callTask.getCallOpenAPI() != null) {
- return new CallTaskExecutor<>(
- callTask.getCallOpenAPI(), definition, findCallTask(CallOpenAPI.class));
+ return new CallTaskExecutorBuilder<>(
+ position,
+ callTask.getCallOpenAPI(),
+ workflow,
+ application,
+ resourceLoader,
+ findCallTask(CallOpenAPI.class));
} else if (callTask.getCallFunction() != null) {
- return new CallTaskExecutor<>(
- callTask.getCallFunction(), definition, findCallTask(CallFunction.class));
+ return new CallTaskExecutorBuilder<>(
+ position,
+ callTask.getCallFunction(),
+ workflow,
+ application,
+ resourceLoader,
+ findCallTask(CallFunction.class));
}
} else if (task.getSwitchTask() != null) {
- return new SwitchExecutor(task.getSwitchTask(), definition);
+ return new SwitchExecutorBuilder(
+ position, task.getSwitchTask(), workflow, application, resourceLoader);
} else if (task.getDoTask() != null) {
- return new DoExecutor(task.getDoTask(), definition);
+ return new DoExecutorBuilder(
+ position, task.getDoTask(), workflow, application, resourceLoader);
} else if (task.getSetTask() != null) {
- return new SetExecutor(task.getSetTask(), definition);
+ return new SetExecutorBuilder(
+ position, task.getSetTask(), workflow, application, resourceLoader);
} else if (task.getForTask() != null) {
- return new ForExecutor(task.getForTask(), definition);
+ return new ForExecutorBuilder(
+ position, task.getForTask(), workflow, application, resourceLoader);
} else if (task.getRaiseTask() != null) {
- return new RaiseExecutor(task.getRaiseTask(), definition);
+ return new RaiseExecutorBuilder(
+ position, task.getRaiseTask(), workflow, application, resourceLoader);
} else if (task.getTryTask() != null) {
- return new TryExecutor(task.getTryTask(), definition);
+ return new TryExecutorBuilder(
+ position, task.getTryTask(), workflow, application, resourceLoader);
} else if (task.getForkTask() != null) {
- return new ForkExecutor(task.getForkTask(), definition);
+ return new ForkExecutorBuilder(
+ position, task.getForkTask(), workflow, application, resourceLoader);
} else if (task.getWaitTask() != null) {
- return new WaitExecutor(task.getWaitTask(), definition);
+ return new WaitExecutorBuilder(
+ position, task.getWaitTask(), workflow, application, resourceLoader);
+ } else if (task.getListenTask() != null) {
+ return new ListenExecutorBuilder(
+ position, task.getListenTask(), workflow, application, resourceLoader);
+ } else if (task.getEmitTask() != null) {
+ return new EmitExecutorBuilder(
+ position, task.getEmitTask(), workflow, application, resourceLoader);
}
throw new UnsupportedOperationException(task.get().getClass().getName() + " not supported yet");
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java
index c5dbc4fd..a35e4a87 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/DoExecutor.java
@@ -15,19 +15,51 @@
*/
package io.serverlessworkflow.impl.executors;
+import com.fasterxml.jackson.databind.JsonNode;
import io.serverlessworkflow.api.types.DoTask;
+import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
-public class DoExecutor extends AbstractTaskExecutor<DoTask> {
+public class DoExecutor extends RegularTaskExecutor<DoTask> {
- protected DoExecutor(DoTask task, WorkflowDefinition definition) {
- super(task, definition);
+ private final TaskExecutor<?> taskExecutor;
+
+ public static class DoExecutorBuilder extends RegularTaskExecutorBuilder<DoTask> {
+ private TaskExecutor<?> taskExecutor;
+
+ protected DoExecutorBuilder(
+ WorkflowPosition position,
+ DoTask task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ super(position, task, workflow, application, resourceLoader);
+ taskExecutor =
+ TaskExecutorHelper.createExecutorList(
+ position, task.getDo(), workflow, application, resourceLoader);
+ }
+
+ @Override
+ public TaskExecutor<DoTask> buildInstance() {
+ return new DoExecutor(this);
+ }
+ }
+
+ private DoExecutor(DoExecutorBuilder builder) {
+ super(builder);
+ this.taskExecutor = builder.taskExecutor;
}
@Override
- protected void internalExecute(WorkflowContext workflow, TaskContext<DoTask> taskContext) {
- TaskExecutorHelper.processTaskList(task.getDo(), workflow, taskContext);
+ protected CompletableFuture<JsonNode> internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
+ return TaskExecutorHelper.processTaskList(
+ taskExecutor, workflow, Optional.of(taskContext), taskContext.input());
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java
new file mode 100644
index 00000000..7a8eb09d
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/EmitExecutor.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.executors;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import io.cloudevents.CloudEvent;
+import io.cloudevents.core.builder.CloudEventBuilder;
+import io.cloudevents.jackson.JsonCloudEventData;
+import io.serverlessworkflow.api.types.EmitTask;
+import io.serverlessworkflow.api.types.EventData;
+import io.serverlessworkflow.api.types.EventDataschema;
+import io.serverlessworkflow.api.types.EventProperties;
+import io.serverlessworkflow.api.types.EventSource;
+import io.serverlessworkflow.api.types.EventTime;
+import io.serverlessworkflow.api.types.Workflow;
+import io.serverlessworkflow.impl.ExpressionHolder;
+import io.serverlessworkflow.impl.StringFilter;
+import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import io.serverlessworkflow.impl.WorkflowContext;
+import io.serverlessworkflow.impl.WorkflowFilter;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.WorkflowUtils;
+import io.serverlessworkflow.impl.events.CloudEventUtils;
+import io.serverlessworkflow.impl.expressions.ExpressionFactory;
+import io.serverlessworkflow.impl.json.JsonUtils;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
+import java.net.URI;
+import java.time.OffsetDateTime;
+import java.util.Map;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.concurrent.CompletableFuture;
+
+public class EmitExecutor extends RegularTaskExecutor<EmitTask> {
+
+ private final EventPropertiesBuilder props;
+
+ public static class EmitExecutorBuilder extends RegularTaskExecutorBuilder<EmitTask> {
+
+ private EventPropertiesBuilder eventBuilder;
+
+ protected EmitExecutorBuilder(
+ WorkflowPosition position,
+ EmitTask task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ super(position, task, workflow, application, resourceLoader);
+ this.eventBuilder =
+ EventPropertiesBuilder.build(
+ task.getEmit().getEvent().getWith(), application.expressionFactory());
+ }
+
+ @Override
+ public TaskExecutor<EmitTask> buildInstance() {
+ return new EmitExecutor(this);
+ }
+ }
+
+ private EmitExecutor(EmitExecutorBuilder builder) {
+ super(builder);
+ this.props = builder.eventBuilder;
+ }
+
+ @Override
+ protected CompletableFuture<JsonNode> internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
+ return workflow
+ .definition()
+ .application()
+ .eventPublisher()
+ .publish(buildCloudEvent(workflow, taskContext))
+ .thenApply(v -> taskContext.input());
+ }
+
+ private CloudEvent buildCloudEvent(WorkflowContext workflow, TaskContext taskContext) {
+ io.cloudevents.core.v1.CloudEventBuilder ceBuilder = CloudEventBuilder.v1();
+ ceBuilder.withId(
+ props
+ .idFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .orElse(UUID.randomUUID().toString()));
+ ceBuilder.withSource(
+ props
+ .sourceFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .map(URI::create)
+ .orElse(URI.create("reference-impl")));
+ ceBuilder.withType(
+ props
+ .typeFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .orElseThrow(
+ () -> new IllegalArgumentException("Type is required for emitting events")));
+ props
+ .timeFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .ifPresent(value -> ceBuilder.withTime(value));
+ props
+ .subjectFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .ifPresent(value -> ceBuilder.withSubject(value));
+ props
+ .dataSchemaFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .ifPresent(value -> ceBuilder.withDataSchema(URI.create(value)));
+ props
+ .contentTypeFilter()
+ .map(filter -> filter.apply(workflow, taskContext))
+ .ifPresent(value -> ceBuilder.withDataContentType(value));
+ props
+ .dataFilter()
+ .map(filter -> filter.apply(workflow, taskContext, taskContext.input()))
+ .ifPresent(value -> ceBuilder.withData(JsonCloudEventData.wrap(value)));
+ props
+ .additionalFilter()
+ .map(filter -> filter.apply(workflow, taskContext, taskContext.input()))
+ .ifPresent(
+ value ->
+ value
+ .fields()
+ .forEachRemaining(
+ e -> CloudEventUtils.addExtension(ceBuilder, e.getKey(), e.getValue())));
+ return ceBuilder.build();
+ }
+
+ private static record EventPropertiesBuilder(
+ Optional<StringFilter> idFilter,
+ Optional<StringFilter> sourceFilter,
+ Optional<StringFilter> subjectFilter,
+ Optional<StringFilter> contentTypeFilter,
+ Optional<StringFilter> typeFilter,
+ Optional<StringFilter> dataSchemaFilter,
+ Optional<ExpressionHolder<OffsetDateTime>> timeFilter,
+ Optional<WorkflowFilter> dataFilter,
+ Optional<WorkflowFilter> additionalFilter) {
+
+ public static EventPropertiesBuilder build(
+ EventProperties properties, ExpressionFactory exprFactory) {
+ Optional<StringFilter> idFilter = buildFilter(exprFactory, properties.getId());
+ EventSource source = properties.getSource();
+ Optional<StringFilter> sourceFilter =
+ source == null
+ ? Optional.empty()
+ : Optional.of(
+ WorkflowUtils.buildStringFilter(
+ exprFactory,
+ source.getRuntimeExpression(),
+ WorkflowUtils.toString(source.getUriTemplate())));
+ Optional<StringFilter> subjectFilter = buildFilter(exprFactory, properties.getSubject());
+ Optional<StringFilter> contentTypeFilter =
+ buildFilter(exprFactory, properties.getDatacontenttype());
+ Optional<StringFilter> typeFilter = buildFilter(exprFactory, properties.getType());
+ EventDataschema dataSchema = properties.getDataschema();
+ Optional<StringFilter> dataSchemaFilter =
+ dataSchema == null
+ ? Optional.empty()
+ : Optional.of(
+ WorkflowUtils.buildStringFilter(
+ exprFactory,
+ dataSchema.getExpressionDataSchema(),
+ WorkflowUtils.toString(dataSchema.getLiteralDataSchema())));
+ EventTime time = properties.getTime();
+ Optional<ExpressionHolder<OffsetDateTime>> timeFilter =
+ time == null
+ ? Optional.empty()
+ : Optional.of(
+ WorkflowUtils.buildExpressionHolder(
+ exprFactory,
+ time.getRuntimeExpression(),
+ CloudEventUtils.toOffset(time.getLiteralTime()),
+ JsonUtils::toOffsetDateTime));
+ EventData data = properties.getData();
+ Optional<WorkflowFilter> dataFilter =
+ properties.getData() == null
+ ? Optional.empty()
+ : Optional.of(
+ WorkflowUtils.buildWorkflowFilter(
+ exprFactory, data.getRuntimeExpression(), data.getObject()));
+ Map<String, Object> ceAttrs = properties.getAdditionalProperties();
+ Optional<WorkflowFilter> additionalFilter =
+ ceAttrs == null || ceAttrs.isEmpty()
+ ? Optional.empty()
+ : Optional.of(WorkflowUtils.buildWorkflowFilter(exprFactory, null, ceAttrs));
+ return new EventPropertiesBuilder(
+ idFilter,
+ sourceFilter,
+ subjectFilter,
+ contentTypeFilter,
+ typeFilter,
+ dataSchemaFilter,
+ timeFilter,
+ dataFilter,
+ additionalFilter);
+ }
+
+ private static Optional<StringFilter> buildFilter(ExpressionFactory exprFactory, String str) {
+ return str == null
+ ? Optional.empty()
+ : Optional.of(WorkflowUtils.buildStringFilter(exprFactory, str));
+ }
+ }
+}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java
index cb4ecec0..8f7e04f1 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForExecutor.java
@@ -18,32 +18,67 @@
import com.fasterxml.jackson.databind.JsonNode;
import io.serverlessworkflow.api.types.ForTask;
import io.serverlessworkflow.api.types.ForTaskConfiguration;
+import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
import io.serverlessworkflow.impl.WorkflowFilter;
+import io.serverlessworkflow.impl.WorkflowPosition;
import io.serverlessworkflow.impl.WorkflowUtils;
+import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
import java.util.Iterator;
import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
-public class ForExecutor extends AbstractTaskExecutor<ForTask> {
+public class ForExecutor extends RegularTaskExecutor<ForTask> {
private final WorkflowFilter collectionExpr;
private final Optional<WorkflowFilter> whileExpr;
+ private final TaskExecutor<?> taskExecutor;
- protected ForExecutor(ForTask task, WorkflowDefinition definition) {
- super(task, definition);
- ForTaskConfiguration forConfig = task.getFor();
- this.collectionExpr =
- WorkflowUtils.buildWorkflowFilter(definition.expressionFactory(), forConfig.getIn());
- this.whileExpr = WorkflowUtils.optionalFilter(definition.expressionFactory(), task.getWhile());
+ public static class ForExecutorBuilder extends RegularTaskExecutorBuilder<ForTask> {
+ private WorkflowFilter collectionExpr;
+ private Optional<WorkflowFilter> whileExpr;
+ private TaskExecutor<?> taskExecutor;
+
+ protected ForExecutorBuilder(
+ WorkflowPosition position,
+ ForTask task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ super(position, task, workflow, application, resourceLoader);
+ ForTaskConfiguration forConfig = task.getFor();
+ this.collectionExpr =
+ WorkflowUtils.buildWorkflowFilter(application.expressionFactory(), forConfig.getIn());
+ this.whileExpr =
+ WorkflowUtils.optionalFilter(application.expressionFactory(), task.getWhile());
+ this.taskExecutor =
+ TaskExecutorHelper.createExecutorList(
+ position, task.getDo(), workflow, application, resourceLoader);
+ }
+
+ @Override
+ public TaskExecutor<ForTask> buildInstance() {
+ return new ForExecutor(this);
+ }
+ }
+
+ protected ForExecutor(ForExecutorBuilder builder) {
+ super(builder);
+ this.collectionExpr = builder.collectionExpr;
+ this.whileExpr = builder.whileExpr;
+ this.taskExecutor = builder.taskExecutor;
}
@Override
- protected void internalExecute(WorkflowContext workflow, TaskContext<ForTask> taskContext) {
+ protected CompletableFuture<JsonNode> internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
Iterator<JsonNode> iter =
collectionExpr.apply(workflow, taskContext, taskContext.input()).iterator();
int i = 0;
+ CompletableFuture<JsonNode> future = CompletableFuture.completedFuture(taskContext.input());
while (iter.hasNext()
&& whileExpr
.map(w -> w.apply(workflow, taskContext, taskContext.rawOutput()))
@@ -52,7 +87,12 @@ protected void internalExecute(WorkflowContext workflow, TaskContext ta
JsonNode item = iter.next();
taskContext.variables().put(task.getFor().getEach(), item);
taskContext.variables().put(task.getFor().getAt(), i++);
- TaskExecutorHelper.processTaskList(task.getDo(), workflow, taskContext);
+ future =
+ future.thenCompose(
+ input ->
+ TaskExecutorHelper.processTaskList(
+ taskExecutor, workflow, Optional.of(taskContext), input));
}
+ return future;
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java
index e0ce3b02..85bd3f22 100644
--- a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ForkExecutor.java
@@ -16,95 +16,95 @@
package io.serverlessworkflow.impl.executors;
import com.fasterxml.jackson.databind.JsonNode;
-import io.serverlessworkflow.api.types.FlowDirectiveEnum;
import io.serverlessworkflow.api.types.ForkTask;
import io.serverlessworkflow.api.types.ForkTaskConfiguration;
-import io.serverlessworkflow.api.types.TaskItem;
+import io.serverlessworkflow.api.types.Workflow;
import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
import io.serverlessworkflow.impl.WorkflowContext;
-import io.serverlessworkflow.impl.WorkflowDefinition;
-import io.serverlessworkflow.impl.WorkflowStatus;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.executors.RegularTaskExecutor.RegularTaskExecutorBuilder;
import io.serverlessworkflow.impl.json.JsonUtils;
-import java.lang.reflect.UndeclaredThrowableException;
-import java.util.ArrayList;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
import java.util.HashMap;
-import java.util.List;
import java.util.Map;
-import java.util.concurrent.ExecutionException;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
+import java.util.stream.Collectors;
import java.util.stream.Stream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-public class ForkExecutor extends AbstractTaskExecutor<ForkTask> {
+public class ForkExecutor extends RegularTaskExecutor<ForkTask> {
- private static final Logger logger = LoggerFactory.getLogger(ForkExecutor.class);
private final ExecutorService service;
+ private final Map<String, TaskExecutor<?>> taskExecutors;
+ private final boolean compete;
- protected ForkExecutor(ForkTask task, WorkflowDefinition definition) {
- super(task, definition);
- service = definition.executorService();
- }
+ public static class ForkExecutorBuilder extends RegularTaskExecutorBuilder<ForkTask> {
- @Override
- protected void internalExecute(WorkflowContext workflow, TaskContext taskContext) {
- ForkTaskConfiguration forkConfig = task.getFork();
+ private final Map<String, TaskExecutor<?>> taskExecutors;
+ private final boolean compete;
- if (!forkConfig.getBranches().isEmpty()) {
- Map<String, Future<TaskContext<?>>> futures = new HashMap<>();
- int index = 0;
- for (TaskItem item : forkConfig.getBranches()) {
- final int i = index++;
- futures.put(
- item.getName(),
- service.submit(() -> executeBranch(workflow, taskContext.copy(), item, i)));
- }
- List<Map.Entry<String, TaskContext<?>>> results = new ArrayList<>();
- for (Map.Entry<String, Future<TaskContext<?>>> entry : futures.entrySet()) {
- try {
- results.add(Map.entry(entry.getKey(), entry.getValue().get()));
- } catch (ExecutionException ex) {
- Throwable cause = ex.getCause();
- if (cause instanceof RuntimeException) {
- throw (RuntimeException) cause;
- } else {
- throw new UndeclaredThrowableException(ex);
- }
- } catch (InterruptedException ex) {
- logger.warn("Branch {} was interrupted, no result will be recorded", entry.getKey(), ex);
- }
- }
- if (!results.isEmpty()) {
- Stream<Map.Entry<String, TaskContext<?>>> sortedStream =
- results.stream()
- .sorted(
- (arg1, arg2) ->
- arg1.getValue().completedAt().compareTo(arg2.getValue().completedAt()));
- taskContext.rawOutput(
- forkConfig.isCompete()
- ? sortedStream.map(e -> e.getValue().output()).findFirst().orElseThrow()
- : sortedStream
- .map(
- e ->
- JsonUtils.mapper()
- .createObjectNode()
- .set(e.getKey(), e.getValue().output()))
- .collect(JsonUtils.arrayNodeCollector()));
- }
+ protected ForkExecutorBuilder(
+ WorkflowPosition position,
+ ForkTask task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ super(position, task, workflow, application, resourceLoader);
+ ForkTaskConfiguration forkConfig = task.getFork();
+ this.taskExecutors =
+ TaskExecutorHelper.createBranchList(
+ position, forkConfig.getBranches(), workflow, application, resourceLoader);
+ this.compete = forkConfig.isCompete();
}
+
+ @Override
+ public TaskExecutor<ForkTask> buildInstance() {
+ return new ForkExecutor(this);
+ }
+ }
+
+ protected ForkExecutor(ForkExecutorBuilder builder) {
+ super(builder);
+ service = builder.application.executorService();
+ this.taskExecutors = builder.taskExecutors;
+ this.compete = builder.compete;
}
- private TaskContext<?> executeBranch(
- WorkflowContext workflow, TaskContext taskContext, TaskItem taskItem, int index) {
- taskContext.position().addIndex(index);
- TaskContext<?> result =
- TaskExecutorHelper.executeTask(workflow, taskContext, taskItem, taskContext.input());
- if (result.flowDirective() != null
- && result.flowDirective().getFlowDirectiveEnum() == FlowDirectiveEnum.END) {
- workflow.instance().status(WorkflowStatus.COMPLETED);
+ @Override
+ protected CompletableFuture<JsonNode> internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
+ Map<String, CompletableFuture<TaskContext>> futures = new HashMap<>();
+ CompletableFuture<TaskContext> initial = CompletableFuture.completedFuture(taskContext);
+ for (Map.Entry<String, TaskExecutor<?>> entry : taskExecutors.entrySet()) {
+ futures.put(
+ entry.getKey(),
+ initial.thenComposeAsync(
+ t -> entry.getValue().apply(workflow, Optional.of(t), t.input()), service));
}
- taskContext.position().back();
- return result;
+ return CompletableFuture.allOf(
+ futures.values().toArray(new CompletableFuture<?>[futures.size()]))
+ .thenApply(
+ i ->
+ combine(
+ futures.entrySet().stream()
+ .collect(Collectors.toMap(Entry::getKey, e -> e.getValue().join()))));
+ }
+
+ private JsonNode combine(Map<String, TaskContext> futures) {
+
+ Stream<Map.Entry<String, TaskContext>> sortedStream =
+ futures.entrySet().stream()
+ .sorted(
+ (arg1, arg2) ->
+ arg1.getValue().completedAt().compareTo(arg2.getValue().completedAt()));
+ return compete
+ ? sortedStream.map(e -> e.getValue().output()).findFirst().orElseThrow()
+ : sortedStream
+ .map(
+ e -> JsonUtils.mapper().createObjectNode().set(e.getKey(), e.getValue().output()))
+ .collect(JsonUtils.arrayNodeCollector());
}
}
diff --git a/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java
new file mode 100644
index 00000000..e351bae2
--- /dev/null
+++ b/impl/core/src/main/java/io/serverlessworkflow/impl/executors/ListenExecutor.java
@@ -0,0 +1,314 @@
+/*
+ * Copyright 2020-Present The Serverless Workflow Specification Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package io.serverlessworkflow.impl.executors;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import io.cloudevents.CloudEvent;
+import io.serverlessworkflow.api.types.AllEventConsumptionStrategy;
+import io.serverlessworkflow.api.types.AnyEventConsumptionStrategy;
+import io.serverlessworkflow.api.types.EventConsumptionStrategy;
+import io.serverlessworkflow.api.types.EventFilter;
+import io.serverlessworkflow.api.types.ListenTask;
+import io.serverlessworkflow.api.types.ListenTaskConfiguration;
+import io.serverlessworkflow.api.types.ListenTaskConfiguration.ListenAndReadAs;
+import io.serverlessworkflow.api.types.ListenTo;
+import io.serverlessworkflow.api.types.OneEventConsumptionStrategy;
+import io.serverlessworkflow.api.types.SubscriptionIterator;
+import io.serverlessworkflow.api.types.Until;
+import io.serverlessworkflow.api.types.Workflow;
+import io.serverlessworkflow.impl.TaskContext;
+import io.serverlessworkflow.impl.WorkflowApplication;
+import io.serverlessworkflow.impl.WorkflowContext;
+import io.serverlessworkflow.impl.WorkflowFilter;
+import io.serverlessworkflow.impl.WorkflowPosition;
+import io.serverlessworkflow.impl.WorkflowStatus;
+import io.serverlessworkflow.impl.WorkflowUtils;
+import io.serverlessworkflow.impl.events.CloudEventUtils;
+import io.serverlessworkflow.impl.events.EventConsumer;
+import io.serverlessworkflow.impl.events.EventRegistration;
+import io.serverlessworkflow.impl.events.EventRegistrationBuilder;
+import io.serverlessworkflow.impl.json.JsonUtils;
+import io.serverlessworkflow.impl.resources.ResourceLoader;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.BiConsumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+public abstract class ListenExecutor extends RegularTaskExecutor {
+
+ protected final EventRegistrationBuilderCollection regBuilders;
+ protected final Optional> loop;
+ protected final Function converter;
+ protected final EventConsumer eventConsumer;
+
+ private static record EventRegistrationBuilderCollection(
+ Collection registrations, boolean isAnd) {}
+
+ public static class ListenExecutorBuilder extends RegularTaskExecutorBuilder {
+
+ private EventRegistrationBuilderCollection registrations;
+ private WorkflowFilter until;
+ private EventRegistrationBuilderCollection untilRegistrations;
+ private TaskExecutor> loop;
+ private Function converter = this::defaultCEConverter;
+
+ private EventRegistrationBuilderCollection allEvents(AllEventConsumptionStrategy allStrategy) {
+ return new EventRegistrationBuilderCollection(from(allStrategy.getAll()), true);
+ }
+
+ private EventRegistrationBuilderCollection anyEvents(AnyEventConsumptionStrategy anyStrategy) {
+ List eventFilters = anyStrategy.getAny();
+ return new EventRegistrationBuilderCollection(
+ eventFilters.isEmpty() ? registerToAll() : from(eventFilters), false);
+ }
+
+ private EventRegistrationBuilderCollection oneEvent(OneEventConsumptionStrategy oneStrategy) {
+ return new EventRegistrationBuilderCollection(List.of(from(oneStrategy.getOne())), true);
+ }
+
+ protected ListenExecutorBuilder(
+ WorkflowPosition position,
+ ListenTask task,
+ Workflow workflow,
+ WorkflowApplication application,
+ ResourceLoader resourceLoader) {
+ super(position, task, workflow, application, resourceLoader);
+ ListenTaskConfiguration listen = task.getListen();
+ ListenTo to = listen.getTo();
+ if (to.getAllEventConsumptionStrategy() != null) {
+ registrations = allEvents(to.getAllEventConsumptionStrategy());
+ } else if (to.getAnyEventConsumptionStrategy() != null) {
+ AnyEventConsumptionStrategy any = to.getAnyEventConsumptionStrategy();
+ registrations = anyEvents(any);
+ Until untilDesc = any.getUntil();
+ if (untilDesc != null) {
+ if (untilDesc.getAnyEventUntilCondition() != null) {
+ until =
+ WorkflowUtils.buildWorkflowFilter(
+ application.expressionFactory(), untilDesc.getAnyEventUntilCondition());
+ } else if (untilDesc.getAnyEventUntilConsumed() != null) {
+ EventConsumptionStrategy strategy = untilDesc.getAnyEventUntilConsumed();
+ if (strategy.getAllEventConsumptionStrategy() != null) {
+ untilRegistrations = allEvents(strategy.getAllEventConsumptionStrategy());
+ } else if (strategy.getAnyEventConsumptionStrategy() != null) {
+ untilRegistrations = anyEvents(strategy.getAnyEventConsumptionStrategy());
+ } else if (strategy.getOneEventConsumptionStrategy() != null) {
+ untilRegistrations = oneEvent(strategy.getOneEventConsumptionStrategy());
+ }
+ }
+ }
+ } else if (to.getOneEventConsumptionStrategy() != null) {
+ registrations = oneEvent(to.getOneEventConsumptionStrategy());
+ }
+ SubscriptionIterator forEach = task.getForeach();
+ if (forEach != null) {
+ loop =
+ TaskExecutorHelper.createExecutorList(
+ position, forEach.getDo(), workflow, application, resourceLoader);
+ }
+ ListenAndReadAs readAs = listen.getRead();
+ if (readAs != null) {
+ switch (readAs) {
+ case ENVELOPE:
+ converter = CloudEventUtils::toJsonNode;
+ default:
+ case DATA:
+ converter = this::defaultCEConverter;
+ break;
+ }
+ }
+ }
+
+ private Collection registerToAll() {
+ return application.eventConsumer().listenToAll(application);
+ }
+
+ private JsonNode defaultCEConverter(CloudEvent ce) {
+ return CloudEventUtils.toJsonNode(ce.getData());
+ }
+
+ private Collection from(List filters) {
+ return filters.stream().map(this::from).collect(Collectors.toList());
+ }
+
+ private EventRegistrationBuilder from(EventFilter filter) {
+ return application.eventConsumer().listen(filter, application);
+ }
+
+ @Override
+ public TaskExecutor buildInstance() {
+ return registrations.isAnd() ? new AndListenExecutor(this) : new OrListenExecutor(this);
+ }
+ }
+
+ // Listen executor for "all"/"one" strategies: each event registration has its
+ // own future, and the combined future (allOf, built in the parent class)
+ // completes only once every registration has consumed its event.
+ public static class AndListenExecutor extends ListenExecutor {
+
+ public AndListenExecutor(ListenExecutorBuilder builder) {
+ super(builder);
+ }
+
+ // Records the converted event in the accumulated output array and completes
+ // this registration's future unconditionally — AND semantics need no
+ // "until" check per event.
+ protected void internalProcessCe(
+ JsonNode node,
+ ArrayNode arrayNode,
+ WorkflowContext workflow,
+ TaskContext taskContext,
+ CompletableFuture future) {
+ arrayNode.add(node);
+ future.complete(node);
+ }
+ }
+
+ // Listen executor for the "any" strategy: the combined future (anyOf, built
+ // in the parent class) completes on the first qualifying event, optionally
+ // gated by an "until" condition or by a separate "until" event subscription.
+ public static class OrListenExecutor extends ListenExecutor {
+
+ // Boolean expression evaluated against the accumulated events; empty means
+ // "complete on the first event".
+ private final Optional until;
+ // Non-null when "until" is itself an event-consumption strategy rather
+ // than an expression; mutually exclusive with `until` by builder logic.
+ private final EventRegistrationBuilderCollection untilRegBuilders;
+
+ public OrListenExecutor(ListenExecutorBuilder builder) {
+ super(builder);
+ this.until = Optional.ofNullable(builder.until);
+ this.untilRegBuilders = builder.untilRegistrations;
+ }
+
+ @Override
+ protected CompletableFuture> buildFuture(
+ EventRegistrationBuilderCollection regCollection,
+ Collection registrations,
+ BiConsumer> consumer) {
+ CompletableFuture> combinedFuture =
+ super.buildFuture(regCollection, registrations, consumer);
+ if (untilRegBuilders != null) {
+ // Listen for the "until" events in parallel; when they are consumed,
+ // force-complete the main future and tear down the until listeners.
+ Collection untilRegistrations = new ArrayList<>();
+ CompletableFuture> untilFuture =
+ combine(untilRegBuilders, untilRegistrations, (ce, f) -> f.complete(null));
+ untilFuture.thenAccept(
+ v -> {
+ combinedFuture.complete(null);
+ untilRegistrations.forEach(reg -> eventConsumer.unregister(reg));
+ });
+ }
+ return combinedFuture;
+ }
+
+ // Accumulates the event, then completes the future only when the "until"
+ // expression is absent or evaluates true over the accumulated array. When
+ // completion is driven by until-events instead (untilRegBuilders != null),
+ // per-event completion is suppressed — see buildFuture above.
+ protected void internalProcessCe(
+ JsonNode node,
+ ArrayNode arrayNode,
+ WorkflowContext workflow,
+ TaskContext taskContext,
+ CompletableFuture future) {
+ arrayNode.add(node);
+ if ((until.isEmpty()
+ || until
+ .filter(u -> u.apply(workflow, taskContext, arrayNode).asBoolean())
+ .isPresent())
+ && untilRegBuilders == null) {
+ future.complete(node);
+ }
+ }
+ }
+
+ // Subclass hook invoked once per consumed CloudEvent (already converted to
+ // JSON): append to the accumulated output and decide whether to complete the
+ // registration's future (AND: always; OR: per the "until" rules).
+ protected abstract void internalProcessCe(
+ JsonNode node,
+ ArrayNode arrayNode,
+ WorkflowContext workflow,
+ TaskContext taskContext,
+ CompletableFuture future);
+
+ // Puts the workflow instance into WAITING, subscribes to the configured
+ // events, and resolves with the array of all consumed-event payloads once
+ // the consumption strategy is satisfied, restoring RUNNING and unregistering
+ // every listener on the way out.
+ @Override
+ protected CompletableFuture internalExecute(
+ WorkflowContext workflow, TaskContext taskContext) {
+ ArrayNode output = JsonUtils.mapper().createArrayNode();
+ Collection registrations = new ArrayList<>();
+ workflow.instance().status(WorkflowStatus.WAITING);
+ return buildFuture(
+ regBuilders,
+ registrations,
+ (BiConsumer>)
+ // processCe is defined outside this view — presumably it applies the
+ // optional foreach loop before delegating to internalProcessCe; verify.
+ ((ce, future) ->
+ processCe(converter.apply(ce), output, workflow, taskContext, future)))
+ .thenApply(
+ v -> {
+ workflow.instance().status(WorkflowStatus.RUNNING);
+ registrations.forEach(reg -> eventConsumer.unregister(reg));
+ return output;
+ });
+ }
+
+ // Default future construction: delegate straight to combine(). Overridden by
+ // OrListenExecutor to add until-event handling.
+ protected CompletableFuture> buildFuture(
+ EventRegistrationBuilderCollection regCollection,
+ Collection registrations,
+ BiConsumer> consumer) {
+ return combine(regCollection, registrations, consumer);
+ }
+
+ // Turns each registration builder into a per-event future (side effect:
+ // populates `registrations` for later unregister) and joins them with allOf
+ // for AND semantics or anyOf for OR semantics.
+ protected final CompletableFuture> combine(
+ EventRegistrationBuilderCollection regCollection,
+ Collection registrations,
+ BiConsumer> consumer) {
+ CompletableFuture[] futures =
+ regCollection.registrations().stream()
+ .map(reg -> toCompletable(reg, registrations, consumer))
+ .toArray(size -> new CompletableFuture[size]);
+ return regCollection.isAnd()
+ ? CompletableFuture.allOf(futures)
+ : CompletableFuture.anyOf(futures);
+ }
+
+ private CompletableFuture toCompletable(
+ EventRegistrationBuilder regBuilder,
+ Collection registrations,
+ BiConsumer